diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 00000000..ff261bad --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,9 @@ +ARG VARIANT="3.9" +FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} + +USER vscode + +RUN curl -sSf https://rye.astral.sh/get | RYE_VERSION="0.44.0" RYE_INSTALL_OPTION="--yes" bash +ENV PATH=/home/vscode/.rye/shims:$PATH + +RUN echo "[[ -d .venv ]] && source .venv/bin/activate || export PATH=\$PATH" >> /home/vscode/.bashrc diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 00000000..c17fdc16 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,43 @@ +// For format details, see https://aka.ms/devcontainer.json. For config options, see the +// README at: https://github.com/devcontainers/templates/tree/main/src/debian +{ + "name": "Debian", + "build": { + "dockerfile": "Dockerfile", + "context": ".." + }, + + "postStartCommand": "rye sync --all-features", + + "customizations": { + "vscode": { + "extensions": [ + "ms-python.python" + ], + "settings": { + "terminal.integrated.shell.linux": "/bin/bash", + "python.pythonPath": ".venv/bin/python", + "python.defaultInterpreterPath": ".venv/bin/python", + "python.typeChecking": "basic", + "terminal.integrated.env.linux": { + "PATH": "/home/vscode/.rye/shims:${env:PATH}" + } + } + } + }, + "features": { + "ghcr.io/devcontainers/features/node:1": {} + } + + // Features to add to the dev container. More info: https://containers.dev/features. + // "features": {}, + + // Use 'forwardPorts' to make a list of ports inside the container available locally. + // "forwardPorts": [], + + // Configure tool-specific properties. + // "customizations": {}, + + // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. + // "remoteUser": "root" +} diff --git a/.github/workflows/bandit-ci.yml b/.github/workflows/bandit-ci.yml deleted file mode 100644 index 0b93e205..00000000 --- a/.github/workflows/bandit-ci.yml +++ /dev/null @@ -1,63 +0,0 @@ -name: Bandit - -on: - # Scan changed files in PRs: - pull_request: {} - -jobs: - bandit-scan: - name: Bandit - runs-on: ubuntu-22.04 - if: (github.actor != 'dependabot[bot]') && (github.actor != 'github-actions[bot]') - steps: - - name: Install PyCQA/bandit - shell: bash - run: | - pip install bandit - - name: Checkout base branch - uses: actions/checkout@v3 - with: - ref: ${{ github.event.pull_request.base.ref }} - fetch-depth: 1 - submodules: false - - name: Run a baseline scan - shell: bash - run: | - bandit --recursive --aggregate file . -f json -o baseline.json || true - - name: Checkout feature branch - shell: bash - run: | - git fetch origin $GITHUB_HEAD_REF - git checkout $GITHUB_HEAD_REF - - name: Run Scan off of baseline - shell: bash - run: | - bandit --recursive --aggregate file . 
--baseline baseline.json -f json -o results.json || true - - name: Install logging prerequisites - shell: bash {0} - run: | - sudo apt-get -y install jq curl - - name: Generate logger template - shell: bash {0} # don't fail the job if the logging fails - run: | - jq -n --arg organization $GITHUB_REPOSITORY_OWNER \ - -n --arg time $( date +'%Y-%m-%dT%H:%M:%SZ' ) \ - -n --arg action $GITHUB_WORKFLOW \ - -n --arg repository $GITHUB_REPOSITORY \ - -n --arg sha $GITHUB_SHA \ - -n --arg branch $GITHUB_HEAD_REF \ - -n --arg link "https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" \ - -f .github/workflows/output-template.json > tmp-output.json - - name: Format results appropriately from results.json - shell: bash {0} # don't fail the job if the logging fails - run: | - jq '.results | map({"path": .filename, "message": .issue_text, "line": .line_number})' results.json > tmp.json - jq --argjson scanResults "$( output.json - - name: Send unified results to logging cluster - shell: bash {0} # don't fail the job if the logging fails - run: | - curl -X POST \ - -H "Content-Type: application/json" \ - -H "Authorization: Bearer ${{ secrets.N8N_PRODSEC_ACTIONS_TOKEN }}" \ - -d @./output.json \ - ${{ secrets.N8N_PRODSEC_ACTIONS_ENDPOINT }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..02569035 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,96 @@ +name: CI +on: + push: + branches-ignore: + - 'generated' + - 'codegen/**' + - 'integrated/**' + - 'stl-preview-head/**' + - 'stl-preview-base/**' + pull_request: + branches-ignore: + - 'stl-preview-head/**' + - 'stl-preview-base/**' + +jobs: + lint: + timeout-minutes: 10 + name: lint + runs-on: ${{ github.repository == 'stainless-sdks/agentex-sdk-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + if: github.event_name == 'push' || github.event.pull_request.head.repo.fork + steps: + - uses: actions/checkout@v4 + + - name: Install Rye + run: | + curl -sSf https://rye.astral.sh/get | bash + echo "$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: '0.44.0' + RYE_INSTALL_OPTION: '--yes' + + - name: Install dependencies + run: rye sync --all-features + + - name: Run lints + run: ./scripts/lint + + build: + if: github.repository == 'stainless-sdks/agentex-sdk-python' && (github.event_name == 'push' || github.event.pull_request.head.repo.fork) + timeout-minutes: 10 + name: build + permissions: + contents: read + id-token: write + runs-on: depot-ubuntu-24.04 + steps: + - uses: actions/checkout@v4 + + - name: Install Rye + run: | + curl -sSf https://rye.astral.sh/get | bash + echo "$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: '0.44.0' + RYE_INSTALL_OPTION: '--yes' + + - name: Install dependencies + run: rye sync --all-features + + - name: Run build + run: rye build + + - name: Get GitHub OIDC Token + id: github-oidc + uses: actions/github-script@v6 + with: + script: core.setOutput('github_token', await core.getIDToken()); + + - name: Upload tarball + env: + URL: https://pkg.stainless.com/s + AUTH: ${{ steps.github-oidc.outputs.github_token }} + SHA: ${{ github.sha }} + run: ./scripts/utils/upload-artifact.sh + + test: + timeout-minutes: 10 + name: test + runs-on: ${{ github.repository == 'stainless-sdks/agentex-sdk-python' && 'depot-ubuntu-24.04' || 'ubuntu-latest' }} + if: github.event_name == 'push' || github.event.pull_request.head.repo.fork + steps: + - uses: actions/checkout@v4 + + - name: Install Rye + run: | + curl -sSf https://rye.astral.sh/get | bash + echo 
"$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: '0.44.0' + RYE_INSTALL_OPTION: '--yes' + + - name: Bootstrap + run: ./scripts/bootstrap + + - name: Run tests + run: ./scripts/test diff --git a/.github/workflows/output-template.json b/.github/workflows/output-template.json deleted file mode 100644 index e6303bcf..00000000 --- a/.github/workflows/output-template.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "source": "github", - "organization": "\($organization)", - "timestamp": "\($time)", - "action": "\($action)", - "meta": { - "repository": "\($repository)", - "commit": "\($sha)", - "branch": "\($branch)", - "link": "\($link)" - }, - "results": [] -} diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml new file mode 100644 index 00000000..7e8ac3bd --- /dev/null +++ b/.github/workflows/publish-pypi.yml @@ -0,0 +1,31 @@ +# This workflow is triggered when a GitHub release is created. +# It can also be run manually to re-publish to PyPI in case it failed for some reason. +# You can run this workflow by navigating to https://www.github.com/scaleapi/agentex-python/actions/workflows/publish-pypi.yml +name: Publish PyPI +on: + workflow_dispatch: + + release: + types: [published] + +jobs: + publish: + name: publish + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Install Rye + run: | + curl -sSf https://rye.astral.sh/get | bash + echo "$HOME/.rye/shims" >> $GITHUB_PATH + env: + RYE_VERSION: '0.44.0' + RYE_INSTALL_OPTION: '--yes' + + - name: Publish to PyPI + run: | + bash ./bin/publish-pypi + env: + PYPI_TOKEN: ${{ secrets.AGENTEX_PYPI_TOKEN || secrets.PYPI_TOKEN }} diff --git a/.github/workflows/release-doctor.yml b/.github/workflows/release-doctor.yml new file mode 100644 index 00000000..0521b7f6 --- /dev/null +++ b/.github/workflows/release-doctor.yml @@ -0,0 +1,21 @@ +name: Release Doctor +on: + pull_request: + branches: + - main + workflow_dispatch: + +jobs: + release_doctor: + name: release doctor + runs-on: ubuntu-latest + if: github.repository == 'scaleapi/agentex-python' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next') + + steps: + - uses: actions/checkout@v4 + + - name: Check release environment + run: | + bash ./bin/check-release-environment + env: + PYPI_TOKEN: ${{ secrets.AGENTEX_PYPI_TOKEN || secrets.PYPI_TOKEN }} diff --git a/.gitignore b/.gitignore index 94993c3a..87797408 100644 --- a/.gitignore +++ b/.gitignore @@ -1,173 +1,16 @@ -# Logs -logs -*.log -npm-debug.log* -*.pth - -# Runtime data -pids -*.pid -*.seed -*.pid.lock - -# IntelliJ -**/.idea -*.iml - -# VSCode +.prism.log .vscode -*.code-workspace - -# filesystem files -.DS_Store - -# Local environment files -*.env -.env.* -*.envrc -frontend/.npmrc -local*.yaml - -# filesystem databases -dump.rdb -*.sqlite -*.db - -# Temp dirs -tmp - -### PYTHON - -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. 
-*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py,cover -.hypothesis/ -.pytest_cache/ -cover/ - -# Translations -*.mo -*.pot +_dev -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal +__pycache__ +.mypy_cache -# Flask stuff: -instance/ -.webassets-cache +dist -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -.pybuilder/ -target/ - -# Jupyter Notebook -.ipynb_checkpoints -_temp_extension -junit.xml -[uU]ntitled* -notebook/static/* -!notebook/static/favicons -notebook/labextension -notebook/schemas -docs/source/changelog.md -docs/source/contributing.md - -# IPython -profile_default/ -ipython_config.py - -# pyenv -# For a library or package, you might want to ignore these files since the code is -# intended to run in multiple environments; otherwise, check them in: -# .python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# pdm -.pdm.toml - -# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm -__pypackages__/ - -# Environments -.env .venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json +.idea -# Pyre type checker -.pyre/ - -# pytype static type analyzer -.pytype/ - -# Cython debug symbols -cython_debug/ +.env +.envrc +codegen.log +Brewfile.lock.json diff --git a/.python-version b/.python-version new file mode 100644 index 00000000..43077b24 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.9.18 diff --git a/.release-please-manifest.json b/.release-please-manifest.json new file mode 100644 index 00000000..55f722d9 --- /dev/null +++ b/.release-please-manifest.json @@ -0,0 +1,3 @@ +{ + ".": "0.0.1-alpha.1" +} \ No newline at end of file diff --git a/.stats.yml b/.stats.yml new file mode 100644 index 00000000..f89209bb --- /dev/null +++ b/.stats.yml @@ -0,0 +1,4 @@ +configured_endpoints: 36 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/sgp%2Fagentex-sdk-021b55c88964b7a5bfc9d692d32a52c6b0150445656d2407c4cb8e9dd1e5f100.yml +openapi_spec_hash: ed92c0d5d6bed9cb5617f8a776ac42c9 +config_hash: ea7ccb4f8ed1981b364cef82aa595243 diff --git a/Brewfile b/Brewfile new file mode 100644 index 00000000..492ca37b --- /dev/null +++ b/Brewfile @@ -0,0 +1,2 @@ +brew "rye" + diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..e2079d6e --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 0.0.1-alpha.1 (2025-07-22) + +Full Changelog: [v0.0.1-alpha.0...v0.0.1-alpha.1](https://github.com/scaleapi/agentex-python/compare/v0.0.1-alpha.0...v0.0.1-alpha.1) + +### Chores + +* sync repo ([bc305f4](https://github.com/scaleapi/agentex-python/commit/bc305f43efedb5b7d7b28eaa059bce1d280c9dbb)) +* update SDK settings ([e5a06b4](https://github.com/scaleapi/agentex-python/commit/e5a06b4e3d8f8ad15d55b92393d7ddd833415f86)) diff --git a/CODEOWNERS b/CODEOWNERS deleted file mode 100644 index e69de29b..00000000 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..305c811e --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,128 @@ +## Setting 
up the environment + +### With Rye + +We use [Rye](https://rye.astral.sh/) to manage dependencies because it will automatically provision a Python environment with the expected Python version. To set it up, run: + +```sh +$ ./scripts/bootstrap +``` + +Or [install Rye manually](https://rye.astral.sh/guide/installation/) and run: + +```sh +$ rye sync --all-features +``` + +You can then run scripts using `rye run python script.py` or by activating the virtual environment: + +```sh +# Activate the virtual environment - https://docs.python.org/3/library/venv.html#how-venvs-work +$ source .venv/bin/activate + +# now you can omit the `rye run` prefix +$ python script.py +``` + +### Without Rye + +Alternatively if you don't want to install `Rye`, you can stick with the standard `pip` setup by ensuring you have the Python version specified in `.python-version`, create a virtual environment however you desire and then install dependencies using this command: + +```sh +$ pip install -r requirements-dev.lock +``` + +## Modifying/Adding code + +Most of the SDK is generated code. Modifications to code will be persisted between generations, but may +result in merge conflicts between manual patches and changes from the generator. The generator will never +modify the contents of the `src/agentex/lib/` and `examples/` directories. + +## Adding and running examples + +All files in the `examples/` directory are not modified by the generator and can be freely edited or added to. + +```py +# add an example to examples/.py + +#!/usr/bin/env -S rye run python +… +``` + +```sh +$ chmod +x examples/.py +# run the example against your api +$ ./examples/.py +``` + +## Using the repository from source + +If you’d like to use the repository from source, you can either install from git or link to a cloned repository: + +To install via git: + +```sh +$ pip install git+ssh://git@github.com/scaleapi/agentex-python.git +``` + +Alternatively, you can build from source and install the wheel file: + +Building this package will create two files in the `dist/` directory, a `.tar.gz` containing the source files and a `.whl` that can be used to install the package efficiently. + +To create a distributable version of the library, all you have to do is run this command: + +```sh +$ rye build +# or +$ python -m build +``` + +Then to install: + +```sh +$ pip install ./path-to-wheel-file.whl +``` + +## Running tests + +Most tests require you to [set up a mock server](https://github.com/stoplightio/prism) against the OpenAPI spec to run the tests. + +```sh +# you will need npm installed +$ npx prism mock path/to/your/openapi.yml +``` + +```sh +$ ./scripts/test +``` + +## Linting and formatting + +This repository uses [ruff](https://github.com/astral-sh/ruff) and +[black](https://github.com/psf/black) to format the code in the repository. + +To lint: + +```sh +$ ./scripts/lint +``` + +To format and fix all ruff issues automatically: + +```sh +$ ./scripts/format +``` + +## Publishing and releases + +Changes made to this repository via the automated release PR pipeline should publish to PyPI automatically. If +the changes aren't made through the automated pipeline, you may want to make releases manually. + +### Publish with a GitHub workflow + +You can release to package managers by using [the `Publish PyPI` GitHub action](https://www.github.com/scaleapi/agentex-python/actions/workflows/publish-pypi.yml). This requires a setup organization or repository secret to be set up. 
+ +### Publish manually + +If you need to manually release a package, you can run the `bin/publish-pypi` script with a `PYPI_TOKEN` set on +the environment. diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..d2ee12de --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. 
Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2025 Agentex + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md index 79378b6b..db9c7afa 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,379 @@ -# repository-template -A repository template for repository creation at Scale AI. +# Agentex Python API library + + +[![PyPI version](https://img.shields.io/pypi/v/agentex.svg?label=pypi%20(stable))](https://pypi.org/project/agentex/) + +The Agentex Python library provides convenient access to the Agentex REST API from any Python 3.8+ +application. The library includes type definitions for all request params and response fields, +and offers both synchronous and asynchronous clients powered by [httpx](https://github.com/encode/httpx). + +It is generated with [Stainless](https://www.stainless.com/). + +## Documentation + +The full API of this library can be found in [api.md](api.md). + +## Installation + +```sh +# install from the production repo +pip install git+ssh://git@github.com/scaleapi/agentex-python.git +``` + +> [!NOTE] +> Once this package is [published to PyPI](https://www.stainless.com/docs/guides/publish), this will become: `pip install --pre agentex` ## Usage -### Automatic -Request a new repository from the slackbot `Onyx` using `/onyx` and input the appropriate information such as desired language(s) -### Manual -Requires repository creation permissions and an appropriately-permissioned REPO_SETUP_TOKEN +The full API of this library can be found in [api.md](api.md). + +```python +import os +from agentex import Agentex + +client = Agentex( + api_key=os.environ.get("AGENTEX_SDK_API_KEY"), # This is the default and can be omitted +) + +response = client.echo.send( + message="message", +) +``` + +While you can provide an `api_key` keyword argument, +we recommend using [python-dotenv](https://pypi.org/project/python-dotenv/) +to add `AGENTEX_SDK_API_KEY="My API Key"` to your `.env` file +so that your API Key is not stored in source control. + +## Async usage + +Simply import `AsyncAgentex` instead of `Agentex` and use `await` with each API call: + +```python +import os +import asyncio +from agentex import AsyncAgentex + +client = AsyncAgentex( + api_key=os.environ.get("AGENTEX_SDK_API_KEY"), # This is the default and can be omitted +) + + +async def main() -> None: + response = await client.echo.send( + message="message", + ) + + +asyncio.run(main()) +``` + +Functionality between the synchronous and asynchronous clients is otherwise identical. + +### With aiohttp + +By default, the async client uses `httpx` for HTTP requests. However, for improved concurrency performance you may also use `aiohttp` as the HTTP backend. 
+ +You can enable this by installing `aiohttp`: + +```sh +# install from the production repo +pip install 'agentex[aiohttp] @ git+ssh://git@github.com/scaleapi/agentex-python.git' +``` + +Then you can enable it by instantiating the client with `http_client=DefaultAioHttpClient()`: + +```python +import asyncio +from agentex import DefaultAioHttpClient +from agentex import AsyncAgentex + + +async def main() -> None: + async with AsyncAgentex( + api_key="My API Key", + http_client=DefaultAioHttpClient(), + ) as client: + response = await client.echo.send( + message="message", + ) + + +asyncio.run(main()) +``` + +## Using types + +Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). Responses are [Pydantic models](https://docs.pydantic.dev) which also provide helper methods for things like: + +- Serializing back into JSON, `model.to_json()` +- Converting to a dictionary, `model.to_dict()` + +Typed requests and responses provide autocomplete and documentation within your editor. If you would like to see type errors in VS Code to help catch bugs earlier, set `python.analysis.typeCheckingMode` to `basic`. + +## Handling errors + +When the library is unable to connect to the API (for example, due to network connection problems or a timeout), a subclass of `agentex.APIConnectionError` is raised. + +When the API returns a non-success status code (that is, 4xx or 5xx +response), a subclass of `agentex.APIStatusError` is raised, containing `status_code` and `response` properties. + +All errors inherit from `agentex.APIError`. + +```python +import agentex +from agentex import Agentex + +client = Agentex() + +try: + client.echo.send( + message="message", + ) +except agentex.APIConnectionError as e: + print("The server could not be reached") + print(e.__cause__) # an underlying Exception, likely raised within httpx. +except agentex.RateLimitError as e: + print("A 429 status code was received; we should back off a bit.") +except agentex.APIStatusError as e: + print("Another non-200-range status code was received") + print(e.status_code) + print(e.response) +``` + +Error codes are as follows: + +| Status Code | Error Type | +| ----------- | -------------------------- | +| 400 | `BadRequestError` | +| 401 | `AuthenticationError` | +| 403 | `PermissionDeniedError` | +| 404 | `NotFoundError` | +| 422 | `UnprocessableEntityError` | +| 429 | `RateLimitError` | +| >=500 | `InternalServerError` | +| N/A | `APIConnectionError` | + +### Retries + +Certain errors are automatically retried 2 times by default, with a short exponential backoff. +Connection errors (for example, due to a network connectivity problem), 408 Request Timeout, 409 Conflict, +429 Rate Limit, and >=500 Internal errors are all retried by default. + +You can use the `max_retries` option to configure or disable retry settings: + +```python +from agentex import Agentex + +# Configure the default for all requests: +client = Agentex( + # default is 2 + max_retries=0, +) + +# Or, configure per-request: +client.with_options(max_retries=5).echo.send( + message="message", +) +``` + +### Timeouts + +By default requests time out after 1 minute. 
You can configure this with a `timeout` option, +which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/timeouts/#fine-tuning-the-configuration) object: + +```python +from agentex import Agentex + +# Configure the default for all requests: +client = Agentex( + # 20 seconds (default is 1 minute) + timeout=20.0, +) + +# More granular control: +client = Agentex( + timeout=httpx.Timeout(60.0, read=5.0, write=10.0, connect=2.0), +) + +# Override per-request: +client.with_options(timeout=5.0).echo.send( + message="message", +) +``` + +On timeout, an `APITimeoutError` is thrown. + +Note that requests that time out are [retried twice by default](#retries). + +## Advanced + +### Logging + +We use the standard library [`logging`](https://docs.python.org/3/library/logging.html) module. + +You can enable logging by setting the environment variable `AGENTEX_LOG` to `info`. + +```shell +$ export AGENTEX_LOG=info +``` + +Or to `debug` for more verbose logging. + +### How to tell whether `None` means `null` or missing + +In an API response, a field may be explicitly `null`, or missing entirely; in either case, its value is `None` in this library. You can differentiate the two cases with `.model_fields_set`: + +```py +if response.my_field is None: + if 'my_field' not in response.model_fields_set: + print('Got json like {}, without a "my_field" key present at all.') + else: + print('Got json like {"my_field": null}.') +``` + +### Accessing raw response data (e.g. headers) + +The "raw" Response object can be accessed by prefixing `.with_raw_response.` to any HTTP method call, e.g., + +```py +from agentex import Agentex + +client = Agentex() +response = client.echo.with_raw_response.send( + message="message", +) +print(response.headers.get('X-My-Header')) + +echo = response.parse() # get the object that `echo.send()` would have returned +print(echo) +``` + +These methods return an [`APIResponse`](https://github.com/scaleapi/agentex-python/tree/main/src/agentex/_response.py) object. + +The async client returns an [`AsyncAPIResponse`](https://github.com/scaleapi/agentex-python/tree/main/src/agentex/_response.py) with the same structure, the only difference being `await`able methods for reading the response content. + +#### `.with_streaming_response` + +The above interface eagerly reads the full response body when you make the request, which may not always be what you want. + +To stream the response body, use `.with_streaming_response` instead, which requires a context manager and only reads the response body once you call `.read()`, `.text()`, `.json()`, `.iter_bytes()`, `.iter_text()`, `.iter_lines()` or `.parse()`. In the async client, these are async methods. + +```python +with client.echo.with_streaming_response.send( + message="message", +) as response: + print(response.headers.get("X-My-Header")) + + for line in response.iter_lines(): + print(line) +``` + +The context manager is required so that the response will reliably be closed. + +### Making custom/undocumented requests + +This library is typed for convenient access to the documented API. + +If you need to access undocumented endpoints, params, or response properties, the library can still be used. + +#### Undocumented endpoints + +To make requests to undocumented endpoints, you can make requests using `client.get`, `client.post`, and other +http verbs. Options on the client will be respected (such as retries) when making this request. 
+ +```py +import httpx + +response = client.post( + "/foo", + cast_to=httpx.Response, + body={"my_param": True}, +) + +print(response.headers.get("x-foo")) +``` + +#### Undocumented request params + +If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` request +options. + +#### Undocumented response properties + +To access undocumented response properties, you can access the extra fields like `response.unknown_prop`. You +can also get all the extra fields on the Pydantic model as a dict with +[`response.model_extra`](https://docs.pydantic.dev/latest/api/base_model/#pydantic.BaseModel.model_extra). + +### Configuring the HTTP client + +You can directly override the [httpx client](https://www.python-httpx.org/api/#client) to customize it for your use case, including: + +- Support for [proxies](https://www.python-httpx.org/advanced/proxies/) +- Custom [transports](https://www.python-httpx.org/advanced/transports/) +- Additional [advanced](https://www.python-httpx.org/advanced/clients/) functionality + +```python +import httpx +from agentex import Agentex, DefaultHttpxClient + +client = Agentex( + # Or use the `AGENTEX_BASE_URL` env var + base_url="http://my.test.server.example.com:8083", + http_client=DefaultHttpxClient( + proxy="http://my.test.proxy.example.com", + transport=httpx.HTTPTransport(local_address="0.0.0.0"), + ), +) +``` + +You can also customize the client on a per-request basis by using `with_options()`: + +```python +client.with_options(http_client=DefaultHttpxClient(...)) +``` + +### Managing HTTP resources + +By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. + +```py +from agentex import Agentex + +with Agentex() as client: + # make requests here + ... + +# HTTP client is now closed +``` + +## Versioning + +This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions: + +1. Changes that only affect static types, without breaking runtime behavior. +2. Changes to library internals which are technically public but not intended or documented for external use. _(Please open a GitHub issue to let us know if you are relying on such internals.)_ +3. Changes that we do not expect to impact the vast majority of users in practice. + +We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience. + +We are keen for your feedback; please open an [issue](https://www.github.com/scaleapi/agentex-python/issues) with questions, bugs, or suggestions. + +### Determining the installed version + +If you've upgraded to the latest version but aren't seeing any new features you were expecting then your python environment is likely still using an older version. + +You can determine the version that is being used at runtime with: + +```py +import agentex +print(agentex.__version__) +``` + +## Requirements + +Python 3.8 or higher. + +## Contributing -1. Create a new repository using this template -2. Add a secret `REPO_SETUP_TOKEN` to the new repository -3. Run the GitHub workflow `repository-setup`, inputting parameters as desired. -4. Allow the workflow to run and set up language-specific files and settings. 
+See [the contributing documentation](./CONTRIBUTING.md). diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000..d7ff1dab --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,23 @@ +# Security Policy + +## Reporting Security Issues + +This SDK is generated by [Stainless Software Inc](http://stainless.com). Stainless takes security seriously, and encourages you to report any security vulnerability promptly so that appropriate action can be taken. + +To report a security issue, please contact the Stainless team at security@stainless.com. + +## Responsible Disclosure + +We appreciate the efforts of security researchers and individuals who help us maintain the security of +SDKs we generate. If you believe you have found a security vulnerability, please adhere to responsible +disclosure practices by allowing us a reasonable amount of time to investigate and address the issue +before making any information public. + +## Reporting Non-SDK Related Security Issues + +If you encounter security issues that are not directly related to SDKs but pertain to the services +or products provided by Agentex, please follow the respective company's security reporting guidelines. + +--- + +Thank you for helping us keep the SDKs and systems they interact with secure. diff --git a/api.md b/api.md new file mode 100644 index 00000000..45be6a64 --- /dev/null +++ b/api.md @@ -0,0 +1,153 @@ +# Agentex + +Methods: + +- client.get_root() -> object + +# Echo + +Methods: + +- client.echo.send(\*\*params) -> object + +# Agents + +Types: + +```python +from agentex.types import AcpType, Agent, AgentRpcRequest, AgentListResponse +``` + +Methods: + +- client.agents.retrieve(agent_id) -> Agent +- client.agents.list(\*\*params) -> AgentListResponse +- client.agents.delete(agent_id) -> Agent +- client.agents.rpc(agent_id, \*\*params) -> object + +## Name + +Methods: + +- client.agents.name.retrieve(agent_name) -> Agent +- client.agents.name.delete(agent_name) -> Agent +- client.agents.name.rpc(agent_name, \*\*params) -> object + +# Tasks + +Types: + +```python +from agentex.types import Task, TaskListResponse +``` + +Methods: + +- client.tasks.retrieve(task_id) -> Task +- client.tasks.list() -> TaskListResponse +- client.tasks.delete(task_id) -> Task +- client.tasks.stream_events(task_id) -> object + +## Name + +Methods: + +- client.tasks.name.retrieve(task_name) -> Task +- client.tasks.name.delete(task_name) -> Task +- client.tasks.name.stream_events(task_name) -> object + +# Messages + +Types: + +```python +from agentex.types import ( + DataContent, + MessageAuthor, + MessageStyle, + StreamingStatus, + TaskMessage, + TextContent, + ToolRequestContent, + ToolResponseContent, + MessageListResponse, +) +``` + +Methods: + +- client.messages.create(\*\*params) -> TaskMessage +- client.messages.retrieve(message_id) -> TaskMessage +- client.messages.update(message_id, \*\*params) -> TaskMessage +- client.messages.list(\*\*params) -> MessageListResponse + +## Batch + +Types: + +```python +from agentex.types.messages import BatchCreateResponse, BatchUpdateResponse +``` + +Methods: + +- client.messages.batch.create(\*\*params) -> BatchCreateResponse +- client.messages.batch.update(\*\*params) -> BatchUpdateResponse + +# Spans + +Types: + +```python +from agentex.types import Span, SpanListResponse +``` + +Methods: + +- client.spans.create(\*\*params) -> Span +- client.spans.retrieve(span_id) -> Span +- client.spans.update(span_id, \*\*params) -> Span +- client.spans.list(\*\*params) -> SpanListResponse + +# States + +Types: + 
+```python +from agentex.types import State, StateListResponse +``` + +Methods: + +- client.states.create(\*\*params) -> State +- client.states.retrieve(state_id) -> State +- client.states.update(state_id, \*\*params) -> State +- client.states.list(\*\*params) -> StateListResponse +- client.states.delete(state_id) -> State + +# Events + +Types: + +```python +from agentex.types import Event, EventListResponse +``` + +Methods: + +- client.events.retrieve(event_id) -> Event +- client.events.list(\*\*params) -> EventListResponse + +# Tracker + +Types: + +```python +from agentex.types import AgentTaskTracker, TrackerListResponse +``` + +Methods: + +- client.tracker.retrieve(tracker_id) -> AgentTaskTracker +- client.tracker.update(tracker_id, \*\*params) -> AgentTaskTracker +- client.tracker.list(\*\*params) -> TrackerListResponse diff --git a/bin/check-release-environment b/bin/check-release-environment new file mode 100644 index 00000000..b845b0f4 --- /dev/null +++ b/bin/check-release-environment @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +errors=() + +if [ -z "${PYPI_TOKEN}" ]; then + errors+=("The PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.") +fi + +lenErrors=${#errors[@]} + +if [[ lenErrors -gt 0 ]]; then + echo -e "Found the following errors in the release environment:\n" + + for error in "${errors[@]}"; do + echo -e "- $error\n" + done + + exit 1 +fi + +echo "The environment is ready to push releases!" diff --git a/bin/publish-pypi b/bin/publish-pypi new file mode 100644 index 00000000..826054e9 --- /dev/null +++ b/bin/publish-pypi @@ -0,0 +1,6 @@ +#!/usr/bin/env bash + +set -eux +mkdir -p dist +rye build --clean +rye publish --yes --token=$PYPI_TOKEN diff --git a/examples/.keep b/examples/.keep new file mode 100644 index 00000000..d8c73e93 --- /dev/null +++ b/examples/.keep @@ -0,0 +1,4 @@ +File generated from our OpenAPI spec by Stainless. + +This directory can be used to store example files demonstrating usage of this SDK. +It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. \ No newline at end of file diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 00000000..91619053 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,50 @@ +[mypy] +pretty = True +show_error_codes = True + +# Exclude _files.py because mypy isn't smart enough to apply +# the correct type narrowing and as this is an internal module +# it's fine to just use Pyright. +# +# We also exclude our `tests` as mypy doesn't always infer +# types correctly and Pyright will still catch any type errors. +exclude = ^(src/agentex/_files\.py|_dev/.*\.py|tests/.*)$ + +strict_equality = True +implicit_reexport = True +check_untyped_defs = True +no_implicit_optional = True + +warn_return_any = True +warn_unreachable = True +warn_unused_configs = True + +# Turn these options off as it could cause conflicts +# with the Pyright options. +warn_unused_ignores = False +warn_redundant_casts = False + +disallow_any_generics = True +disallow_untyped_defs = True +disallow_untyped_calls = True +disallow_subclassing_any = True +disallow_incomplete_defs = True +disallow_untyped_decorators = True +cache_fine_grained = True + +# By default, mypy reports an error if you assign a value to the result +# of a function call that doesn't return anything. We do this in our test +# cases: +# ``` +# result = ... +# assert result is None +# ``` +# Changing this codegen to make mypy happy would increase complexity +# and would not be worth it. 
+disable_error_code = func-returns-value,overload-cannot-match + +# https://github.com/python/mypy/issues/12162 +[mypy.overrides] +module = "black.files.*" +ignore_errors = true +ignore_missing_imports = true diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 00000000..53bca7ff --- /dev/null +++ b/noxfile.py @@ -0,0 +1,9 @@ +import nox + + +@nox.session(reuse_venv=True, name="test-pydantic-v1") +def test_pydantic_v1(session: nox.Session) -> None: + session.install("-r", "requirements-dev.lock") + session.install("pydantic<2") + + session.run("pytest", "--showlocals", "--ignore=tests/functional", *session.posargs) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..2769d9ca --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,211 @@ +[project] +name = "agentex" +version = "0.0.1-alpha.1" +description = "The official Python library for the agentex API" +dynamic = ["readme"] +license = "Apache-2.0" +authors = [ +{ name = "Agentex", email = "" }, +] +dependencies = [ + "httpx>=0.23.0, <1", + "pydantic>=1.9.0, <3", + "typing-extensions>=4.10, <5", + "anyio>=3.5.0, <5", + "distro>=1.7.0, <2", + "sniffio", +] +requires-python = ">= 3.8" +classifiers = [ + "Typing :: Typed", + "Intended Audience :: Developers", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Operating System :: POSIX", + "Operating System :: MacOS", + "Operating System :: POSIX :: Linux", + "Operating System :: Microsoft :: Windows", + "Topic :: Software Development :: Libraries :: Python Modules", + "License :: OSI Approved :: Apache Software License" +] + +[project.urls] +Homepage = "https://github.com/scaleapi/agentex-python" +Repository = "https://github.com/scaleapi/agentex-python" + +[project.optional-dependencies] +aiohttp = ["aiohttp", "httpx_aiohttp>=0.1.8"] + +[tool.rye] +managed = true +# version pins are in requirements-dev.lock +dev-dependencies = [ + "pyright==1.1.399", + "mypy", + "respx", + "pytest", + "pytest-asyncio", + "ruff", + "time-machine", + "nox", + "dirty-equals>=0.6.0", + "importlib-metadata>=6.7.0", + "rich>=13.7.1", + "nest_asyncio==1.6.0", + "pytest-xdist>=3.6.1", +] + +[tool.rye.scripts] +format = { chain = [ + "format:ruff", + "format:docs", + "fix:ruff", + # run formatting again to fix any inconsistencies when imports are stripped + "format:ruff", +]} +"format:docs" = "python scripts/utils/ruffen-docs.py README.md api.md" +"format:ruff" = "ruff format" + +"lint" = { chain = [ + "check:ruff", + "typecheck", + "check:importable", +]} +"check:ruff" = "ruff check ." +"fix:ruff" = "ruff check --fix ." + +"check:importable" = "python -c 'import agentex'" + +typecheck = { chain = [ + "typecheck:pyright", + "typecheck:mypy" +]} +"typecheck:pyright" = "pyright" +"typecheck:verify-types" = "pyright --verifytypes agentex --ignoreexternal" +"typecheck:mypy" = "mypy ." 
+ +[build-system] +requires = ["hatchling==1.26.3", "hatch-fancy-pypi-readme"] +build-backend = "hatchling.build" + +[tool.hatch.build] +include = [ + "src/*" +] + +[tool.hatch.build.targets.wheel] +packages = ["src/agentex"] + +[tool.hatch.build.targets.sdist] +# Basically everything except hidden files/directories (such as .github, .devcontainers, .python-version, etc) +include = [ + "/*.toml", + "/*.json", + "/*.lock", + "/*.md", + "/mypy.ini", + "/noxfile.py", + "bin/*", + "examples/*", + "src/*", + "tests/*", +] + +[tool.hatch.metadata.hooks.fancy-pypi-readme] +content-type = "text/markdown" + +[[tool.hatch.metadata.hooks.fancy-pypi-readme.fragments]] +path = "README.md" + +[[tool.hatch.metadata.hooks.fancy-pypi-readme.substitutions]] +# replace relative links with absolute links +pattern = '\[(.+?)\]\(((?!https?://)\S+?)\)' +replacement = '[\1](https://github.com/scaleapi/agentex-python/tree/main/\g<2>)' + +[tool.pytest.ini_options] +testpaths = ["tests"] +addopts = "--tb=short -n auto" +xfail_strict = true +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "session" +filterwarnings = [ + "error" +] + +[tool.pyright] +# this enables practically every flag given by pyright. +# there are a couple of flags that are still disabled by +# default in strict mode as they are experimental and niche. +typeCheckingMode = "strict" +pythonVersion = "3.8" + +exclude = [ + "_dev", + ".venv", + ".nox", +] + +reportImplicitOverride = true +reportOverlappingOverload = false + +reportImportCycles = false +reportPrivateUsage = false + +[tool.ruff] +line-length = 120 +output-format = "grouped" +target-version = "py37" + +[tool.ruff.format] +docstring-code-format = true + +[tool.ruff.lint] +select = [ + # isort + "I", + # bugbear rules + "B", + # remove unused imports + "F401", + # bare except statements + "E722", + # unused arguments + "ARG", + # print statements + "T201", + "T203", + # misuse of typing.TYPE_CHECKING + "TC004", + # import rules + "TID251", +] +ignore = [ + # mutable defaults + "B006", +] +unfixable = [ + # disable auto fix for print statements + "T201", + "T203", +] + +[tool.ruff.lint.flake8-tidy-imports.banned-api] +"functools.lru_cache".msg = "This function does not retain type information for the wrapped function's arguments; The `lru_cache` function from `_utils` should be used instead" + +[tool.ruff.lint.isort] +length-sort = true +length-sort-straight = true +combine-as-imports = true +extra-standard-library = ["typing_extensions"] +known-first-party = ["agentex", "tests"] + +[tool.ruff.lint.per-file-ignores] +"bin/**.py" = ["T201", "T203"] +"scripts/**.py" = ["T201", "T203"] +"tests/**.py" = ["T201", "T203"] +"examples/**.py" = ["T201", "T203"] diff --git a/release-please-config.json b/release-please-config.json new file mode 100644 index 00000000..52d4c546 --- /dev/null +++ b/release-please-config.json @@ -0,0 +1,66 @@ +{ + "packages": { + ".": {} + }, + "$schema": "https://raw.githubusercontent.com/stainless-api/release-please/main/schemas/config.json", + "include-v-in-tag": true, + "include-component-in-tag": false, + "versioning": "prerelease", + "prerelease": true, + "bump-minor-pre-major": true, + "bump-patch-for-minor-pre-major": false, + "pull-request-header": "Automated Release PR", + "pull-request-title-pattern": "release: ${version}", + "changelog-sections": [ + { + "type": "feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "perf", + "section": "Performance Improvements" + }, + { + "type": "revert", + 
"section": "Reverts" + }, + { + "type": "chore", + "section": "Chores" + }, + { + "type": "docs", + "section": "Documentation" + }, + { + "type": "style", + "section": "Styles" + }, + { + "type": "refactor", + "section": "Refactors" + }, + { + "type": "test", + "section": "Tests", + "hidden": true + }, + { + "type": "build", + "section": "Build System" + }, + { + "type": "ci", + "section": "Continuous Integration", + "hidden": true + } + ], + "release-type": "python", + "extra-files": [ + "src/agentex/_version.py" + ] +} \ No newline at end of file diff --git a/requirements-dev.lock b/requirements-dev.lock new file mode 100644 index 00000000..f82de72c --- /dev/null +++ b/requirements-dev.lock @@ -0,0 +1,135 @@ +# generated by rye +# use `rye lock` or `rye sync` to update this lockfile +# +# last locked with the following flags: +# pre: false +# features: [] +# all-features: true +# with-sources: false +# generate-hashes: false +# universal: false + +-e file:. +aiohappyeyeballs==2.6.1 + # via aiohttp +aiohttp==3.12.8 + # via agentex + # via httpx-aiohttp +aiosignal==1.3.2 + # via aiohttp +annotated-types==0.6.0 + # via pydantic +anyio==4.4.0 + # via agentex + # via httpx +argcomplete==3.1.2 + # via nox +async-timeout==5.0.1 + # via aiohttp +attrs==25.3.0 + # via aiohttp +certifi==2023.7.22 + # via httpcore + # via httpx +colorlog==6.7.0 + # via nox +dirty-equals==0.6.0 +distlib==0.3.7 + # via virtualenv +distro==1.8.0 + # via agentex +exceptiongroup==1.2.2 + # via anyio + # via pytest +execnet==2.1.1 + # via pytest-xdist +filelock==3.12.4 + # via virtualenv +frozenlist==1.6.2 + # via aiohttp + # via aiosignal +h11==0.16.0 + # via httpcore +httpcore==1.0.9 + # via httpx +httpx==0.28.1 + # via agentex + # via httpx-aiohttp + # via respx +httpx-aiohttp==0.1.8 + # via agentex +idna==3.4 + # via anyio + # via httpx + # via yarl +importlib-metadata==7.0.0 +iniconfig==2.0.0 + # via pytest +markdown-it-py==3.0.0 + # via rich +mdurl==0.1.2 + # via markdown-it-py +multidict==6.4.4 + # via aiohttp + # via yarl +mypy==1.14.1 +mypy-extensions==1.0.0 + # via mypy +nest-asyncio==1.6.0 +nodeenv==1.8.0 + # via pyright +nox==2023.4.22 +packaging==23.2 + # via nox + # via pytest +platformdirs==3.11.0 + # via virtualenv +pluggy==1.5.0 + # via pytest +propcache==0.3.1 + # via aiohttp + # via yarl +pydantic==2.10.3 + # via agentex +pydantic-core==2.27.1 + # via pydantic +pygments==2.18.0 + # via rich +pyright==1.1.399 +pytest==8.3.3 + # via pytest-asyncio + # via pytest-xdist +pytest-asyncio==0.24.0 +pytest-xdist==3.7.0 +python-dateutil==2.8.2 + # via time-machine +pytz==2023.3.post1 + # via dirty-equals +respx==0.22.0 +rich==13.7.1 +ruff==0.9.4 +setuptools==68.2.2 + # via nodeenv +six==1.16.0 + # via python-dateutil +sniffio==1.3.0 + # via agentex + # via anyio +time-machine==2.9.0 +tomli==2.0.2 + # via mypy + # via pytest +typing-extensions==4.12.2 + # via agentex + # via anyio + # via multidict + # via mypy + # via pydantic + # via pydantic-core + # via pyright +virtualenv==20.24.5 + # via nox +yarl==1.20.0 + # via aiohttp +zipp==3.17.0 + # via importlib-metadata diff --git a/requirements.lock b/requirements.lock new file mode 100644 index 00000000..a6d3556d --- /dev/null +++ b/requirements.lock @@ -0,0 +1,72 @@ +# generated by rye +# use `rye lock` or `rye sync` to update this lockfile +# +# last locked with the following flags: +# pre: false +# features: [] +# all-features: true +# with-sources: false +# generate-hashes: false +# universal: false + +-e file:. 
+aiohappyeyeballs==2.6.1 + # via aiohttp +aiohttp==3.12.8 + # via agentex + # via httpx-aiohttp +aiosignal==1.3.2 + # via aiohttp +annotated-types==0.6.0 + # via pydantic +anyio==4.4.0 + # via agentex + # via httpx +async-timeout==5.0.1 + # via aiohttp +attrs==25.3.0 + # via aiohttp +certifi==2023.7.22 + # via httpcore + # via httpx +distro==1.8.0 + # via agentex +exceptiongroup==1.2.2 + # via anyio +frozenlist==1.6.2 + # via aiohttp + # via aiosignal +h11==0.16.0 + # via httpcore +httpcore==1.0.9 + # via httpx +httpx==0.28.1 + # via agentex + # via httpx-aiohttp +httpx-aiohttp==0.1.8 + # via agentex +idna==3.4 + # via anyio + # via httpx + # via yarl +multidict==6.4.4 + # via aiohttp + # via yarl +propcache==0.3.1 + # via aiohttp + # via yarl +pydantic==2.10.3 + # via agentex +pydantic-core==2.27.1 + # via pydantic +sniffio==1.3.0 + # via agentex + # via anyio +typing-extensions==4.12.2 + # via agentex + # via anyio + # via multidict + # via pydantic + # via pydantic-core +yarl==1.20.0 + # via aiohttp diff --git a/scripts/bootstrap b/scripts/bootstrap new file mode 100755 index 00000000..e84fe62c --- /dev/null +++ b/scripts/bootstrap @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +set -e + +cd "$(dirname "$0")/.." + +if ! command -v rye >/dev/null 2>&1 && [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ]; then + brew bundle check >/dev/null 2>&1 || { + echo "==> Installing Homebrew dependencies…" + brew bundle + } +fi + +echo "==> Installing Python dependencies…" + +# experimental uv support makes installations significantly faster +rye config --set-bool behavior.use-uv=true + +rye sync --all-features diff --git a/scripts/format b/scripts/format new file mode 100755 index 00000000..667ec2d7 --- /dev/null +++ b/scripts/format @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +set -e + +cd "$(dirname "$0")/.." + +echo "==> Running formatters" +rye run format diff --git a/scripts/lint b/scripts/lint new file mode 100755 index 00000000..69fa62e9 --- /dev/null +++ b/scripts/lint @@ -0,0 +1,11 @@ +#!/usr/bin/env bash + +set -e + +cd "$(dirname "$0")/.." + +echo "==> Running lints" +rye run lint + +echo "==> Making sure it imports" +rye run python -c 'import agentex' diff --git a/scripts/mock b/scripts/mock new file mode 100755 index 00000000..d2814ae6 --- /dev/null +++ b/scripts/mock @@ -0,0 +1,41 @@ +#!/usr/bin/env bash + +set -e + +cd "$(dirname "$0")/.." + +if [[ -n "$1" && "$1" != '--'* ]]; then + URL="$1" + shift +else + URL="$(grep 'openapi_spec_url' .stats.yml | cut -d' ' -f2)" +fi + +# Check if the URL is empty +if [ -z "$URL" ]; then + echo "Error: No OpenAPI spec path/url provided or found in .stats.yml" + exit 1 +fi + +echo "==> Starting mock server with URL ${URL}" + +# Run prism mock on the given spec +if [ "$1" == "--daemon" ]; then + npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" &> .prism.log & + + # Wait for server to come online + echo -n "Waiting for server" + while ! grep -q "✖ fatal\|Prism is listening" ".prism.log" ; do + echo -n "." + sleep 0.1 + done + + if grep -q "✖ fatal" ".prism.log"; then + cat .prism.log + exit 1 + fi + + echo +else + npm exec --package=@stainless-api/prism-cli@5.8.5 -- prism mock "$URL" +fi diff --git a/scripts/test b/scripts/test new file mode 100755 index 00000000..2b878456 --- /dev/null +++ b/scripts/test @@ -0,0 +1,61 @@ +#!/usr/bin/env bash + +set -e + +cd "$(dirname "$0")/.." 
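The mock script above waits for Prism to report that it is listening, and scripts/test (continued below) probes http://localhost:4010 with curl before running the suite. As a rough illustration only, an equivalent wait could be written in Python; the helper name and timeout are assumptions, not part of this repository.

import time
import urllib.error
import urllib.request


def wait_for_prism_mock(url: str = "http://localhost:4010", timeout: float = 10.0) -> bool:
    """Poll the local Prism mock until it answers (any HTTP status) or the timeout elapses."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            urllib.request.urlopen(url, timeout=1.0)
            return True
        except urllib.error.HTTPError:
            # the server responded with an error status, which still means it is up
            return True
        except (urllib.error.URLError, OSError):
            time.sleep(0.1)  # not accepting connections yet; retry shortly
    return False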
+ +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[0;33m' +NC='\033[0m' # No Color + +function prism_is_running() { + curl --silent "http://localhost:4010" >/dev/null 2>&1 +} + +kill_server_on_port() { + pids=$(lsof -t -i tcp:"$1" || echo "") + if [ "$pids" != "" ]; then + kill "$pids" + echo "Stopped $pids." + fi +} + +function is_overriding_api_base_url() { + [ -n "$TEST_API_BASE_URL" ] +} + +if ! is_overriding_api_base_url && ! prism_is_running ; then + # When we exit this script, make sure to kill the background mock server process + trap 'kill_server_on_port 4010' EXIT + + # Start the dev server + ./scripts/mock --daemon +fi + +if is_overriding_api_base_url ; then + echo -e "${GREEN}✔ Running tests against ${TEST_API_BASE_URL}${NC}" + echo +elif ! prism_is_running ; then + echo -e "${RED}ERROR:${NC} The test suite will not run without a mock Prism server" + echo -e "running against your OpenAPI spec." + echo + echo -e "To run the server, pass in the path or url of your OpenAPI" + echo -e "spec to the prism command:" + echo + echo -e " \$ ${YELLOW}npm exec --package=@stoplight/prism-cli@~5.3.2 -- prism mock path/to/your.openapi.yml${NC}" + echo + + exit 1 +else + echo -e "${GREEN}✔ Mock prism server is running with your OpenAPI spec${NC}" + echo +fi + +export DEFER_PYDANTIC_BUILD=false + +echo "==> Running tests" +rye run pytest "$@" + +echo "==> Running Pydantic v1 tests" +rye run nox -s test-pydantic-v1 -- "$@" diff --git a/scripts/utils/ruffen-docs.py b/scripts/utils/ruffen-docs.py new file mode 100644 index 00000000..0cf2bd2f --- /dev/null +++ b/scripts/utils/ruffen-docs.py @@ -0,0 +1,167 @@ +# fork of https://github.com/asottile/blacken-docs adapted for ruff +from __future__ import annotations + +import re +import sys +import argparse +import textwrap +import contextlib +import subprocess +from typing import Match, Optional, Sequence, Generator, NamedTuple, cast + +MD_RE = re.compile( + r"(?P^(?P *)```\s*python\n)" r"(?P.*?)" r"(?P^(?P=indent)```\s*$)", + re.DOTALL | re.MULTILINE, +) +MD_PYCON_RE = re.compile( + r"(?P^(?P *)```\s*pycon\n)" r"(?P.*?)" r"(?P^(?P=indent)```.*$)", + re.DOTALL | re.MULTILINE, +) +PYCON_PREFIX = ">>> " +PYCON_CONTINUATION_PREFIX = "..." +PYCON_CONTINUATION_RE = re.compile( + rf"^{re.escape(PYCON_CONTINUATION_PREFIX)}( |$)", +) +DEFAULT_LINE_LENGTH = 100 + + +class CodeBlockError(NamedTuple): + offset: int + exc: Exception + + +def format_str( + src: str, +) -> tuple[str, Sequence[CodeBlockError]]: + errors: list[CodeBlockError] = [] + + @contextlib.contextmanager + def _collect_error(match: Match[str]) -> Generator[None, None, None]: + try: + yield + except Exception as e: + errors.append(CodeBlockError(match.start(), e)) + + def _md_match(match: Match[str]) -> str: + code = textwrap.dedent(match["code"]) + with _collect_error(match): + code = format_code_block(code) + code = textwrap.indent(code, match["indent"]) + return f"{match['before']}{code}{match['after']}" + + def _pycon_match(match: Match[str]) -> str: + code = "" + fragment = cast(Optional[str], None) + + def finish_fragment() -> None: + nonlocal code + nonlocal fragment + + if fragment is not None: + with _collect_error(match): + fragment = format_code_block(fragment) + fragment_lines = fragment.splitlines() + code += f"{PYCON_PREFIX}{fragment_lines[0]}\n" + for line in fragment_lines[1:]: + # Skip blank lines to handle Black adding a blank above + # functions within blocks. A blank line would end the REPL + # continuation prompt. + # + # >>> if True: + # ... def f(): + # ... pass + # ... 
+ if line: + code += f"{PYCON_CONTINUATION_PREFIX} {line}\n" + if fragment_lines[-1].startswith(" "): + code += f"{PYCON_CONTINUATION_PREFIX}\n" + fragment = None + + indentation = None + for line in match["code"].splitlines(): + orig_line, line = line, line.lstrip() + if indentation is None and line: + indentation = len(orig_line) - len(line) + continuation_match = PYCON_CONTINUATION_RE.match(line) + if continuation_match and fragment is not None: + fragment += line[continuation_match.end() :] + "\n" + else: + finish_fragment() + if line.startswith(PYCON_PREFIX): + fragment = line[len(PYCON_PREFIX) :] + "\n" + else: + code += orig_line[indentation:] + "\n" + finish_fragment() + return code + + def _md_pycon_match(match: Match[str]) -> str: + code = _pycon_match(match) + code = textwrap.indent(code, match["indent"]) + return f"{match['before']}{code}{match['after']}" + + src = MD_RE.sub(_md_match, src) + src = MD_PYCON_RE.sub(_md_pycon_match, src) + return src, errors + + +def format_code_block(code: str) -> str: + return subprocess.check_output( + [ + sys.executable, + "-m", + "ruff", + "format", + "--stdin-filename=script.py", + f"--line-length={DEFAULT_LINE_LENGTH}", + ], + encoding="utf-8", + input=code, + ) + + +def format_file( + filename: str, + skip_errors: bool, +) -> int: + with open(filename, encoding="UTF-8") as f: + contents = f.read() + new_contents, errors = format_str(contents) + for error in errors: + lineno = contents[: error.offset].count("\n") + 1 + print(f"{filename}:{lineno}: code block parse error {error.exc}") + if errors and not skip_errors: + return 1 + if contents != new_contents: + print(f"{filename}: Rewriting...") + with open(filename, "w", encoding="UTF-8") as f: + f.write(new_contents) + return 0 + else: + return 0 + + +def main(argv: Sequence[str] | None = None) -> int: + parser = argparse.ArgumentParser() + parser.add_argument( + "-l", + "--line-length", + type=int, + default=DEFAULT_LINE_LENGTH, + ) + parser.add_argument( + "-S", + "--skip-string-normalization", + action="store_true", + ) + parser.add_argument("-E", "--skip-errors", action="store_true") + parser.add_argument("filenames", nargs="*") + args = parser.parse_args(argv) + + retv = 0 + for filename in args.filenames: + retv |= format_file(filename, skip_errors=args.skip_errors) + return retv + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/utils/upload-artifact.sh b/scripts/utils/upload-artifact.sh new file mode 100755 index 00000000..e766fabe --- /dev/null +++ b/scripts/utils/upload-artifact.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash +set -exuo pipefail + +FILENAME=$(basename dist/*.whl) + +RESPONSE=$(curl -X POST "$URL?filename=$FILENAME" \ + -H "Authorization: Bearer $AUTH" \ + -H "Content-Type: application/json") + +SIGNED_URL=$(echo "$RESPONSE" | jq -r '.url') + +if [[ "$SIGNED_URL" == "null" ]]; then + echo -e "\033[31mFailed to get signed URL.\033[0m" + exit 1 +fi + +UPLOAD_RESPONSE=$(curl -v -X PUT \ + -H "Content-Type: binary/octet-stream" \ + --data-binary "@dist/$FILENAME" "$SIGNED_URL" 2>&1) + +if echo "$UPLOAD_RESPONSE" | grep -q "HTTP/[0-9.]* 200"; then + echo -e "\033[32mUploaded build to Stainless storage.\033[0m" + echo -e "\033[32mInstallation: pip install 'https://pkg.stainless.com/s/agentex-sdk-python/$SHA/$FILENAME'\033[0m" +else + echo -e "\033[31mFailed to upload artifact.\033[0m" + exit 1 +fi diff --git a/src/agentex/__init__.py b/src/agentex/__init__.py new file mode 100644 index 00000000..891c638e --- /dev/null +++ b/src/agentex/__init__.py 
@@ -0,0 +1,90 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import typing as _t + +from . import types +from ._types import NOT_GIVEN, Omit, NoneType, NotGiven, Transport, ProxiesTypes +from ._utils import file_from_path +from ._client import Client, Stream, Agentex, Timeout, Transport, AsyncClient, AsyncStream, AsyncAgentex, RequestOptions +from ._models import BaseModel +from ._version import __title__, __version__ +from ._response import APIResponse as APIResponse, AsyncAPIResponse as AsyncAPIResponse +from ._constants import DEFAULT_TIMEOUT, DEFAULT_MAX_RETRIES, DEFAULT_CONNECTION_LIMITS +from ._exceptions import ( + APIError, + AgentexError, + ConflictError, + NotFoundError, + APIStatusError, + RateLimitError, + APITimeoutError, + BadRequestError, + APIConnectionError, + AuthenticationError, + InternalServerError, + PermissionDeniedError, + UnprocessableEntityError, + APIResponseValidationError, +) +from ._base_client import DefaultHttpxClient, DefaultAioHttpClient, DefaultAsyncHttpxClient +from ._utils._logs import setup_logging as _setup_logging + +__all__ = [ + "types", + "__version__", + "__title__", + "NoneType", + "Transport", + "ProxiesTypes", + "NotGiven", + "NOT_GIVEN", + "Omit", + "AgentexError", + "APIError", + "APIStatusError", + "APITimeoutError", + "APIConnectionError", + "APIResponseValidationError", + "BadRequestError", + "AuthenticationError", + "PermissionDeniedError", + "NotFoundError", + "ConflictError", + "UnprocessableEntityError", + "RateLimitError", + "InternalServerError", + "Timeout", + "RequestOptions", + "Client", + "AsyncClient", + "Stream", + "AsyncStream", + "Agentex", + "AsyncAgentex", + "file_from_path", + "BaseModel", + "DEFAULT_TIMEOUT", + "DEFAULT_MAX_RETRIES", + "DEFAULT_CONNECTION_LIMITS", + "DefaultHttpxClient", + "DefaultAsyncHttpxClient", + "DefaultAioHttpClient", +] + +if not _t.TYPE_CHECKING: + from ._utils._resources_proxy import resources as resources + +_setup_logging() + +# Update the __module__ attribute for exported symbols so that +# error messages point to this module instead of the module +# it was originally defined in, e.g. +# agentex._exceptions.NotFoundError -> agentex.NotFoundError +__locals = locals() +for __name in __all__: + if not __name.startswith("__"): + try: + __locals[__name].__module__ = "agentex" + except (TypeError, AttributeError): + # Some of our exported symbols are builtins which we can't set attributes for. + pass diff --git a/src/agentex/_base_client.py b/src/agentex/_base_client.py new file mode 100644 index 00000000..0cc1fbb7 --- /dev/null +++ b/src/agentex/_base_client.py @@ -0,0 +1,1992 @@ +from __future__ import annotations + +import sys +import json +import time +import uuid +import email +import asyncio +import inspect +import logging +import platform +import email.utils +from types import TracebackType +from random import random +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Type, + Union, + Generic, + Mapping, + TypeVar, + Iterable, + Iterator, + Optional, + Generator, + AsyncIterator, + cast, + overload, +) +from typing_extensions import Literal, override, get_origin + +import anyio +import httpx +import distro +import pydantic +from httpx import URL +from pydantic import PrivateAttr + +from . 
import _exceptions +from ._qs import Querystring +from ._files import to_httpx_files, async_to_httpx_files +from ._types import ( + NOT_GIVEN, + Body, + Omit, + Query, + Headers, + Timeout, + NotGiven, + ResponseT, + AnyMapping, + PostParser, + RequestFiles, + HttpxSendArgs, + RequestOptions, + HttpxRequestFiles, + ModelBuilderProtocol, +) +from ._utils import is_dict, is_list, asyncify, is_given, lru_cache, is_mapping +from ._compat import PYDANTIC_V2, model_copy, model_dump +from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type +from ._response import ( + APIResponse, + BaseAPIResponse, + AsyncAPIResponse, + extract_response_type, +) +from ._constants import ( + DEFAULT_TIMEOUT, + MAX_RETRY_DELAY, + DEFAULT_MAX_RETRIES, + INITIAL_RETRY_DELAY, + RAW_RESPONSE_HEADER, + OVERRIDE_CAST_TO_HEADER, + DEFAULT_CONNECTION_LIMITS, +) +from ._streaming import Stream, SSEDecoder, AsyncStream, SSEBytesDecoder +from ._exceptions import ( + APIStatusError, + APITimeoutError, + APIConnectionError, + APIResponseValidationError, +) + +log: logging.Logger = logging.getLogger(__name__) + +# TODO: make base page type vars covariant +SyncPageT = TypeVar("SyncPageT", bound="BaseSyncPage[Any]") +AsyncPageT = TypeVar("AsyncPageT", bound="BaseAsyncPage[Any]") + + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) + +_StreamT = TypeVar("_StreamT", bound=Stream[Any]) +_AsyncStreamT = TypeVar("_AsyncStreamT", bound=AsyncStream[Any]) + +if TYPE_CHECKING: + from httpx._config import ( + DEFAULT_TIMEOUT_CONFIG, # pyright: ignore[reportPrivateImportUsage] + ) + + HTTPX_DEFAULT_TIMEOUT = DEFAULT_TIMEOUT_CONFIG +else: + try: + from httpx._config import DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT + except ImportError: + # taken from https://github.com/encode/httpx/blob/3ba5fe0d7ac70222590e759c31442b1cab263791/httpx/_config.py#L366 + HTTPX_DEFAULT_TIMEOUT = Timeout(5.0) + + +class PageInfo: + """Stores the necessary information to build the request to retrieve the next page. + + Either `url` or `params` must be set. + """ + + url: URL | NotGiven + params: Query | NotGiven + json: Body | NotGiven + + @overload + def __init__( + self, + *, + url: URL, + ) -> None: ... + + @overload + def __init__( + self, + *, + params: Query, + ) -> None: ... + + @overload + def __init__( + self, + *, + json: Body, + ) -> None: ... + + def __init__( + self, + *, + url: URL | NotGiven = NOT_GIVEN, + json: Body | NotGiven = NOT_GIVEN, + params: Query | NotGiven = NOT_GIVEN, + ) -> None: + self.url = url + self.json = json + self.params = params + + @override + def __repr__(self) -> str: + if self.url: + return f"{self.__class__.__name__}(url={self.url})" + if self.json: + return f"{self.__class__.__name__}(json={self.json})" + return f"{self.__class__.__name__}(params={self.params})" + + +class BasePage(GenericModel, Generic[_T]): + """ + Defines the core interface for pagination. + + Type Args: + ModelT: The pydantic model that represents an item in the response. + + Methods: + has_next_page(): Check if there is another page available + next_page_info(): Get the necessary information to make a request for the next page + """ + + _options: FinalRequestOptions = PrivateAttr() + _model: Type[_T] = PrivateAttr() + + def has_next_page(self) -> bool: + items = self._get_page_items() + if not items: + return False + return self.next_page_info() is not None + + def next_page_info(self) -> Optional[PageInfo]: ... + + def _get_page_items(self) -> Iterable[_T]: # type: ignore[empty-body] + ... 
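BasePage above only defines the pagination contract; the SDK's concrete page models live in a separate module that is not part of this diff. A minimal sketch of how a cursor-style page could satisfy that contract follows (it builds on BaseSyncPage, defined a little further down); the `data` and `next_cursor` field names and the `cursor` query parameter are invented for illustration.

from typing import List, Optional


class ExampleCursorPage(BaseSyncPage[_T], Generic[_T]):
    data: List[_T]
    next_cursor: Optional[str] = None

    def _get_page_items(self) -> List[_T]:
        # the items yielded when iterating over the page or the client's paginator
        return self.data or []

    def next_page_info(self) -> Optional[PageInfo]:
        # returning None tells has_next_page() that there is nothing left to fetch
        if not self.next_cursor:
            return None
        # re-issue the same request with a `cursor` query parameter pointing at the next page
        return PageInfo(params={"cursor": self.next_cursor})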
+ + def _params_from_url(self, url: URL) -> httpx.QueryParams: + # TODO: do we have to preprocess params here? + return httpx.QueryParams(cast(Any, self._options.params)).merge(url.params) + + def _info_to_options(self, info: PageInfo) -> FinalRequestOptions: + options = model_copy(self._options) + options._strip_raw_response_header() + + if not isinstance(info.params, NotGiven): + options.params = {**options.params, **info.params} + return options + + if not isinstance(info.url, NotGiven): + params = self._params_from_url(info.url) + url = info.url.copy_with(params=params) + options.params = dict(url.params) + options.url = str(url) + return options + + if not isinstance(info.json, NotGiven): + if not is_mapping(info.json): + raise TypeError("Pagination is only supported with mappings") + + if not options.json_data: + options.json_data = {**info.json} + else: + if not is_mapping(options.json_data): + raise TypeError("Pagination is only supported with mappings") + + options.json_data = {**options.json_data, **info.json} + return options + + raise ValueError("Unexpected PageInfo state") + + +class BaseSyncPage(BasePage[_T], Generic[_T]): + _client: SyncAPIClient = pydantic.PrivateAttr() + + def _set_private_attributes( + self, + client: SyncAPIClient, + model: Type[_T], + options: FinalRequestOptions, + ) -> None: + if PYDANTIC_V2 and getattr(self, "__pydantic_private__", None) is None: + self.__pydantic_private__ = {} + + self._model = model + self._client = client + self._options = options + + # Pydantic uses a custom `__iter__` method to support casting BaseModels + # to dictionaries. e.g. dict(model). + # As we want to support `for item in page`, this is inherently incompatible + # with the default pydantic behaviour. It is not possible to support both + # use cases at once. Fortunately, this is not a big deal as all other pydantic + # methods should continue to work as expected as there is an alternative method + # to cast a model to a dictionary, model.dict(), which is used internally + # by pydantic. + def __iter__(self) -> Iterator[_T]: # type: ignore + for page in self.iter_pages(): + for item in page._get_page_items(): + yield item + + def iter_pages(self: SyncPageT) -> Iterator[SyncPageT]: + page = self + while True: + yield page + if page.has_next_page(): + page = page.get_next_page() + else: + return + + def get_next_page(self: SyncPageT) -> SyncPageT: + info = self.next_page_info() + if not info: + raise RuntimeError( + "No next page expected; please check `.has_next_page()` before calling `.get_next_page()`." 
+ ) + + options = self._info_to_options(info) + return self._client._request_api_list(self._model, page=self.__class__, options=options) + + +class AsyncPaginator(Generic[_T, AsyncPageT]): + def __init__( + self, + client: AsyncAPIClient, + options: FinalRequestOptions, + page_cls: Type[AsyncPageT], + model: Type[_T], + ) -> None: + self._model = model + self._client = client + self._options = options + self._page_cls = page_cls + + def __await__(self) -> Generator[Any, None, AsyncPageT]: + return self._get_page().__await__() + + async def _get_page(self) -> AsyncPageT: + def _parser(resp: AsyncPageT) -> AsyncPageT: + resp._set_private_attributes( + model=self._model, + options=self._options, + client=self._client, + ) + return resp + + self._options.post_parser = _parser + + return await self._client.request(self._page_cls, self._options) + + async def __aiter__(self) -> AsyncIterator[_T]: + # https://github.com/microsoft/pyright/issues/3464 + page = cast( + AsyncPageT, + await self, # type: ignore + ) + async for item in page: + yield item + + +class BaseAsyncPage(BasePage[_T], Generic[_T]): + _client: AsyncAPIClient = pydantic.PrivateAttr() + + def _set_private_attributes( + self, + model: Type[_T], + client: AsyncAPIClient, + options: FinalRequestOptions, + ) -> None: + if PYDANTIC_V2 and getattr(self, "__pydantic_private__", None) is None: + self.__pydantic_private__ = {} + + self._model = model + self._client = client + self._options = options + + async def __aiter__(self) -> AsyncIterator[_T]: + async for page in self.iter_pages(): + for item in page._get_page_items(): + yield item + + async def iter_pages(self: AsyncPageT) -> AsyncIterator[AsyncPageT]: + page = self + while True: + yield page + if page.has_next_page(): + page = await page.get_next_page() + else: + return + + async def get_next_page(self: AsyncPageT) -> AsyncPageT: + info = self.next_page_info() + if not info: + raise RuntimeError( + "No next page expected; please check `.has_next_page()` before calling `.get_next_page()`." + ) + + options = self._info_to_options(info) + return await self._client._request_api_list(self._model, page=self.__class__, options=options) + + +_HttpxClientT = TypeVar("_HttpxClientT", bound=Union[httpx.Client, httpx.AsyncClient]) +_DefaultStreamT = TypeVar("_DefaultStreamT", bound=Union[Stream[Any], AsyncStream[Any]]) + + +class BaseClient(Generic[_HttpxClientT, _DefaultStreamT]): + _client: _HttpxClientT + _version: str + _base_url: URL + max_retries: int + timeout: Union[float, Timeout, None] + _strict_response_validation: bool + _idempotency_header: str | None + _default_stream_cls: type[_DefaultStreamT] | None = None + + def __init__( + self, + *, + version: str, + base_url: str | URL, + _strict_response_validation: bool, + max_retries: int = DEFAULT_MAX_RETRIES, + timeout: float | Timeout | None = DEFAULT_TIMEOUT, + custom_headers: Mapping[str, str] | None = None, + custom_query: Mapping[str, object] | None = None, + ) -> None: + self._version = version + self._base_url = self._enforce_trailing_slash(URL(base_url)) + self.max_retries = max_retries + self.timeout = timeout + self._custom_headers = custom_headers or {} + self._custom_query = custom_query or {} + self._strict_response_validation = _strict_response_validation + self._idempotency_header = None + self._platform: Platform | None = None + + if max_retries is None: # pyright: ignore[reportUnnecessaryComparison] + raise TypeError( + "max_retries cannot be None. 
If you want to disable retries, pass `0`; if you want unlimited retries, pass `math.inf` or a very high number; if you want the default behavior, pass `agentex.DEFAULT_MAX_RETRIES`" + ) + + def _enforce_trailing_slash(self, url: URL) -> URL: + if url.raw_path.endswith(b"/"): + return url + return url.copy_with(raw_path=url.raw_path + b"/") + + def _make_status_error_from_response( + self, + response: httpx.Response, + ) -> APIStatusError: + if response.is_closed and not response.is_stream_consumed: + # We can't read the response body as it has been closed + # before it was read. This can happen if an event hook + # raises a status error. + body = None + err_msg = f"Error code: {response.status_code}" + else: + err_text = response.text.strip() + body = err_text + + try: + body = json.loads(err_text) + err_msg = f"Error code: {response.status_code} - {body}" + except Exception: + err_msg = err_text or f"Error code: {response.status_code}" + + return self._make_status_error(err_msg, body=body, response=response) + + def _make_status_error( + self, + err_msg: str, + *, + body: object, + response: httpx.Response, + ) -> _exceptions.APIStatusError: + raise NotImplementedError() + + def _build_headers(self, options: FinalRequestOptions, *, retries_taken: int = 0) -> httpx.Headers: + custom_headers = options.headers or {} + headers_dict = _merge_mappings(self.default_headers, custom_headers) + self._validate_headers(headers_dict, custom_headers) + + # headers are case-insensitive while dictionaries are not. + headers = httpx.Headers(headers_dict) + + idempotency_header = self._idempotency_header + if idempotency_header and options.idempotency_key and idempotency_header not in headers: + headers[idempotency_header] = options.idempotency_key + + # Don't set these headers if they were already set or removed by the caller. We check + # `custom_headers`, which can contain `Omit()`, instead of `headers` to account for the removal case. + lower_custom_headers = [header.lower() for header in custom_headers] + if "x-stainless-retry-count" not in lower_custom_headers: + headers["x-stainless-retry-count"] = str(retries_taken) + if "x-stainless-read-timeout" not in lower_custom_headers: + timeout = self.timeout if isinstance(options.timeout, NotGiven) else options.timeout + if isinstance(timeout, Timeout): + timeout = timeout.read + if timeout is not None: + headers["x-stainless-read-timeout"] = str(timeout) + + return headers + + def _prepare_url(self, url: str) -> URL: + """ + Merge a URL argument together with any 'base_url' on the client, + to create the URL used for the outgoing request. + """ + # Copied from httpx's `_merge_url` method. 
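# For illustration (hypothetical values, assuming the client was constructed with
# base_url="https://api.example.com/v1", which _enforce_trailing_slash turns into ".../v1/"):
#   _prepare_url("tasks")                    -> https://api.example.com/v1/tasks
#   _prepare_url("/tasks")                   -> https://api.example.com/v1/tasks
#   _prepare_url("https://other.example/x")  -> https://other.example/x   (absolute URLs are used as-is)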
+ merge_url = URL(url) + if merge_url.is_relative_url: + merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") + return self.base_url.copy_with(raw_path=merge_raw_path) + + return merge_url + + def _make_sse_decoder(self) -> SSEDecoder | SSEBytesDecoder: + return SSEDecoder() + + def _build_request( + self, + options: FinalRequestOptions, + *, + retries_taken: int = 0, + ) -> httpx.Request: + if log.isEnabledFor(logging.DEBUG): + log.debug("Request options: %s", model_dump(options, exclude_unset=True)) + + kwargs: dict[str, Any] = {} + + json_data = options.json_data + if options.extra_json is not None: + if json_data is None: + json_data = cast(Body, options.extra_json) + elif is_mapping(json_data): + json_data = _merge_mappings(json_data, options.extra_json) + else: + raise RuntimeError(f"Unexpected JSON data type, {type(json_data)}, cannot merge with `extra_body`") + + headers = self._build_headers(options, retries_taken=retries_taken) + params = _merge_mappings(self.default_query, options.params) + content_type = headers.get("Content-Type") + files = options.files + + # If the given Content-Type header is multipart/form-data then it + # has to be removed so that httpx can generate the header with + # additional information for us as it has to be in this form + # for the server to be able to correctly parse the request: + # multipart/form-data; boundary=---abc-- + if content_type is not None and content_type.startswith("multipart/form-data"): + if "boundary" not in content_type: + # only remove the header if the boundary hasn't been explicitly set + # as the caller doesn't want httpx to come up with their own boundary + headers.pop("Content-Type") + + # As we are now sending multipart/form-data instead of application/json + # we need to tell httpx to use it, https://www.python-httpx.org/advanced/clients/#multipart-file-encoding + if json_data: + if not is_dict(json_data): + raise TypeError( + f"Expected query input to be a dictionary for multipart requests but got {type(json_data)} instead." + ) + kwargs["data"] = self._serialize_multipartform(json_data) + + # httpx determines whether or not to send a "multipart/form-data" + # request based on the truthiness of the "files" argument. + # This gets around that issue by generating a dict value that + # evaluates to true. + # + # https://github.com/encode/httpx/discussions/2399#discussioncomment-3814186 + if not files: + files = cast(HttpxRequestFiles, ForceMultipartDict()) + + prepared_url = self._prepare_url(options.url) + if "_" in prepared_url.host: + # work around https://github.com/encode/httpx/discussions/2880 + kwargs["extensions"] = {"sni_hostname": prepared_url.host.replace("_", "-")} + + is_body_allowed = options.method.lower() != "get" + + if is_body_allowed: + kwargs["json"] = json_data if is_given(json_data) else None + kwargs["files"] = files + else: + headers.pop("Content-Type", None) + kwargs.pop("data", None) + + # TODO: report this error to httpx + return self._client.build_request( # pyright: ignore[reportUnknownMemberType] + headers=headers, + timeout=self.timeout if isinstance(options.timeout, NotGiven) else options.timeout, + method=options.method, + url=prepared_url, + # the `Query` type that we use is incompatible with qs' + # `Params` type as it needs to be typed as `Mapping[str, object]` + # so that passing a `TypedDict` doesn't cause an error. 
+ # https://github.com/microsoft/pyright/issues/3526#event-6715453066 + params=self.qs.stringify(cast(Mapping[str, Any], params)) if params else None, + **kwargs, + ) + + def _serialize_multipartform(self, data: Mapping[object, object]) -> dict[str, object]: + items = self.qs.stringify_items( + # TODO: type ignore is required as stringify_items is well typed but we can't be + # well typed without heavy validation. + data, # type: ignore + array_format="brackets", + ) + serialized: dict[str, object] = {} + for key, value in items: + existing = serialized.get(key) + + if not existing: + serialized[key] = value + continue + + # If a value has already been set for this key then that + # means we're sending data like `array[]=[1, 2, 3]` and we + # need to tell httpx that we want to send multiple values with + # the same key which is done by using a list or a tuple. + # + # Note: 2d arrays should never result in the same key at both + # levels so it's safe to assume that if the value is a list, + # it was because we changed it to be a list. + if is_list(existing): + existing.append(value) + else: + serialized[key] = [existing, value] + + return serialized + + def _maybe_override_cast_to(self, cast_to: type[ResponseT], options: FinalRequestOptions) -> type[ResponseT]: + if not is_given(options.headers): + return cast_to + + # make a copy of the headers so we don't mutate user-input + headers = dict(options.headers) + + # we internally support defining a temporary header to override the + # default `cast_to` type for use with `.with_raw_response` and `.with_streaming_response` + # see _response.py for implementation details + override_cast_to = headers.pop(OVERRIDE_CAST_TO_HEADER, NOT_GIVEN) + if is_given(override_cast_to): + options.headers = headers + return cast(Type[ResponseT], override_cast_to) + + return cast_to + + def _should_stream_response_body(self, request: httpx.Request) -> bool: + return request.headers.get(RAW_RESPONSE_HEADER) == "stream" # type: ignore[no-any-return] + + def _process_response_data( + self, + *, + data: object, + cast_to: type[ResponseT], + response: httpx.Response, + ) -> ResponseT: + if data is None: + return cast(ResponseT, None) + + if cast_to is object: + return cast(ResponseT, data) + + try: + if inspect.isclass(cast_to) and issubclass(cast_to, ModelBuilderProtocol): + return cast(ResponseT, cast_to.build(response=response, data=data)) + + if self._strict_response_validation: + return cast(ResponseT, validate_type(type_=cast_to, value=data)) + + return cast(ResponseT, construct_type(type_=cast_to, value=data)) + except pydantic.ValidationError as err: + raise APIResponseValidationError(response=response, body=data) from err + + @property + def qs(self) -> Querystring: + return Querystring() + + @property + def custom_auth(self) -> httpx.Auth | None: + return None + + @property + def auth_headers(self) -> dict[str, str]: + return {} + + @property + def default_headers(self) -> dict[str, str | Omit]: + return { + "Accept": "application/json", + "Content-Type": "application/json", + "User-Agent": self.user_agent, + **self.platform_headers(), + **self.auth_headers, + **self._custom_headers, + } + + @property + def default_query(self) -> dict[str, object]: + return { + **self._custom_query, + } + + def _validate_headers( + self, + headers: Headers, # noqa: ARG002 + custom_headers: Headers, # noqa: ARG002 + ) -> None: + """Validate the given default headers and custom headers. + + Does nothing by default. 
+ """ + return + + @property + def user_agent(self) -> str: + return f"{self.__class__.__name__}/Python {self._version}" + + @property + def base_url(self) -> URL: + return self._base_url + + @base_url.setter + def base_url(self, url: URL | str) -> None: + self._base_url = self._enforce_trailing_slash(url if isinstance(url, URL) else URL(url)) + + def platform_headers(self) -> Dict[str, str]: + # the actual implementation is in a separate `lru_cache` decorated + # function because adding `lru_cache` to methods will leak memory + # https://github.com/python/cpython/issues/88476 + return platform_headers(self._version, platform=self._platform) + + def _parse_retry_after_header(self, response_headers: Optional[httpx.Headers] = None) -> float | None: + """Returns a float of the number of seconds (not milliseconds) to wait after retrying, or None if unspecified. + + About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After + See also https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After#syntax + """ + if response_headers is None: + return None + + # First, try the non-standard `retry-after-ms` header for milliseconds, + # which is more precise than integer-seconds `retry-after` + try: + retry_ms_header = response_headers.get("retry-after-ms", None) + return float(retry_ms_header) / 1000 + except (TypeError, ValueError): + pass + + # Next, try parsing `retry-after` header as seconds (allowing nonstandard floats). + retry_header = response_headers.get("retry-after") + try: + # note: the spec indicates that this should only ever be an integer + # but if someone sends a float there's no reason for us to not respect it + return float(retry_header) + except (TypeError, ValueError): + pass + + # Last, try parsing `retry-after` as a date. + retry_date_tuple = email.utils.parsedate_tz(retry_header) + if retry_date_tuple is None: + return None + + retry_date = email.utils.mktime_tz(retry_date_tuple) + return float(retry_date - time.time()) + + def _calculate_retry_timeout( + self, + remaining_retries: int, + options: FinalRequestOptions, + response_headers: Optional[httpx.Headers] = None, + ) -> float: + max_retries = options.get_max_retries(self.max_retries) + + # If the API asks us to wait a certain amount of time (and it's a reasonable amount), just do what it says. + retry_after = self._parse_retry_after_header(response_headers) + if retry_after is not None and 0 < retry_after <= 60: + return retry_after + + # Also cap retry count to 1000 to avoid any potential overflows with `pow` + nb_retries = min(max_retries - remaining_retries, 1000) + + # Apply exponential backoff, but not more than the max. + sleep_seconds = min(INITIAL_RETRY_DELAY * pow(2.0, nb_retries), MAX_RETRY_DELAY) + + # Apply some jitter, plus-or-minus half a second. + jitter = 1 - 0.25 * random() + timeout = sleep_seconds * jitter + return timeout if timeout >= 0 else 0 + + def _should_retry(self, response: httpx.Response) -> bool: + # Note: this is not a standard header + should_retry_header = response.headers.get("x-should-retry") + + # If the server explicitly says whether or not to retry, obey. + if should_retry_header == "true": + log.debug("Retrying as header `x-should-retry` is set to `true`") + return True + if should_retry_header == "false": + log.debug("Not retrying as header `x-should-retry` is set to `false`") + return False + + # Retry on request timeouts. 
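# For a concrete feel for the backoff computed above (hypothetical numbers; the real
# INITIAL_RETRY_DELAY and MAX_RETRY_DELAY live in _constants.py), assume 0.5s and 8.0s
# and no usable Retry-After header. Successive retries then sleep roughly:
#   1st retry: min(0.5 * 2**0, 8.0) = 0.5s      3rd retry: min(0.5 * 2**2, 8.0) = 2.0s
#   2nd retry: min(0.5 * 2**1, 8.0) = 1.0s      5th retry: min(0.5 * 2**4, 8.0) = 8.0s (capped)
# with each value then scaled by the jitter factor (1 - 0.25 * random()), i.e. between 0.75x and 1.0x.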
+ if response.status_code == 408: + log.debug("Retrying due to status code %i", response.status_code) + return True + + # Retry on lock timeouts. + if response.status_code == 409: + log.debug("Retrying due to status code %i", response.status_code) + return True + + # Retry on rate limits. + if response.status_code == 429: + log.debug("Retrying due to status code %i", response.status_code) + return True + + # Retry internal errors. + if response.status_code >= 500: + log.debug("Retrying due to status code %i", response.status_code) + return True + + log.debug("Not retrying") + return False + + def _idempotency_key(self) -> str: + return f"stainless-python-retry-{uuid.uuid4()}" + + +class _DefaultHttpxClient(httpx.Client): + def __init__(self, **kwargs: Any) -> None: + kwargs.setdefault("timeout", DEFAULT_TIMEOUT) + kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS) + kwargs.setdefault("follow_redirects", True) + super().__init__(**kwargs) + + +if TYPE_CHECKING: + DefaultHttpxClient = httpx.Client + """An alias to `httpx.Client` that provides the same defaults that this SDK + uses internally. + + This is useful because overriding the `http_client` with your own instance of + `httpx.Client` will result in httpx's defaults being used, not ours. + """ +else: + DefaultHttpxClient = _DefaultHttpxClient + + +class SyncHttpxClientWrapper(DefaultHttpxClient): + def __del__(self) -> None: + if self.is_closed: + return + + try: + self.close() + except Exception: + pass + + +class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]): + _client: httpx.Client + _default_stream_cls: type[Stream[Any]] | None = None + + def __init__( + self, + *, + version: str, + base_url: str | URL, + max_retries: int = DEFAULT_MAX_RETRIES, + timeout: float | Timeout | None | NotGiven = NOT_GIVEN, + http_client: httpx.Client | None = None, + custom_headers: Mapping[str, str] | None = None, + custom_query: Mapping[str, object] | None = None, + _strict_response_validation: bool, + ) -> None: + if not is_given(timeout): + # if the user passed in a custom http client with a non-default + # timeout set then we use that timeout. + # + # note: there is an edge case here where the user passes in a client + # where they've explicitly set the timeout to match the default timeout + # as this check is structural, meaning that we'll think they didn't + # pass in a timeout and will ignore it + if http_client and http_client.timeout != HTTPX_DEFAULT_TIMEOUT: + timeout = http_client.timeout + else: + timeout = DEFAULT_TIMEOUT + + if http_client is not None and not isinstance(http_client, httpx.Client): # pyright: ignore[reportUnnecessaryIsInstance] + raise TypeError( + f"Invalid `http_client` argument; Expected an instance of `httpx.Client` but got {type(http_client)}" + ) + + super().__init__( + version=version, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + base_url=base_url, + max_retries=max_retries, + custom_query=custom_query, + custom_headers=custom_headers, + _strict_response_validation=_strict_response_validation, + ) + self._client = http_client or SyncHttpxClientWrapper( + base_url=base_url, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + ) + + def is_closed(self) -> bool: + return self._client.is_closed + + def close(self) -> None: + """Close the underlying HTTPX client. + + The client will *not* be usable after this. 
+ """ + # If an error is thrown while constructing a client, self._client + # may not be present + if hasattr(self, "_client"): + self._client.close() + + def __enter__(self: _T) -> _T: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def _prepare_options( + self, + options: FinalRequestOptions, # noqa: ARG002 + ) -> FinalRequestOptions: + """Hook for mutating the given options""" + return options + + def _prepare_request( + self, + request: httpx.Request, # noqa: ARG002 + ) -> None: + """This method is used as a callback for mutating the `Request` object + after it has been constructed. + This is useful for cases where you want to add certain headers based off of + the request properties, e.g. `url`, `method` etc. + """ + return None + + @overload + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: Literal[True], + stream_cls: Type[_StreamT], + ) -> _StreamT: ... + + @overload + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: Literal[False] = False, + ) -> ResponseT: ... + + @overload + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: bool = False, + stream_cls: Type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: ... + + def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: bool = False, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: + cast_to = self._maybe_override_cast_to(cast_to, options) + + # create a copy of the options we were given so that if the + # options are mutated later & we then retry, the retries are + # given the original options + input_options = model_copy(options) + if input_options.idempotency_key is None and input_options.method.lower() != "get": + # ensure the idempotency key is reused between requests + input_options.idempotency_key = self._idempotency_key() + + response: httpx.Response | None = None + max_retries = input_options.get_max_retries(self.max_retries) + + retries_taken = 0 + for retries_taken in range(max_retries + 1): + options = model_copy(input_options) + options = self._prepare_options(options) + + remaining_retries = max_retries - retries_taken + request = self._build_request(options, retries_taken=retries_taken) + self._prepare_request(request) + + kwargs: HttpxSendArgs = {} + if self.custom_auth is not None: + kwargs["auth"] = self.custom_auth + + if options.follow_redirects is not None: + kwargs["follow_redirects"] = options.follow_redirects + + log.debug("Sending HTTP Request: %s %s", request.method, request.url) + + response = None + try: + response = self._client.send( + request, + stream=stream or self._should_stream_response_body(request=request), + **kwargs, + ) + except httpx.TimeoutException as err: + log.debug("Encountered httpx.TimeoutException", exc_info=True) + + if remaining_retries > 0: + self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=None, + ) + continue + + log.debug("Raising timeout error") + raise APITimeoutError(request=request) from err + except Exception as err: + log.debug("Encountered Exception", exc_info=True) + + if remaining_retries > 0: + self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=None, + ) + continue + + log.debug("Raising connection error") + 
raise APIConnectionError(request=request) from err + + log.debug( + 'HTTP Response: %s %s "%i %s" %s', + request.method, + request.url, + response.status_code, + response.reason_phrase, + response.headers, + ) + + try: + response.raise_for_status() + except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code + log.debug("Encountered httpx.HTTPStatusError", exc_info=True) + + if remaining_retries > 0 and self._should_retry(err.response): + err.response.close() + self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=response, + ) + continue + + # If the response is streamed then we need to explicitly read the response + # to completion before attempting to access the response text. + if not err.response.is_closed: + err.response.read() + + log.debug("Re-raising status error") + raise self._make_status_error_from_response(err.response) from None + + break + + assert response is not None, "could not resolve response (should never happen)" + return self._process_response( + cast_to=cast_to, + options=options, + response=response, + stream=stream, + stream_cls=stream_cls, + retries_taken=retries_taken, + ) + + def _sleep_for_retry( + self, *, retries_taken: int, max_retries: int, options: FinalRequestOptions, response: httpx.Response | None + ) -> None: + remaining_retries = max_retries - retries_taken + if remaining_retries == 1: + log.debug("1 retry left") + else: + log.debug("%i retries left", remaining_retries) + + timeout = self._calculate_retry_timeout(remaining_retries, options, response.headers if response else None) + log.info("Retrying request to %s in %f seconds", options.url, timeout) + + time.sleep(timeout) + + def _process_response( + self, + *, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + response: httpx.Response, + stream: bool, + stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + retries_taken: int = 0, + ) -> ResponseT: + origin = get_origin(cast_to) or cast_to + + if ( + inspect.isclass(origin) + and issubclass(origin, BaseAPIResponse) + # we only want to actually return the custom BaseAPIResponse class if we're + # returning the raw response, or if we're not streaming SSE, as if we're streaming + # SSE then `cast_to` doesn't actively reflect the type we need to parse into + and (not stream or bool(response.request.headers.get(RAW_RESPONSE_HEADER))) + ): + if not issubclass(origin, APIResponse): + raise TypeError(f"API Response types must subclass {APIResponse}; Received {origin}") + + response_cls = cast("type[BaseAPIResponse[Any]]", cast_to) + return cast( + ResponseT, + response_cls( + raw=response, + client=self, + cast_to=extract_response_type(response_cls), + stream=stream, + stream_cls=stream_cls, + options=options, + retries_taken=retries_taken, + ), + ) + + if cast_to == httpx.Response: + return cast(ResponseT, response) + + api_response = APIResponse( + raw=response, + client=self, + cast_to=cast("type[ResponseT]", cast_to), # pyright: ignore[reportUnnecessaryCast] + stream=stream, + stream_cls=stream_cls, + options=options, + retries_taken=retries_taken, + ) + if bool(response.request.headers.get(RAW_RESPONSE_HEADER)): + return cast(ResponseT, api_response) + + return api_response.parse() + + def _request_api_list( + self, + model: Type[object], + page: Type[SyncPageT], + options: FinalRequestOptions, + ) -> SyncPageT: + def _parser(resp: SyncPageT) -> SyncPageT: + resp._set_private_attributes( + client=self, + model=model, + options=options, + ) + return resp 
+ + options.post_parser = _parser + + return self.request(page, options, stream=False) + + @overload + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: Literal[False] = False, + ) -> ResponseT: ... + + @overload + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: Literal[True], + stream_cls: type[_StreamT], + ) -> _StreamT: ... + + @overload + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: ... + + def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool = False, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: + opts = FinalRequestOptions.construct(method="get", url=path, **options) + # cast is required because mypy complains about returning Any even though + # it understands the type variables + return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)) + + @overload + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: Literal[False] = False, + ) -> ResponseT: ... + + @overload + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: Literal[True], + stream_cls: type[_StreamT], + ) -> _StreamT: ... + + @overload + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: bool, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: ... 
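The get()/post()/patch()/put()/delete() helpers here are what generated resource classes ultimately call. A hypothetical fragment showing the calling convention; the `Task` model, the paths, the `task_id` value, and the pre-existing `client` instance are assumptions for illustration, and the `timeout` key is assumed to be part of RequestOptions, which is defined in _types.py outside this diff.

# `client` is an already-constructed SyncAPIClient subclass; `Task` is an assumed pydantic model.
task = client.get(
    f"/tasks/{task_id}",
    cast_to=Task,                 # the response body is validated/constructed into this type
    options={"timeout": 30.0},    # RequestOptions; anything omitted falls back to client defaults
)

created = client.post(
    "/tasks",
    body={"name": "example"},     # becomes json_data on FinalRequestOptions
    cast_to=Task,
)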
+ + def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + files: RequestFiles | None = None, + stream: bool = False, + stream_cls: type[_StreamT] | None = None, + ) -> ResponseT | _StreamT: + opts = FinalRequestOptions.construct( + method="post", url=path, json_data=body, files=to_httpx_files(files), **options + ) + return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)) + + def patch( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct(method="patch", url=path, json_data=body, **options) + return self.request(cast_to, opts) + + def put( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct( + method="put", url=path, json_data=body, files=to_httpx_files(files), **options + ) + return self.request(cast_to, opts) + + def delete( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) + return self.request(cast_to, opts) + + def get_api_list( + self, + path: str, + *, + model: Type[object], + page: Type[SyncPageT], + body: Body | None = None, + options: RequestOptions = {}, + method: str = "get", + ) -> SyncPageT: + opts = FinalRequestOptions.construct(method=method, url=path, json_data=body, **options) + return self._request_api_list(model, page, opts) + + +class _DefaultAsyncHttpxClient(httpx.AsyncClient): + def __init__(self, **kwargs: Any) -> None: + kwargs.setdefault("timeout", DEFAULT_TIMEOUT) + kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS) + kwargs.setdefault("follow_redirects", True) + super().__init__(**kwargs) + + +try: + import httpx_aiohttp +except ImportError: + + class _DefaultAioHttpClient(httpx.AsyncClient): + def __init__(self, **_kwargs: Any) -> None: + raise RuntimeError("To use the aiohttp client you must have installed the package with the `aiohttp` extra") +else: + + class _DefaultAioHttpClient(httpx_aiohttp.HttpxAiohttpClient): # type: ignore + def __init__(self, **kwargs: Any) -> None: + kwargs.setdefault("timeout", DEFAULT_TIMEOUT) + kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS) + kwargs.setdefault("follow_redirects", True) + + super().__init__(**kwargs) + + +if TYPE_CHECKING: + DefaultAsyncHttpxClient = httpx.AsyncClient + """An alias to `httpx.AsyncClient` that provides the same defaults that this SDK + uses internally. + + This is useful because overriding the `http_client` with your own instance of + `httpx.AsyncClient` will result in httpx's defaults being used, not ours. 
+ """ + + DefaultAioHttpClient = httpx.AsyncClient + """An alias to `httpx.AsyncClient` that changes the default HTTP transport to `aiohttp`.""" +else: + DefaultAsyncHttpxClient = _DefaultAsyncHttpxClient + DefaultAioHttpClient = _DefaultAioHttpClient + + +class AsyncHttpxClientWrapper(DefaultAsyncHttpxClient): + def __del__(self) -> None: + if self.is_closed: + return + + try: + # TODO(someday): support non asyncio runtimes here + asyncio.get_running_loop().create_task(self.aclose()) + except Exception: + pass + + +class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]): + _client: httpx.AsyncClient + _default_stream_cls: type[AsyncStream[Any]] | None = None + + def __init__( + self, + *, + version: str, + base_url: str | URL, + _strict_response_validation: bool, + max_retries: int = DEFAULT_MAX_RETRIES, + timeout: float | Timeout | None | NotGiven = NOT_GIVEN, + http_client: httpx.AsyncClient | None = None, + custom_headers: Mapping[str, str] | None = None, + custom_query: Mapping[str, object] | None = None, + ) -> None: + if not is_given(timeout): + # if the user passed in a custom http client with a non-default + # timeout set then we use that timeout. + # + # note: there is an edge case here where the user passes in a client + # where they've explicitly set the timeout to match the default timeout + # as this check is structural, meaning that we'll think they didn't + # pass in a timeout and will ignore it + if http_client and http_client.timeout != HTTPX_DEFAULT_TIMEOUT: + timeout = http_client.timeout + else: + timeout = DEFAULT_TIMEOUT + + if http_client is not None and not isinstance(http_client, httpx.AsyncClient): # pyright: ignore[reportUnnecessaryIsInstance] + raise TypeError( + f"Invalid `http_client` argument; Expected an instance of `httpx.AsyncClient` but got {type(http_client)}" + ) + + super().__init__( + version=version, + base_url=base_url, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + max_retries=max_retries, + custom_query=custom_query, + custom_headers=custom_headers, + _strict_response_validation=_strict_response_validation, + ) + self._client = http_client or AsyncHttpxClientWrapper( + base_url=base_url, + # cast to a valid type because mypy doesn't understand our type narrowing + timeout=cast(Timeout, timeout), + ) + + def is_closed(self) -> bool: + return self._client.is_closed + + async def close(self) -> None: + """Close the underlying HTTPX client. + + The client will *not* be usable after this. + """ + await self._client.aclose() + + async def __aenter__(self: _T) -> _T: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.close() + + async def _prepare_options( + self, + options: FinalRequestOptions, # noqa: ARG002 + ) -> FinalRequestOptions: + """Hook for mutating the given options""" + return options + + async def _prepare_request( + self, + request: httpx.Request, # noqa: ARG002 + ) -> None: + """This method is used as a callback for mutating the `Request` object + after it has been constructed. + This is useful for cases where you want to add certain headers based off of + the request properties, e.g. `url`, `method` etc. + """ + return None + + @overload + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: Literal[False] = False, + ) -> ResponseT: ... 
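DefaultAioHttpClient above only works when the optional aiohttp dependency is installed (the ImportError fallback raises otherwise). A hypothetical usage sketch, assuming the distribution was installed with its `aiohttp` extra and that AsyncAgentex accepts and forwards `http_client` the same way AsyncAPIClient does here; its other constructor arguments are omitted.

import asyncio

from agentex import AsyncAgentex, DefaultAioHttpClient


async def main() -> None:
    # AsyncAPIClient defines __aenter__/__aexit__, so the client can be used as an async context manager
    async with AsyncAgentex(http_client=DefaultAioHttpClient()) as client:
        ...  # issue requests with `client` here


asyncio.run(main())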
+ + @overload + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: Literal[True], + stream_cls: type[_AsyncStreamT], + ) -> _AsyncStreamT: ... + + @overload + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: bool, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: ... + + async def request( + self, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + *, + stream: bool = False, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: + if self._platform is None: + # `get_platform` can make blocking IO calls so we + # execute it earlier while we are in an async context + self._platform = await asyncify(get_platform)() + + cast_to = self._maybe_override_cast_to(cast_to, options) + + # create a copy of the options we were given so that if the + # options are mutated later & we then retry, the retries are + # given the original options + input_options = model_copy(options) + if input_options.idempotency_key is None and input_options.method.lower() != "get": + # ensure the idempotency key is reused between requests + input_options.idempotency_key = self._idempotency_key() + + response: httpx.Response | None = None + max_retries = input_options.get_max_retries(self.max_retries) + + retries_taken = 0 + for retries_taken in range(max_retries + 1): + options = model_copy(input_options) + options = await self._prepare_options(options) + + remaining_retries = max_retries - retries_taken + request = self._build_request(options, retries_taken=retries_taken) + await self._prepare_request(request) + + kwargs: HttpxSendArgs = {} + if self.custom_auth is not None: + kwargs["auth"] = self.custom_auth + + if options.follow_redirects is not None: + kwargs["follow_redirects"] = options.follow_redirects + + log.debug("Sending HTTP Request: %s %s", request.method, request.url) + + response = None + try: + response = await self._client.send( + request, + stream=stream or self._should_stream_response_body(request=request), + **kwargs, + ) + except httpx.TimeoutException as err: + log.debug("Encountered httpx.TimeoutException", exc_info=True) + + if remaining_retries > 0: + await self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=None, + ) + continue + + log.debug("Raising timeout error") + raise APITimeoutError(request=request) from err + except Exception as err: + log.debug("Encountered Exception", exc_info=True) + + if remaining_retries > 0: + await self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=None, + ) + continue + + log.debug("Raising connection error") + raise APIConnectionError(request=request) from err + + log.debug( + 'HTTP Response: %s %s "%i %s" %s', + request.method, + request.url, + response.status_code, + response.reason_phrase, + response.headers, + ) + + try: + response.raise_for_status() + except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code + log.debug("Encountered httpx.HTTPStatusError", exc_info=True) + + if remaining_retries > 0 and self._should_retry(err.response): + await err.response.aclose() + await self._sleep_for_retry( + retries_taken=retries_taken, + max_retries=max_retries, + options=input_options, + response=response, + ) + continue + + # If the response is streamed then we need to explicitly read the response + # to completion before attempting to access the 
response text. + if not err.response.is_closed: + await err.response.aread() + + log.debug("Re-raising status error") + raise self._make_status_error_from_response(err.response) from None + + break + + assert response is not None, "could not resolve response (should never happen)" + return await self._process_response( + cast_to=cast_to, + options=options, + response=response, + stream=stream, + stream_cls=stream_cls, + retries_taken=retries_taken, + ) + + async def _sleep_for_retry( + self, *, retries_taken: int, max_retries: int, options: FinalRequestOptions, response: httpx.Response | None + ) -> None: + remaining_retries = max_retries - retries_taken + if remaining_retries == 1: + log.debug("1 retry left") + else: + log.debug("%i retries left", remaining_retries) + + timeout = self._calculate_retry_timeout(remaining_retries, options, response.headers if response else None) + log.info("Retrying request to %s in %f seconds", options.url, timeout) + + await anyio.sleep(timeout) + + async def _process_response( + self, + *, + cast_to: Type[ResponseT], + options: FinalRequestOptions, + response: httpx.Response, + stream: bool, + stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + retries_taken: int = 0, + ) -> ResponseT: + origin = get_origin(cast_to) or cast_to + + if ( + inspect.isclass(origin) + and issubclass(origin, BaseAPIResponse) + # we only want to actually return the custom BaseAPIResponse class if we're + # returning the raw response, or if we're not streaming SSE, as if we're streaming + # SSE then `cast_to` doesn't actively reflect the type we need to parse into + and (not stream or bool(response.request.headers.get(RAW_RESPONSE_HEADER))) + ): + if not issubclass(origin, AsyncAPIResponse): + raise TypeError(f"API Response types must subclass {AsyncAPIResponse}; Received {origin}") + + response_cls = cast("type[BaseAPIResponse[Any]]", cast_to) + return cast( + "ResponseT", + response_cls( + raw=response, + client=self, + cast_to=extract_response_type(response_cls), + stream=stream, + stream_cls=stream_cls, + options=options, + retries_taken=retries_taken, + ), + ) + + if cast_to == httpx.Response: + return cast(ResponseT, response) + + api_response = AsyncAPIResponse( + raw=response, + client=self, + cast_to=cast("type[ResponseT]", cast_to), # pyright: ignore[reportUnnecessaryCast] + stream=stream, + stream_cls=stream_cls, + options=options, + retries_taken=retries_taken, + ) + if bool(response.request.headers.get(RAW_RESPONSE_HEADER)): + return cast(ResponseT, api_response) + + return await api_response.parse() + + def _request_api_list( + self, + model: Type[_T], + page: Type[AsyncPageT], + options: FinalRequestOptions, + ) -> AsyncPaginator[_T, AsyncPageT]: + return AsyncPaginator(client=self, options=options, page_cls=page, model=model) + + @overload + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: Literal[False] = False, + ) -> ResponseT: ... + + @overload + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: Literal[True], + stream_cls: type[_AsyncStreamT], + ) -> _AsyncStreamT: ... + + @overload + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: ... 
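+
+    # Note: the overloads above let type checkers narrow the return type from the
+    # literal value of `stream`: `stream=False` resolves to the parsed `ResponseT`,
+    # while `stream=True` together with `stream_cls` resolves to that async stream
+    # type. The implementation below builds the final request options and delegates
+    # to `request()`.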
+ + async def get( + self, + path: str, + *, + cast_to: Type[ResponseT], + options: RequestOptions = {}, + stream: bool = False, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: + opts = FinalRequestOptions.construct(method="get", url=path, **options) + return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls) + + @overload + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: Literal[False] = False, + ) -> ResponseT: ... + + @overload + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: Literal[True], + stream_cls: type[_AsyncStreamT], + ) -> _AsyncStreamT: ... + + @overload + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: bool, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: ... + + async def post( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + stream: bool = False, + stream_cls: type[_AsyncStreamT] | None = None, + ) -> ResponseT | _AsyncStreamT: + opts = FinalRequestOptions.construct( + method="post", url=path, json_data=body, files=await async_to_httpx_files(files), **options + ) + return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls) + + async def patch( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct(method="patch", url=path, json_data=body, **options) + return await self.request(cast_to, opts) + + async def put( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + files: RequestFiles | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct( + method="put", url=path, json_data=body, files=await async_to_httpx_files(files), **options + ) + return await self.request(cast_to, opts) + + async def delete( + self, + path: str, + *, + cast_to: Type[ResponseT], + body: Body | None = None, + options: RequestOptions = {}, + ) -> ResponseT: + opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) + return await self.request(cast_to, opts) + + def get_api_list( + self, + path: str, + *, + model: Type[_T], + page: Type[AsyncPageT], + body: Body | None = None, + options: RequestOptions = {}, + method: str = "get", + ) -> AsyncPaginator[_T, AsyncPageT]: + opts = FinalRequestOptions.construct(method=method, url=path, json_data=body, **options) + return self._request_api_list(model, page, opts) + + +def make_request_options( + *, + query: Query | None = None, + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + idempotency_key: str | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + post_parser: PostParser | NotGiven = NOT_GIVEN, +) -> RequestOptions: + """Create a dict of type RequestOptions without keys of NotGiven values.""" + options: RequestOptions = {} + if extra_headers is not None: + options["headers"] = extra_headers + + if extra_body is not None: + options["extra_json"] = 
cast(AnyMapping, extra_body) + + if query is not None: + options["params"] = query + + if extra_query is not None: + options["params"] = {**options.get("params", {}), **extra_query} + + if not isinstance(timeout, NotGiven): + options["timeout"] = timeout + + if idempotency_key is not None: + options["idempotency_key"] = idempotency_key + + if is_given(post_parser): + # internal + options["post_parser"] = post_parser # type: ignore + + return options + + +class ForceMultipartDict(Dict[str, None]): + def __bool__(self) -> bool: + return True + + +class OtherPlatform: + def __init__(self, name: str) -> None: + self.name = name + + @override + def __str__(self) -> str: + return f"Other:{self.name}" + + +Platform = Union[ + OtherPlatform, + Literal[ + "MacOS", + "Linux", + "Windows", + "FreeBSD", + "OpenBSD", + "iOS", + "Android", + "Unknown", + ], +] + + +def get_platform() -> Platform: + try: + system = platform.system().lower() + platform_name = platform.platform().lower() + except Exception: + return "Unknown" + + if "iphone" in platform_name or "ipad" in platform_name: + # Tested using Python3IDE on an iPhone 11 and Pythonista on an iPad 7 + # system is Darwin and platform_name is a string like: + # - Darwin-21.6.0-iPhone12,1-64bit + # - Darwin-21.6.0-iPad7,11-64bit + return "iOS" + + if system == "darwin": + return "MacOS" + + if system == "windows": + return "Windows" + + if "android" in platform_name: + # Tested using Pydroid 3 + # system is Linux and platform_name is a string like 'Linux-5.10.81-android12-9-00001-geba40aecb3b7-ab8534902-aarch64-with-libc' + return "Android" + + if system == "linux": + # https://distro.readthedocs.io/en/latest/#distro.id + distro_id = distro.id() + if distro_id == "freebsd": + return "FreeBSD" + + if distro_id == "openbsd": + return "OpenBSD" + + return "Linux" + + if platform_name: + return OtherPlatform(platform_name) + + return "Unknown" + + +@lru_cache(maxsize=None) +def platform_headers(version: str, *, platform: Platform | None) -> Dict[str, str]: + return { + "X-Stainless-Lang": "python", + "X-Stainless-Package-Version": version, + "X-Stainless-OS": str(platform or get_platform()), + "X-Stainless-Arch": str(get_architecture()), + "X-Stainless-Runtime": get_python_runtime(), + "X-Stainless-Runtime-Version": get_python_version(), + } + + +class OtherArch: + def __init__(self, name: str) -> None: + self.name = name + + @override + def __str__(self) -> str: + return f"other:{self.name}" + + +Arch = Union[OtherArch, Literal["x32", "x64", "arm", "arm64", "unknown"]] + + +def get_python_runtime() -> str: + try: + return platform.python_implementation() + except Exception: + return "unknown" + + +def get_python_version() -> str: + try: + return platform.python_version() + except Exception: + return "unknown" + + +def get_architecture() -> Arch: + try: + machine = platform.machine().lower() + except Exception: + return "unknown" + + if machine in ("arm64", "aarch64"): + return "arm64" + + # TODO: untested + if machine == "arm": + return "arm" + + if machine == "x86_64": + return "x64" + + # TODO: untested + if sys.maxsize <= 2**32: + return "x32" + + if machine: + return OtherArch(machine) + + return "unknown" + + +def _merge_mappings( + obj1: Mapping[_T_co, Union[_T, Omit]], + obj2: Mapping[_T_co, Union[_T, Omit]], +) -> Dict[_T_co, _T]: + """Merge two mappings of the same type, removing any values that are instances of `Omit`. + + In cases with duplicate keys the second mapping takes precedence. 
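+
+    A small illustrative example (assuming the `Omit` sentinel is constructed as `Omit()`):
+
+    ```py
+    _merge_mappings({"a": "1", "b": "2"}, {"b": Omit(), "c": "3"})
+    # -> {"a": "1", "c": "3"}
+    ```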
+ """ + merged = {**obj1, **obj2} + return {key: value for key, value in merged.items() if not isinstance(value, Omit)} diff --git a/src/agentex/_client.py b/src/agentex/_client.py new file mode 100644 index 00000000..ee34e357 --- /dev/null +++ b/src/agentex/_client.py @@ -0,0 +1,535 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, Union, Mapping +from typing_extensions import Self, override + +import httpx + +from . import _exceptions +from ._qs import Querystring +from ._types import ( + NOT_GIVEN, + Body, + Omit, + Query, + Headers, + Timeout, + NotGiven, + Transport, + ProxiesTypes, + RequestOptions, +) +from ._utils import is_given, get_async_library +from ._version import __version__ +from ._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from .resources import echo, spans, events, states, tracker +from ._streaming import Stream as Stream, AsyncStream as AsyncStream +from ._exceptions import APIStatusError +from ._base_client import ( + DEFAULT_MAX_RETRIES, + SyncAPIClient, + AsyncAPIClient, + make_request_options, +) +from .resources.tasks import tasks +from .resources.agents import agents +from .resources.messages import messages + +__all__ = ["Timeout", "Transport", "ProxiesTypes", "RequestOptions", "Agentex", "AsyncAgentex", "Client", "AsyncClient"] + + +class Agentex(SyncAPIClient): + echo: echo.EchoResource + agents: agents.AgentsResource + tasks: tasks.TasksResource + messages: messages.MessagesResource + spans: spans.SpansResource + states: states.StatesResource + events: events.EventsResource + tracker: tracker.TrackerResource + with_raw_response: AgentexWithRawResponse + with_streaming_response: AgentexWithStreamedResponse + + # client options + api_key: str | None + + def __init__( + self, + *, + api_key: str | None = None, + base_url: str | httpx.URL | None = None, + timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN, + max_retries: int = DEFAULT_MAX_RETRIES, + default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + # Configure a custom httpx client. + # We provide a `DefaultHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`. + # See the [httpx documentation](https://www.python-httpx.org/api/#client) for more details. + http_client: httpx.Client | None = None, + # Enable or disable schema validation for data returned by the API. + # When enabled an error APIResponseValidationError is raised + # if the API responds with invalid data for the expected schema. + # + # This parameter may be removed or changed in the future. + # If you rely on this feature, please open a GitHub issue + # outlining your use-case to help us decide if it should be + # part of our public interface in the future. + _strict_response_validation: bool = False, + ) -> None: + """Construct a new synchronous Agentex client instance. + + This automatically infers the `api_key` argument from the `AGENTEX_SDK_API_KEY` environment variable if it is not provided. 
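+
+        A minimal illustrative example (the key shown is a placeholder):
+
+        ```py
+        client = Agentex(api_key="my-api-key")
+        ```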
+ """ + if api_key is None: + api_key = os.environ.get("AGENTEX_SDK_API_KEY") + self.api_key = api_key + + if base_url is None: + base_url = os.environ.get("AGENTEX_BASE_URL") + if base_url is None: + base_url = f"https://api.example.com" + + super().__init__( + version=__version__, + base_url=base_url, + max_retries=max_retries, + timeout=timeout, + http_client=http_client, + custom_headers=default_headers, + custom_query=default_query, + _strict_response_validation=_strict_response_validation, + ) + + self.echo = echo.EchoResource(self) + self.agents = agents.AgentsResource(self) + self.tasks = tasks.TasksResource(self) + self.messages = messages.MessagesResource(self) + self.spans = spans.SpansResource(self) + self.states = states.StatesResource(self) + self.events = events.EventsResource(self) + self.tracker = tracker.TrackerResource(self) + self.with_raw_response = AgentexWithRawResponse(self) + self.with_streaming_response = AgentexWithStreamedResponse(self) + + @property + @override + def qs(self) -> Querystring: + return Querystring(array_format="comma") + + @property + @override + def auth_headers(self) -> dict[str, str]: + api_key = self.api_key + if api_key is None: + return {} + return {"Authorization": f"Bearer {api_key}"} + + @property + @override + def default_headers(self) -> dict[str, str | Omit]: + return { + **super().default_headers, + "X-Stainless-Async": "false", + **self._custom_headers, + } + + @override + def _validate_headers(self, headers: Headers, custom_headers: Headers) -> None: + if self.api_key and headers.get("Authorization"): + return + if isinstance(custom_headers.get("Authorization"), Omit): + return + + raise TypeError( + '"Could not resolve authentication method. Expected the api_key to be set. Or for the `Authorization` headers to be explicitly omitted"' + ) + + def copy( + self, + *, + api_key: str | None = None, + base_url: str | httpx.URL | None = None, + timeout: float | Timeout | None | NotGiven = NOT_GIVEN, + http_client: httpx.Client | None = None, + max_retries: int | NotGiven = NOT_GIVEN, + default_headers: Mapping[str, str] | None = None, + set_default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + set_default_query: Mapping[str, object] | None = None, + _extra_kwargs: Mapping[str, Any] = {}, + ) -> Self: + """ + Create a new client instance re-using the same options given to the current client with optional overriding. 
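+
+        For example (illustrative values):
+
+        ```py
+        client_with_short_timeout = client.copy(timeout=5.0)
+        ```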
+ """ + if default_headers is not None and set_default_headers is not None: + raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive") + + if default_query is not None and set_default_query is not None: + raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive") + + headers = self._custom_headers + if default_headers is not None: + headers = {**headers, **default_headers} + elif set_default_headers is not None: + headers = set_default_headers + + params = self._custom_query + if default_query is not None: + params = {**params, **default_query} + elif set_default_query is not None: + params = set_default_query + + http_client = http_client or self._client + return self.__class__( + api_key=api_key or self.api_key, + base_url=base_url or self.base_url, + timeout=self.timeout if isinstance(timeout, NotGiven) else timeout, + http_client=http_client, + max_retries=max_retries if is_given(max_retries) else self.max_retries, + default_headers=headers, + default_query=params, + **_extra_kwargs, + ) + + # Alias for `copy` for nicer inline usage, e.g. + # client.with_options(timeout=10).foo.create(...) + with_options = copy + + def get_root( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> object: + """Root""" + return self.get( + "/", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=object, + ) + + @override + def _make_status_error( + self, + err_msg: str, + *, + body: object, + response: httpx.Response, + ) -> APIStatusError: + if response.status_code == 400: + return _exceptions.BadRequestError(err_msg, response=response, body=body) + + if response.status_code == 401: + return _exceptions.AuthenticationError(err_msg, response=response, body=body) + + if response.status_code == 403: + return _exceptions.PermissionDeniedError(err_msg, response=response, body=body) + + if response.status_code == 404: + return _exceptions.NotFoundError(err_msg, response=response, body=body) + + if response.status_code == 409: + return _exceptions.ConflictError(err_msg, response=response, body=body) + + if response.status_code == 422: + return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body) + + if response.status_code == 429: + return _exceptions.RateLimitError(err_msg, response=response, body=body) + + if response.status_code >= 500: + return _exceptions.InternalServerError(err_msg, response=response, body=body) + return APIStatusError(err_msg, response=response, body=body) + + +class AsyncAgentex(AsyncAPIClient): + echo: echo.AsyncEchoResource + agents: agents.AsyncAgentsResource + tasks: tasks.AsyncTasksResource + messages: messages.AsyncMessagesResource + spans: spans.AsyncSpansResource + states: states.AsyncStatesResource + events: events.AsyncEventsResource + tracker: tracker.AsyncTrackerResource + with_raw_response: AsyncAgentexWithRawResponse + with_streaming_response: AsyncAgentexWithStreamedResponse + + # client options + api_key: str | None + + def __init__( + self, + *, + api_key: str | None = None, + base_url: str | httpx.URL | None = 
None, + timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN, + max_retries: int = DEFAULT_MAX_RETRIES, + default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + # Configure a custom httpx client. + # We provide a `DefaultAsyncHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`. + # See the [httpx documentation](https://www.python-httpx.org/api/#asyncclient) for more details. + http_client: httpx.AsyncClient | None = None, + # Enable or disable schema validation for data returned by the API. + # When enabled an error APIResponseValidationError is raised + # if the API responds with invalid data for the expected schema. + # + # This parameter may be removed or changed in the future. + # If you rely on this feature, please open a GitHub issue + # outlining your use-case to help us decide if it should be + # part of our public interface in the future. + _strict_response_validation: bool = False, + ) -> None: + """Construct a new async AsyncAgentex client instance. + + This automatically infers the `api_key` argument from the `AGENTEX_SDK_API_KEY` environment variable if it is not provided. + """ + if api_key is None: + api_key = os.environ.get("AGENTEX_SDK_API_KEY") + self.api_key = api_key + + if base_url is None: + base_url = os.environ.get("AGENTEX_BASE_URL") + if base_url is None: + base_url = f"https://api.example.com" + + super().__init__( + version=__version__, + base_url=base_url, + max_retries=max_retries, + timeout=timeout, + http_client=http_client, + custom_headers=default_headers, + custom_query=default_query, + _strict_response_validation=_strict_response_validation, + ) + + self.echo = echo.AsyncEchoResource(self) + self.agents = agents.AsyncAgentsResource(self) + self.tasks = tasks.AsyncTasksResource(self) + self.messages = messages.AsyncMessagesResource(self) + self.spans = spans.AsyncSpansResource(self) + self.states = states.AsyncStatesResource(self) + self.events = events.AsyncEventsResource(self) + self.tracker = tracker.AsyncTrackerResource(self) + self.with_raw_response = AsyncAgentexWithRawResponse(self) + self.with_streaming_response = AsyncAgentexWithStreamedResponse(self) + + @property + @override + def qs(self) -> Querystring: + return Querystring(array_format="comma") + + @property + @override + def auth_headers(self) -> dict[str, str]: + api_key = self.api_key + if api_key is None: + return {} + return {"Authorization": f"Bearer {api_key}"} + + @property + @override + def default_headers(self) -> dict[str, str | Omit]: + return { + **super().default_headers, + "X-Stainless-Async": f"async:{get_async_library()}", + **self._custom_headers, + } + + @override + def _validate_headers(self, headers: Headers, custom_headers: Headers) -> None: + if self.api_key and headers.get("Authorization"): + return + if isinstance(custom_headers.get("Authorization"), Omit): + return + + raise TypeError( + '"Could not resolve authentication method. Expected the api_key to be set. 
Or for the `Authorization` headers to be explicitly omitted"' + ) + + def copy( + self, + *, + api_key: str | None = None, + base_url: str | httpx.URL | None = None, + timeout: float | Timeout | None | NotGiven = NOT_GIVEN, + http_client: httpx.AsyncClient | None = None, + max_retries: int | NotGiven = NOT_GIVEN, + default_headers: Mapping[str, str] | None = None, + set_default_headers: Mapping[str, str] | None = None, + default_query: Mapping[str, object] | None = None, + set_default_query: Mapping[str, object] | None = None, + _extra_kwargs: Mapping[str, Any] = {}, + ) -> Self: + """ + Create a new client instance re-using the same options given to the current client with optional overriding. + """ + if default_headers is not None and set_default_headers is not None: + raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive") + + if default_query is not None and set_default_query is not None: + raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive") + + headers = self._custom_headers + if default_headers is not None: + headers = {**headers, **default_headers} + elif set_default_headers is not None: + headers = set_default_headers + + params = self._custom_query + if default_query is not None: + params = {**params, **default_query} + elif set_default_query is not None: + params = set_default_query + + http_client = http_client or self._client + return self.__class__( + api_key=api_key or self.api_key, + base_url=base_url or self.base_url, + timeout=self.timeout if isinstance(timeout, NotGiven) else timeout, + http_client=http_client, + max_retries=max_retries if is_given(max_retries) else self.max_retries, + default_headers=headers, + default_query=params, + **_extra_kwargs, + ) + + # Alias for `copy` for nicer inline usage, e.g. + # client.with_options(timeout=10).foo.create(...) + with_options = copy + + async def get_root( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> object: + """Root""" + return await self.get( + "/", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=object, + ) + + @override + def _make_status_error( + self, + err_msg: str, + *, + body: object, + response: httpx.Response, + ) -> APIStatusError: + if response.status_code == 400: + return _exceptions.BadRequestError(err_msg, response=response, body=body) + + if response.status_code == 401: + return _exceptions.AuthenticationError(err_msg, response=response, body=body) + + if response.status_code == 403: + return _exceptions.PermissionDeniedError(err_msg, response=response, body=body) + + if response.status_code == 404: + return _exceptions.NotFoundError(err_msg, response=response, body=body) + + if response.status_code == 409: + return _exceptions.ConflictError(err_msg, response=response, body=body) + + if response.status_code == 422: + return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body) + + if response.status_code == 429: + return _exceptions.RateLimitError(err_msg, response=response, body=body) + + if response.status_code >= 500: + return _exceptions.InternalServerError(err_msg, response=response, body=body) + return APIStatusError(err_msg, response=response, body=body) + + +class AgentexWithRawResponse: + def __init__(self, client: Agentex) -> None: + self.echo = echo.EchoResourceWithRawResponse(client.echo) + self.agents = agents.AgentsResourceWithRawResponse(client.agents) + self.tasks = tasks.TasksResourceWithRawResponse(client.tasks) + self.messages = messages.MessagesResourceWithRawResponse(client.messages) + self.spans = spans.SpansResourceWithRawResponse(client.spans) + self.states = states.StatesResourceWithRawResponse(client.states) + self.events = events.EventsResourceWithRawResponse(client.events) + self.tracker = tracker.TrackerResourceWithRawResponse(client.tracker) + + self.get_root = to_raw_response_wrapper( + client.get_root, + ) + + +class AsyncAgentexWithRawResponse: + def __init__(self, client: AsyncAgentex) -> None: + self.echo = echo.AsyncEchoResourceWithRawResponse(client.echo) + self.agents = agents.AsyncAgentsResourceWithRawResponse(client.agents) + self.tasks = tasks.AsyncTasksResourceWithRawResponse(client.tasks) + self.messages = messages.AsyncMessagesResourceWithRawResponse(client.messages) + self.spans = spans.AsyncSpansResourceWithRawResponse(client.spans) + self.states = states.AsyncStatesResourceWithRawResponse(client.states) + self.events = events.AsyncEventsResourceWithRawResponse(client.events) + self.tracker = tracker.AsyncTrackerResourceWithRawResponse(client.tracker) + + self.get_root = async_to_raw_response_wrapper( + client.get_root, + ) + + +class AgentexWithStreamedResponse: + def __init__(self, client: Agentex) -> None: + self.echo = echo.EchoResourceWithStreamingResponse(client.echo) + self.agents = agents.AgentsResourceWithStreamingResponse(client.agents) + self.tasks = tasks.TasksResourceWithStreamingResponse(client.tasks) + self.messages = messages.MessagesResourceWithStreamingResponse(client.messages) + self.spans = spans.SpansResourceWithStreamingResponse(client.spans) + self.states = states.StatesResourceWithStreamingResponse(client.states) + self.events = events.EventsResourceWithStreamingResponse(client.events) + self.tracker = 
tracker.TrackerResourceWithStreamingResponse(client.tracker) + + self.get_root = to_streamed_response_wrapper( + client.get_root, + ) + + +class AsyncAgentexWithStreamedResponse: + def __init__(self, client: AsyncAgentex) -> None: + self.echo = echo.AsyncEchoResourceWithStreamingResponse(client.echo) + self.agents = agents.AsyncAgentsResourceWithStreamingResponse(client.agents) + self.tasks = tasks.AsyncTasksResourceWithStreamingResponse(client.tasks) + self.messages = messages.AsyncMessagesResourceWithStreamingResponse(client.messages) + self.spans = spans.AsyncSpansResourceWithStreamingResponse(client.spans) + self.states = states.AsyncStatesResourceWithStreamingResponse(client.states) + self.events = events.AsyncEventsResourceWithStreamingResponse(client.events) + self.tracker = tracker.AsyncTrackerResourceWithStreamingResponse(client.tracker) + + self.get_root = async_to_streamed_response_wrapper( + client.get_root, + ) + + +Client = Agentex + +AsyncClient = AsyncAgentex diff --git a/src/agentex/_compat.py b/src/agentex/_compat.py new file mode 100644 index 00000000..92d9ee61 --- /dev/null +++ b/src/agentex/_compat.py @@ -0,0 +1,219 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload +from datetime import date, datetime +from typing_extensions import Self, Literal + +import pydantic +from pydantic.fields import FieldInfo + +from ._types import IncEx, StrBytesIntFloat + +_T = TypeVar("_T") +_ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel) + +# --------------- Pydantic v2 compatibility --------------- + +# Pyright incorrectly reports some of our functions as overriding a method when they don't +# pyright: reportIncompatibleMethodOverride=false + +PYDANTIC_V2 = pydantic.VERSION.startswith("2.") + +# v1 re-exports +if TYPE_CHECKING: + + def parse_date(value: date | StrBytesIntFloat) -> date: # noqa: ARG001 + ... + + def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: # noqa: ARG001 + ... + + def get_args(t: type[Any]) -> tuple[Any, ...]: # noqa: ARG001 + ... + + def is_union(tp: type[Any] | None) -> bool: # noqa: ARG001 + ... + + def get_origin(t: type[Any]) -> type[Any] | None: # noqa: ARG001 + ... + + def is_literal_type(type_: type[Any]) -> bool: # noqa: ARG001 + ... + + def is_typeddict(type_: type[Any]) -> bool: # noqa: ARG001 + ... + +else: + if PYDANTIC_V2: + from pydantic.v1.typing import ( + get_args as get_args, + is_union as is_union, + get_origin as get_origin, + is_typeddict as is_typeddict, + is_literal_type as is_literal_type, + ) + from pydantic.v1.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime + else: + from pydantic.typing import ( + get_args as get_args, + is_union as is_union, + get_origin as get_origin, + is_typeddict as is_typeddict, + is_literal_type as is_literal_type, + ) + from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime + + +# refactored config +if TYPE_CHECKING: + from pydantic import ConfigDict as ConfigDict +else: + if PYDANTIC_V2: + from pydantic import ConfigDict + else: + # TODO: provide an error message here? 
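+            # `ConfigDict` only exists in Pydantic v2; on v1 the class-based
+            # `Config` (see `BaseModel.Config` in `_models.py`) is used instead.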
+ ConfigDict = None + + +# renamed methods / properties +def parse_obj(model: type[_ModelT], value: object) -> _ModelT: + if PYDANTIC_V2: + return model.model_validate(value) + else: + return cast(_ModelT, model.parse_obj(value)) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + + +def field_is_required(field: FieldInfo) -> bool: + if PYDANTIC_V2: + return field.is_required() + return field.required # type: ignore + + +def field_get_default(field: FieldInfo) -> Any: + value = field.get_default() + if PYDANTIC_V2: + from pydantic_core import PydanticUndefined + + if value == PydanticUndefined: + return None + return value + return value + + +def field_outer_type(field: FieldInfo) -> Any: + if PYDANTIC_V2: + return field.annotation + return field.outer_type_ # type: ignore + + +def get_model_config(model: type[pydantic.BaseModel]) -> Any: + if PYDANTIC_V2: + return model.model_config + return model.__config__ # type: ignore + + +def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]: + if PYDANTIC_V2: + return model.model_fields + return model.__fields__ # type: ignore + + +def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT: + if PYDANTIC_V2: + return model.model_copy(deep=deep) + return model.copy(deep=deep) # type: ignore + + +def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str: + if PYDANTIC_V2: + return model.model_dump_json(indent=indent) + return model.json(indent=indent) # type: ignore + + +def model_dump( + model: pydantic.BaseModel, + *, + exclude: IncEx | None = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + warnings: bool = True, + mode: Literal["json", "python"] = "python", +) -> dict[str, Any]: + if PYDANTIC_V2 or hasattr(model, "model_dump"): + return model.model_dump( + mode=mode, + exclude=exclude, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + # warnings are not supported in Pydantic v1 + warnings=warnings if PYDANTIC_V2 else True, + ) + return cast( + "dict[str, Any]", + model.dict( # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + exclude=exclude, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + ), + ) + + +def model_parse(model: type[_ModelT], data: Any) -> _ModelT: + if PYDANTIC_V2: + return model.model_validate(data) + return model.parse_obj(data) # pyright: ignore[reportDeprecated] + + +# generic models +if TYPE_CHECKING: + + class GenericModel(pydantic.BaseModel): ... + +else: + if PYDANTIC_V2: + # there no longer needs to be a distinction in v2 but + # we still have to create our own subclass to avoid + # inconsistent MRO ordering errors + class GenericModel(pydantic.BaseModel): ... + + else: + import pydantic.generics + + class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ... + + +# cached properties +if TYPE_CHECKING: + cached_property = property + + # we define a separate type (copied from typeshed) + # that represents that `cached_property` is `set`able + # at runtime, which differs from `@property`. + # + # this is a separate type as editors likely special case + # `@property` and we don't want to cause issues just to have + # more helpful internal types. + + class typed_cached_property(Generic[_T]): + func: Callable[[Any], _T] + attrname: str | None + + def __init__(self, func: Callable[[Any], _T]) -> None: ... + + @overload + def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ... 
+ + @overload + def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ... + + def __get__(self, instance: object, owner: type[Any] | None = None) -> _T | Self: + raise NotImplementedError() + + def __set_name__(self, owner: type[Any], name: str) -> None: ... + + # __set__ is not defined at runtime, but @cached_property is designed to be settable + def __set__(self, instance: object, value: _T) -> None: ... +else: + from functools import cached_property as cached_property + + typed_cached_property = cached_property diff --git a/src/agentex/_constants.py b/src/agentex/_constants.py new file mode 100644 index 00000000..6ddf2c71 --- /dev/null +++ b/src/agentex/_constants.py @@ -0,0 +1,14 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +import httpx + +RAW_RESPONSE_HEADER = "X-Stainless-Raw-Response" +OVERRIDE_CAST_TO_HEADER = "____stainless_override_cast_to" + +# default timeout is 1 minute +DEFAULT_TIMEOUT = httpx.Timeout(timeout=60, connect=5.0) +DEFAULT_MAX_RETRIES = 2 +DEFAULT_CONNECTION_LIMITS = httpx.Limits(max_connections=100, max_keepalive_connections=20) + +INITIAL_RETRY_DELAY = 0.5 +MAX_RETRY_DELAY = 8.0 diff --git a/src/agentex/_exceptions.py b/src/agentex/_exceptions.py new file mode 100644 index 00000000..8f26c82b --- /dev/null +++ b/src/agentex/_exceptions.py @@ -0,0 +1,108 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal + +import httpx + +__all__ = [ + "BadRequestError", + "AuthenticationError", + "PermissionDeniedError", + "NotFoundError", + "ConflictError", + "UnprocessableEntityError", + "RateLimitError", + "InternalServerError", +] + + +class AgentexError(Exception): + pass + + +class APIError(AgentexError): + message: str + request: httpx.Request + + body: object | None + """The API response body. + + If the API responded with a valid JSON structure then this property will be the + decoded result. + + If it isn't a valid JSON structure then this will be the raw response. + + If there was no response associated with this error then it will be `None`. 
+ """ + + def __init__(self, message: str, request: httpx.Request, *, body: object | None) -> None: # noqa: ARG002 + super().__init__(message) + self.request = request + self.message = message + self.body = body + + +class APIResponseValidationError(APIError): + response: httpx.Response + status_code: int + + def __init__(self, response: httpx.Response, body: object | None, *, message: str | None = None) -> None: + super().__init__(message or "Data returned by API invalid for expected schema.", response.request, body=body) + self.response = response + self.status_code = response.status_code + + +class APIStatusError(APIError): + """Raised when an API response has a status code of 4xx or 5xx.""" + + response: httpx.Response + status_code: int + + def __init__(self, message: str, *, response: httpx.Response, body: object | None) -> None: + super().__init__(message, response.request, body=body) + self.response = response + self.status_code = response.status_code + + +class APIConnectionError(APIError): + def __init__(self, *, message: str = "Connection error.", request: httpx.Request) -> None: + super().__init__(message, request, body=None) + + +class APITimeoutError(APIConnectionError): + def __init__(self, request: httpx.Request) -> None: + super().__init__(message="Request timed out.", request=request) + + +class BadRequestError(APIStatusError): + status_code: Literal[400] = 400 # pyright: ignore[reportIncompatibleVariableOverride] + + +class AuthenticationError(APIStatusError): + status_code: Literal[401] = 401 # pyright: ignore[reportIncompatibleVariableOverride] + + +class PermissionDeniedError(APIStatusError): + status_code: Literal[403] = 403 # pyright: ignore[reportIncompatibleVariableOverride] + + +class NotFoundError(APIStatusError): + status_code: Literal[404] = 404 # pyright: ignore[reportIncompatibleVariableOverride] + + +class ConflictError(APIStatusError): + status_code: Literal[409] = 409 # pyright: ignore[reportIncompatibleVariableOverride] + + +class UnprocessableEntityError(APIStatusError): + status_code: Literal[422] = 422 # pyright: ignore[reportIncompatibleVariableOverride] + + +class RateLimitError(APIStatusError): + status_code: Literal[429] = 429 # pyright: ignore[reportIncompatibleVariableOverride] + + +class InternalServerError(APIStatusError): + pass diff --git a/src/agentex/_files.py b/src/agentex/_files.py new file mode 100644 index 00000000..715cc207 --- /dev/null +++ b/src/agentex/_files.py @@ -0,0 +1,123 @@ +from __future__ import annotations + +import io +import os +import pathlib +from typing import overload +from typing_extensions import TypeGuard + +import anyio + +from ._types import ( + FileTypes, + FileContent, + RequestFiles, + HttpxFileTypes, + Base64FileInput, + HttpxFileContent, + HttpxRequestFiles, +) +from ._utils import is_tuple_t, is_mapping_t, is_sequence_t + + +def is_base64_file_input(obj: object) -> TypeGuard[Base64FileInput]: + return isinstance(obj, io.IOBase) or isinstance(obj, os.PathLike) + + +def is_file_content(obj: object) -> TypeGuard[FileContent]: + return ( + isinstance(obj, bytes) or isinstance(obj, tuple) or isinstance(obj, io.IOBase) or isinstance(obj, os.PathLike) + ) + + +def assert_is_file_content(obj: object, *, key: str | None = None) -> None: + if not is_file_content(obj): + prefix = f"Expected entry at `{key}`" if key is not None else f"Expected file input `{obj!r}`" + raise RuntimeError( + f"{prefix} to be bytes, an io.IOBase instance, PathLike or a tuple but received {type(obj)} instead." 
+        ) from None
+
+
+@overload
+def to_httpx_files(files: None) -> None: ...
+
+
+@overload
+def to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ...
+
+
+def to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None:
+    if files is None:
+        return None
+
+    if is_mapping_t(files):
+        files = {key: _transform_file(file) for key, file in files.items()}
+    elif is_sequence_t(files):
+        files = [(key, _transform_file(file)) for key, file in files]
+    else:
+        raise TypeError(f"Unexpected file type input {type(files)}, expected mapping or sequence")
+
+    return files
+
+
+def _transform_file(file: FileTypes) -> HttpxFileTypes:
+    if is_file_content(file):
+        if isinstance(file, os.PathLike):
+            path = pathlib.Path(file)
+            return (path.name, path.read_bytes())
+
+        return file
+
+    if is_tuple_t(file):
+        return (file[0], _read_file_content(file[1]), *file[2:])
+
+    raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple")
+
+
+def _read_file_content(file: FileContent) -> HttpxFileContent:
+    if isinstance(file, os.PathLike):
+        return pathlib.Path(file).read_bytes()
+    return file
+
+
+@overload
+async def async_to_httpx_files(files: None) -> None: ...
+
+
+@overload
+async def async_to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ...
+
+
+async def async_to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None:
+    if files is None:
+        return None
+
+    if is_mapping_t(files):
+        files = {key: await _async_transform_file(file) for key, file in files.items()}
+    elif is_sequence_t(files):
+        files = [(key, await _async_transform_file(file)) for key, file in files]
+    else:
+        raise TypeError(f"Unexpected file type input {type(files)}, expected mapping or sequence")
+
+    return files
+
+
+async def _async_transform_file(file: FileTypes) -> HttpxFileTypes:
+    if is_file_content(file):
+        if isinstance(file, os.PathLike):
+            path = anyio.Path(file)
+            return (path.name, await path.read_bytes())
+
+        return file
+
+    if is_tuple_t(file):
+        return (file[0], await _async_read_file_content(file[1]), *file[2:])
+
+    raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple")
+
+
+async def _async_read_file_content(file: FileContent) -> HttpxFileContent:
+    if isinstance(file, os.PathLike):
+        return await anyio.Path(file).read_bytes()
+
+    return file
diff --git a/src/agentex/_models.py b/src/agentex/_models.py
new file mode 100644
index 00000000..ffcbf67b
--- /dev/null
+++ b/src/agentex/_models.py
@@ -0,0 +1,808 @@
+from __future__ import annotations
+
+import os
+import inspect
+from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, Optional, cast
+from datetime import date, datetime
+from typing_extensions import (
+    List,
+    Unpack,
+    Literal,
+    ClassVar,
+    Protocol,
+    Required,
+    ParamSpec,
+    TypedDict,
+    TypeGuard,
+    final,
+    override,
+    runtime_checkable,
+)
+
+import pydantic
+from pydantic.fields import FieldInfo
+
+from ._types import (
+    Body,
+    IncEx,
+    Query,
+    ModelT,
+    Headers,
+    Timeout,
+    NotGiven,
+    AnyMapping,
+    HttpxRequestFiles,
+)
+from ._utils import (
+    PropertyInfo,
+    is_list,
+    is_given,
+    json_safe,
+    lru_cache,
+    is_mapping,
+    parse_date,
+    coerce_boolean,
+    parse_datetime,
+    strip_not_given,
+    extract_type_arg,
+    is_annotated_type,
+    is_type_alias_type,
+    strip_annotated_type,
+)
+from ._compat import (
+    PYDANTIC_V2,
+    ConfigDict,
+    GenericModel as BaseGenericModel,
+    get_args,
+    is_union,
+    parse_obj,
+    get_origin,
+    is_literal_type,
+    get_model_config,
+    get_model_fields,
+    
field_get_default, +) +from ._constants import RAW_RESPONSE_HEADER + +if TYPE_CHECKING: + from pydantic_core.core_schema import ModelField, ModelSchema, LiteralSchema, ModelFieldsSchema + +__all__ = ["BaseModel", "GenericModel"] + +_T = TypeVar("_T") +_BaseModelT = TypeVar("_BaseModelT", bound="BaseModel") + +P = ParamSpec("P") + + +@runtime_checkable +class _ConfigProtocol(Protocol): + allow_population_by_field_name: bool + + +class BaseModel(pydantic.BaseModel): + if PYDANTIC_V2: + model_config: ClassVar[ConfigDict] = ConfigDict( + extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true")) + ) + else: + + @property + @override + def model_fields_set(self) -> set[str]: + # a forwards-compat shim for pydantic v2 + return self.__fields_set__ # type: ignore + + class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] + extra: Any = pydantic.Extra.allow # type: ignore + + def to_dict( + self, + *, + mode: Literal["json", "python"] = "python", + use_api_names: bool = True, + exclude_unset: bool = True, + exclude_defaults: bool = False, + exclude_none: bool = False, + warnings: bool = True, + ) -> dict[str, object]: + """Recursively generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + + By default, fields that were not set by the API will not be included, + and keys will match the API response, *not* the property names from the model. + + For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property, + the output will use the `"fooBar"` key (unless `use_api_names=False` is passed). + + Args: + mode: + If mode is 'json', the dictionary will only contain JSON serializable types. e.g. `datetime` will be turned into a string, `"2024-3-22T18:11:19.117000Z"`. + If mode is 'python', the dictionary may contain any Python objects. e.g. `datetime(2024, 3, 22)` + + use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`. + exclude_unset: Whether to exclude fields that have not been explicitly set. + exclude_defaults: Whether to exclude fields that are set to their default value from the output. + exclude_none: Whether to exclude fields that have a value of `None` from the output. + warnings: Whether to log warnings when invalid fields are encountered. This is only supported in Pydantic v2. + """ + return self.model_dump( + mode=mode, + by_alias=use_api_names, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + warnings=warnings, + ) + + def to_json( + self, + *, + indent: int | None = 2, + use_api_names: bool = True, + exclude_unset: bool = True, + exclude_defaults: bool = False, + exclude_none: bool = False, + warnings: bool = True, + ) -> str: + """Generates a JSON string representing this model as it would be received from or sent to the API (but with indentation). + + By default, fields that were not set by the API will not be included, + and keys will match the API response, *not* the property names from the model. + + For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property, + the output will use the `"fooBar"` key (unless `use_api_names=False` is passed). + + Args: + indent: Indentation to use in the JSON output. If `None` is passed, the output will be compact. Defaults to `2` + use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`. 
+ exclude_unset: Whether to exclude fields that have not been explicitly set. + exclude_defaults: Whether to exclude fields that have the default value. + exclude_none: Whether to exclude fields that have a value of `None`. + warnings: Whether to show any warnings that occurred during serialization. This is only supported in Pydantic v2. + """ + return self.model_dump_json( + indent=indent, + by_alias=use_api_names, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + warnings=warnings, + ) + + @override + def __str__(self) -> str: + # mypy complains about an invalid self arg + return f"{self.__repr_name__()}({self.__repr_str__(', ')})" # type: ignore[misc] + + # Override the 'construct' method in a way that supports recursive parsing without validation. + # Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836. + @classmethod + @override + def construct( # pyright: ignore[reportIncompatibleMethodOverride] + __cls: Type[ModelT], + _fields_set: set[str] | None = None, + **values: object, + ) -> ModelT: + m = __cls.__new__(__cls) + fields_values: dict[str, object] = {} + + config = get_model_config(__cls) + populate_by_name = ( + config.allow_population_by_field_name + if isinstance(config, _ConfigProtocol) + else config.get("populate_by_name") + ) + + if _fields_set is None: + _fields_set = set() + + model_fields = get_model_fields(__cls) + for name, field in model_fields.items(): + key = field.alias + if key is None or (key not in values and populate_by_name): + key = name + + if key in values: + fields_values[name] = _construct_field(value=values[key], field=field, key=key) + _fields_set.add(name) + else: + fields_values[name] = field_get_default(field) + + _extra = {} + for key, value in values.items(): + if key not in model_fields: + if PYDANTIC_V2: + _extra[key] = value + else: + _fields_set.add(key) + fields_values[key] = value + + object.__setattr__(m, "__dict__", fields_values) + + if PYDANTIC_V2: + # these properties are copied from Pydantic's `model_construct()` method + object.__setattr__(m, "__pydantic_private__", None) + object.__setattr__(m, "__pydantic_extra__", _extra) + object.__setattr__(m, "__pydantic_fields_set__", _fields_set) + else: + # init_private_attributes() does not exist in v2 + m._init_private_attributes() # type: ignore + + # copied from Pydantic v1's `construct()` method + object.__setattr__(m, "__fields_set__", _fields_set) + + return m + + if not TYPE_CHECKING: + # type checkers incorrectly complain about this assignment + # because the type signatures are technically different + # although not in practice + model_construct = construct + + if not PYDANTIC_V2: + # we define aliases for some of the new pydantic v2 methods so + # that we can just document these methods without having to specify + # a specific pydantic version as some users may not know which + # pydantic version they are currently using + + @override + def model_dump( + self, + *, + mode: Literal["json", "python"] | str = "python", + include: IncEx | None = None, + exclude: IncEx | None = None, + by_alias: bool = False, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + round_trip: bool = False, + warnings: bool | Literal["none", "warn", "error"] = True, + context: dict[str, Any] | None = None, + serialize_as_any: bool = False, + ) -> dict[str, Any]: + """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump + + Generate a dictionary 
representation of the model, optionally specifying which fields to include or exclude. + + Args: + mode: The mode in which `to_python` should run. + If mode is 'json', the dictionary will only contain JSON serializable types. + If mode is 'python', the dictionary may contain any Python objects. + include: A list of fields to include in the output. + exclude: A list of fields to exclude from the output. + by_alias: Whether to use the field's alias in the dictionary key if defined. + exclude_unset: Whether to exclude fields that are unset or None from the output. + exclude_defaults: Whether to exclude fields that are set to their default value from the output. + exclude_none: Whether to exclude fields that have a value of `None` from the output. + round_trip: Whether to enable serialization and deserialization round-trip support. + warnings: Whether to log warnings when invalid fields are encountered. + + Returns: + A dictionary representation of the model. + """ + if mode not in {"json", "python"}: + raise ValueError("mode must be either 'json' or 'python'") + if round_trip != False: + raise ValueError("round_trip is only supported in Pydantic v2") + if warnings != True: + raise ValueError("warnings is only supported in Pydantic v2") + if context is not None: + raise ValueError("context is only supported in Pydantic v2") + if serialize_as_any != False: + raise ValueError("serialize_as_any is only supported in Pydantic v2") + dumped = super().dict( # pyright: ignore[reportDeprecated] + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + + return cast(dict[str, Any], json_safe(dumped)) if mode == "json" else dumped + + @override + def model_dump_json( + self, + *, + indent: int | None = None, + include: IncEx | None = None, + exclude: IncEx | None = None, + by_alias: bool = False, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + round_trip: bool = False, + warnings: bool | Literal["none", "warn", "error"] = True, + context: dict[str, Any] | None = None, + serialize_as_any: bool = False, + ) -> str: + """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json + + Generates a JSON representation of the model using Pydantic's `to_json` method. + + Args: + indent: Indentation to use in the JSON output. If None is passed, the output will be compact. + include: Field(s) to include in the JSON output. Can take either a string or set of strings. + exclude: Field(s) to exclude from the JSON output. Can take either a string or set of strings. + by_alias: Whether to serialize using field aliases. + exclude_unset: Whether to exclude fields that have not been explicitly set. + exclude_defaults: Whether to exclude fields that have the default value. + exclude_none: Whether to exclude fields that have a value of `None`. + round_trip: Whether to use serialization/deserialization between JSON and class instance. + warnings: Whether to show any warnings that occurred during serialization. + + Returns: + A JSON string representation of the model. 
+ """ + if round_trip != False: + raise ValueError("round_trip is only supported in Pydantic v2") + if warnings != True: + raise ValueError("warnings is only supported in Pydantic v2") + if context is not None: + raise ValueError("context is only supported in Pydantic v2") + if serialize_as_any != False: + raise ValueError("serialize_as_any is only supported in Pydantic v2") + return super().json( # type: ignore[reportDeprecated] + indent=indent, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + + +def _construct_field(value: object, field: FieldInfo, key: str) -> object: + if value is None: + return field_get_default(field) + + if PYDANTIC_V2: + type_ = field.annotation + else: + type_ = cast(type, field.outer_type_) # type: ignore + + if type_ is None: + raise RuntimeError(f"Unexpected field type is None for {key}") + + return construct_type(value=value, type_=type_, metadata=getattr(field, "metadata", None)) + + +def is_basemodel(type_: type) -> bool: + """Returns whether or not the given type is either a `BaseModel` or a union of `BaseModel`""" + if is_union(type_): + for variant in get_args(type_): + if is_basemodel(variant): + return True + + return False + + return is_basemodel_type(type_) + + +def is_basemodel_type(type_: type) -> TypeGuard[type[BaseModel] | type[GenericModel]]: + origin = get_origin(type_) or type_ + if not inspect.isclass(origin): + return False + return issubclass(origin, BaseModel) or issubclass(origin, GenericModel) + + +def build( + base_model_cls: Callable[P, _BaseModelT], + *args: P.args, + **kwargs: P.kwargs, +) -> _BaseModelT: + """Construct a BaseModel class without validation. + + This is useful for cases where you need to instantiate a `BaseModel` + from an API response as this provides type-safe params which isn't supported + by helpers like `construct_type()`. + + ```py + build(MyModel, my_field_a="foo", my_field_b=123) + ``` + """ + if args: + raise TypeError( + "Received positional arguments which are not supported; Keyword arguments must be used instead", + ) + + return cast(_BaseModelT, construct_type(type_=base_model_cls, value=kwargs)) + + +def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T: + """Loose coercion to the expected type with construction of nested values. + + Note: the returned value from this function is not guaranteed to match the + given type. + """ + return cast(_T, construct_type(value=value, type_=type_)) + + +def construct_type(*, value: object, type_: object, metadata: Optional[List[Any]] = None) -> object: + """Loose coercion to the expected type with construction of nested values. + + If the given value does not match the expected type then it is returned as-is. + """ + + # store a reference to the original type we were given before we extract any inner + # types so that we can properly resolve forward references in `TypeAliasType` annotations + original_type = None + + # we allow `object` as the input type because otherwise, passing things like + # `Literal['value']` will be reported as a type error by type checkers + type_ = cast("type[object]", type_) + if is_type_alias_type(type_): + original_type = type_ # type: ignore[unreachable] + type_ = type_.__value__ # type: ignore[unreachable] + + # unwrap `Annotated[T, ...]` -> `T` + if metadata is not None and len(metadata) > 0: + meta: tuple[Any, ...] 
= tuple(metadata) + elif is_annotated_type(type_): + meta = get_args(type_)[1:] + type_ = extract_type_arg(type_, 0) + else: + meta = tuple() + + # we need to use the origin class for any types that are subscripted generics + # e.g. Dict[str, object] + origin = get_origin(type_) or type_ + args = get_args(type_) + + if is_union(origin): + try: + return validate_type(type_=cast("type[object]", original_type or type_), value=value) + except Exception: + pass + + # if the type is a discriminated union then we want to construct the right variant + # in the union, even if the data doesn't match exactly, otherwise we'd break code + # that relies on the constructed class types, e.g. + # + # class FooType: + # kind: Literal['foo'] + # value: str + # + # class BarType: + # kind: Literal['bar'] + # value: int + # + # without this block, if the data we get is something like `{'kind': 'bar', 'value': 'foo'}` then + # we'd end up constructing `FooType` when it should be `BarType`. + discriminator = _build_discriminated_union_meta(union=type_, meta_annotations=meta) + if discriminator and is_mapping(value): + variant_value = value.get(discriminator.field_alias_from or discriminator.field_name) + if variant_value and isinstance(variant_value, str): + variant_type = discriminator.mapping.get(variant_value) + if variant_type: + return construct_type(type_=variant_type, value=value) + + # if the data is not valid, use the first variant that doesn't fail while deserializing + for variant in args: + try: + return construct_type(value=value, type_=variant) + except Exception: + continue + + raise RuntimeError(f"Could not convert data into a valid instance of {type_}") + + if origin == dict: + if not is_mapping(value): + return value + + _, items_type = get_args(type_) # Dict[_, items_type] + return {key: construct_type(value=item, type_=items_type) for key, item in value.items()} + + if ( + not is_literal_type(type_) + and inspect.isclass(origin) + and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel)) + ): + if is_list(value): + return [cast(Any, type_).construct(**entry) if is_mapping(entry) else entry for entry in value] + + if is_mapping(value): + if issubclass(type_, BaseModel): + return type_.construct(**value) # type: ignore[arg-type] + + return cast(Any, type_).construct(**value) + + if origin == list: + if not is_list(value): + return value + + inner_type = args[0] # List[inner_type] + return [construct_type(value=entry, type_=inner_type) for entry in value] + + if origin == float: + if isinstance(value, int): + coerced = float(value) + if coerced != value: + return value + return coerced + + return value + + if type_ == datetime: + try: + return parse_datetime(value) # type: ignore + except Exception: + return value + + if type_ == date: + try: + return parse_date(value) # type: ignore + except Exception: + return value + + return value + + +@runtime_checkable +class CachedDiscriminatorType(Protocol): + __discriminator__: DiscriminatorDetails + + +class DiscriminatorDetails: + field_name: str + """The name of the discriminator field in the variant class, e.g. + + ```py + class Foo(BaseModel): + type: Literal['foo'] + ``` + + Will result in field_name='type' + """ + + field_alias_from: str | None + """The name of the discriminator field in the API response, e.g. 
+ + ```py + class Foo(BaseModel): + type: Literal['foo'] = Field(alias='type_from_api') + ``` + + Will result in field_alias_from='type_from_api' + """ + + mapping: dict[str, type] + """Mapping of discriminator value to variant type, e.g. + + {'foo': FooVariant, 'bar': BarVariant} + """ + + def __init__( + self, + *, + mapping: dict[str, type], + discriminator_field: str, + discriminator_alias: str | None, + ) -> None: + self.mapping = mapping + self.field_name = discriminator_field + self.field_alias_from = discriminator_alias + + +def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, ...]) -> DiscriminatorDetails | None: + if isinstance(union, CachedDiscriminatorType): + return union.__discriminator__ + + discriminator_field_name: str | None = None + + for annotation in meta_annotations: + if isinstance(annotation, PropertyInfo) and annotation.discriminator is not None: + discriminator_field_name = annotation.discriminator + break + + if not discriminator_field_name: + return None + + mapping: dict[str, type] = {} + discriminator_alias: str | None = None + + for variant in get_args(union): + variant = strip_annotated_type(variant) + if is_basemodel_type(variant): + if PYDANTIC_V2: + field = _extract_field_schema_pv2(variant, discriminator_field_name) + if not field: + continue + + # Note: if one variant defines an alias then they all should + discriminator_alias = field.get("serialization_alias") + + field_schema = field["schema"] + + if field_schema["type"] == "literal": + for entry in cast("LiteralSchema", field_schema)["expected"]: + if isinstance(entry, str): + mapping[entry] = variant + else: + field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] + if not field_info: + continue + + # Note: if one variant defines an alias then they all should + discriminator_alias = field_info.alias + + if (annotation := getattr(field_info, "annotation", None)) and is_literal_type(annotation): + for entry in get_args(annotation): + if isinstance(entry, str): + mapping[entry] = variant + + if not mapping: + return None + + details = DiscriminatorDetails( + mapping=mapping, + discriminator_field=discriminator_field_name, + discriminator_alias=discriminator_alias, + ) + cast(CachedDiscriminatorType, union).__discriminator__ = details + return details + + +def _extract_field_schema_pv2(model: type[BaseModel], field_name: str) -> ModelField | None: + schema = model.__pydantic_core_schema__ + if schema["type"] == "definitions": + schema = schema["schema"] + + if schema["type"] != "model": + return None + + schema = cast("ModelSchema", schema) + fields_schema = schema["schema"] + if fields_schema["type"] != "model-fields": + return None + + fields_schema = cast("ModelFieldsSchema", fields_schema) + field = fields_schema["fields"].get(field_name) + if not field: + return None + + return cast("ModelField", field) # pyright: ignore[reportUnnecessaryCast] + + +def validate_type(*, type_: type[_T], value: object) -> _T: + """Strict validation that the given value matches the expected type""" + if inspect.isclass(type_) and issubclass(type_, pydantic.BaseModel): + return cast(_T, parse_obj(type_, value)) + + return cast(_T, _validate_non_model_type(type_=type_, value=value)) + + +def set_pydantic_config(typ: Any, config: pydantic.ConfigDict) -> None: + """Add a pydantic config for the given type. + + Note: this is a no-op on Pydantic v1. 
+ """ + setattr(typ, "__pydantic_config__", config) # noqa: B010 + + +# our use of subclassing here causes weirdness for type checkers, +# so we just pretend that we don't subclass +if TYPE_CHECKING: + GenericModel = BaseModel +else: + + class GenericModel(BaseGenericModel, BaseModel): + pass + + +if PYDANTIC_V2: + from pydantic import TypeAdapter as _TypeAdapter + + _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter)) + + if TYPE_CHECKING: + from pydantic import TypeAdapter + else: + TypeAdapter = _CachedTypeAdapter + + def _validate_non_model_type(*, type_: type[_T], value: object) -> _T: + return TypeAdapter(type_).validate_python(value) + +elif not TYPE_CHECKING: # TODO: condition is weird + + class RootModel(GenericModel, Generic[_T]): + """Used as a placeholder to easily convert runtime types to a Pydantic format + to provide validation. + + For example: + ```py + validated = RootModel[int](__root__="5").__root__ + # validated: 5 + ``` + """ + + __root__: _T + + def _validate_non_model_type(*, type_: type[_T], value: object) -> _T: + model = _create_pydantic_model(type_).validate(value) + return cast(_T, model.__root__) + + def _create_pydantic_model(type_: _T) -> Type[RootModel[_T]]: + return RootModel[type_] # type: ignore + + +class FinalRequestOptionsInput(TypedDict, total=False): + method: Required[str] + url: Required[str] + params: Query + headers: Headers + max_retries: int + timeout: float | Timeout | None + files: HttpxRequestFiles | None + idempotency_key: str + json_data: Body + extra_json: AnyMapping + follow_redirects: bool + + +@final +class FinalRequestOptions(pydantic.BaseModel): + method: str + url: str + params: Query = {} + headers: Union[Headers, NotGiven] = NotGiven() + max_retries: Union[int, NotGiven] = NotGiven() + timeout: Union[float, Timeout, None, NotGiven] = NotGiven() + files: Union[HttpxRequestFiles, None] = None + idempotency_key: Union[str, None] = None + post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven() + follow_redirects: Union[bool, None] = None + + # It should be noted that we cannot use `json` here as that would override + # a BaseModel method in an incompatible fashion. + json_data: Union[Body, None] = None + extra_json: Union[AnyMapping, None] = None + + if PYDANTIC_V2: + model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True) + else: + + class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] + arbitrary_types_allowed: bool = True + + def get_max_retries(self, max_retries: int) -> int: + if isinstance(self.max_retries, NotGiven): + return max_retries + return self.max_retries + + def _strip_raw_response_header(self) -> None: + if not is_given(self.headers): + return + + if self.headers.get(RAW_RESPONSE_HEADER): + self.headers = {**self.headers} + self.headers.pop(RAW_RESPONSE_HEADER) + + # override the `construct` method so that we can run custom transformations. 
+ # this is necessary as we don't want to do any actual runtime type checking + # (which means we can't use validators) but we do want to ensure that `NotGiven` + # values are not present + # + # type ignore required because we're adding explicit types to `**values` + @classmethod + def construct( # type: ignore + cls, + _fields_set: set[str] | None = None, + **values: Unpack[FinalRequestOptionsInput], + ) -> FinalRequestOptions: + kwargs: dict[str, Any] = { + # we unconditionally call `strip_not_given` on any value + # as it will just ignore any non-mapping types + key: strip_not_given(value) + for key, value in values.items() + } + if PYDANTIC_V2: + return super().model_construct(_fields_set, **kwargs) + return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated] + + if not TYPE_CHECKING: + # type checkers incorrectly complain about this assignment + model_construct = construct diff --git a/src/agentex/_qs.py b/src/agentex/_qs.py new file mode 100644 index 00000000..274320ca --- /dev/null +++ b/src/agentex/_qs.py @@ -0,0 +1,150 @@ +from __future__ import annotations + +from typing import Any, List, Tuple, Union, Mapping, TypeVar +from urllib.parse import parse_qs, urlencode +from typing_extensions import Literal, get_args + +from ._types import NOT_GIVEN, NotGiven, NotGivenOr +from ._utils import flatten + +_T = TypeVar("_T") + + +ArrayFormat = Literal["comma", "repeat", "indices", "brackets"] +NestedFormat = Literal["dots", "brackets"] + +PrimitiveData = Union[str, int, float, bool, None] +# this should be Data = Union[PrimitiveData, "List[Data]", "Tuple[Data]", "Mapping[str, Data]"] +# https://github.com/microsoft/pyright/issues/3555 +Data = Union[PrimitiveData, List[Any], Tuple[Any], "Mapping[str, Any]"] +Params = Mapping[str, Data] + + +class Querystring: + array_format: ArrayFormat + nested_format: NestedFormat + + def __init__( + self, + *, + array_format: ArrayFormat = "repeat", + nested_format: NestedFormat = "brackets", + ) -> None: + self.array_format = array_format + self.nested_format = nested_format + + def parse(self, query: str) -> Mapping[str, object]: + # Note: custom format syntax is not supported yet + return parse_qs(query) + + def stringify( + self, + params: Params, + *, + array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN, + nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN, + ) -> str: + return urlencode( + self.stringify_items( + params, + array_format=array_format, + nested_format=nested_format, + ) + ) + + def stringify_items( + self, + params: Params, + *, + array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN, + nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN, + ) -> list[tuple[str, str]]: + opts = Options( + qs=self, + array_format=array_format, + nested_format=nested_format, + ) + return flatten([self._stringify_item(key, value, opts) for key, value in params.items()]) + + def _stringify_item( + self, + key: str, + value: Data, + opts: Options, + ) -> list[tuple[str, str]]: + if isinstance(value, Mapping): + items: list[tuple[str, str]] = [] + nested_format = opts.nested_format + for subkey, subvalue in value.items(): + items.extend( + self._stringify_item( + # TODO: error if unknown format + f"{key}.{subkey}" if nested_format == "dots" else f"{key}[{subkey}]", + subvalue, + opts, + ) + ) + return items + + if isinstance(value, (list, tuple)): + array_format = opts.array_format + if array_format == "comma": + return [ + ( + key, + ",".join(self._primitive_value_to_str(item) for item in value if item is 
not None), + ), + ] + elif array_format == "repeat": + items = [] + for item in value: + items.extend(self._stringify_item(key, item, opts)) + return items + elif array_format == "indices": + raise NotImplementedError("The array indices format is not supported yet") + elif array_format == "brackets": + items = [] + key = key + "[]" + for item in value: + items.extend(self._stringify_item(key, item, opts)) + return items + else: + raise NotImplementedError( + f"Unknown array_format value: {array_format}, choose from {', '.join(get_args(ArrayFormat))}" + ) + + serialised = self._primitive_value_to_str(value) + if not serialised: + return [] + return [(key, serialised)] + + def _primitive_value_to_str(self, value: PrimitiveData) -> str: + # copied from httpx + if value is True: + return "true" + elif value is False: + return "false" + elif value is None: + return "" + return str(value) + + +_qs = Querystring() +parse = _qs.parse +stringify = _qs.stringify +stringify_items = _qs.stringify_items + + +class Options: + array_format: ArrayFormat + nested_format: NestedFormat + + def __init__( + self, + qs: Querystring = _qs, + *, + array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN, + nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN, + ) -> None: + self.array_format = qs.array_format if isinstance(array_format, NotGiven) else array_format + self.nested_format = qs.nested_format if isinstance(nested_format, NotGiven) else nested_format diff --git a/src/agentex/_resource.py b/src/agentex/_resource.py new file mode 100644 index 00000000..246bd65d --- /dev/null +++ b/src/agentex/_resource.py @@ -0,0 +1,43 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import time +from typing import TYPE_CHECKING + +import anyio + +if TYPE_CHECKING: + from ._client import Agentex, AsyncAgentex + + +class SyncAPIResource: + _client: Agentex + + def __init__(self, client: Agentex) -> None: + self._client = client + self._get = client.get + self._post = client.post + self._patch = client.patch + self._put = client.put + self._delete = client.delete + self._get_api_list = client.get_api_list + + def _sleep(self, seconds: float) -> None: + time.sleep(seconds) + + +class AsyncAPIResource: + _client: AsyncAgentex + + def __init__(self, client: AsyncAgentex) -> None: + self._client = client + self._get = client.get + self._post = client.post + self._patch = client.patch + self._put = client.put + self._delete = client.delete + self._get_api_list = client.get_api_list + + async def _sleep(self, seconds: float) -> None: + await anyio.sleep(seconds) diff --git a/src/agentex/_response.py b/src/agentex/_response.py new file mode 100644 index 00000000..be9bfbc3 --- /dev/null +++ b/src/agentex/_response.py @@ -0,0 +1,830 @@ +from __future__ import annotations + +import os +import inspect +import logging +import datetime +import functools +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Union, + Generic, + TypeVar, + Callable, + Iterator, + AsyncIterator, + cast, + overload, +) +from typing_extensions import Awaitable, ParamSpec, override, get_origin + +import anyio +import httpx +import pydantic + +from ._types import NoneType +from ._utils import is_given, extract_type_arg, is_annotated_type, is_type_alias_type, extract_type_var_from_base +from ._models import BaseModel, is_basemodel +from ._constants import RAW_RESPONSE_HEADER, OVERRIDE_CAST_TO_HEADER +from ._streaming import Stream, AsyncStream, 
is_stream_class_type, extract_stream_chunk_type +from ._exceptions import AgentexError, APIResponseValidationError + +if TYPE_CHECKING: + from ._models import FinalRequestOptions + from ._base_client import BaseClient + + +P = ParamSpec("P") +R = TypeVar("R") +_T = TypeVar("_T") +_APIResponseT = TypeVar("_APIResponseT", bound="APIResponse[Any]") +_AsyncAPIResponseT = TypeVar("_AsyncAPIResponseT", bound="AsyncAPIResponse[Any]") + +log: logging.Logger = logging.getLogger(__name__) + + +class BaseAPIResponse(Generic[R]): + _cast_to: type[R] + _client: BaseClient[Any, Any] + _parsed_by_type: dict[type[Any], Any] + _is_sse_stream: bool + _stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None + _options: FinalRequestOptions + + http_response: httpx.Response + + retries_taken: int + """The number of retries made. If no retries happened this will be `0`""" + + def __init__( + self, + *, + raw: httpx.Response, + cast_to: type[R], + client: BaseClient[Any, Any], + stream: bool, + stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, + options: FinalRequestOptions, + retries_taken: int = 0, + ) -> None: + self._cast_to = cast_to + self._client = client + self._parsed_by_type = {} + self._is_sse_stream = stream + self._stream_cls = stream_cls + self._options = options + self.http_response = raw + self.retries_taken = retries_taken + + @property + def headers(self) -> httpx.Headers: + return self.http_response.headers + + @property + def http_request(self) -> httpx.Request: + """Returns the httpx Request instance associated with the current response.""" + return self.http_response.request + + @property + def status_code(self) -> int: + return self.http_response.status_code + + @property + def url(self) -> httpx.URL: + """Returns the URL for which the request was made.""" + return self.http_response.url + + @property + def method(self) -> str: + return self.http_request.method + + @property + def http_version(self) -> str: + return self.http_response.http_version + + @property + def elapsed(self) -> datetime.timedelta: + """The time taken for the complete request/response cycle to complete.""" + return self.http_response.elapsed + + @property + def is_closed(self) -> bool: + """Whether or not the response body has been closed. + + If this is False then there is response data that has not been read yet. + You must either fully consume the response body or call `.close()` + before discarding the response to prevent resource leaks. + """ + return self.http_response.is_closed + + @override + def __repr__(self) -> str: + return ( + f"<{self.__class__.__name__} [{self.status_code} {self.http_response.reason_phrase}] type={self._cast_to}>" + ) + + def _parse(self, *, to: type[_T] | None = None) -> R | _T: + cast_to = to if to is not None else self._cast_to + + # unwrap `TypeAlias('Name', T)` -> `T` + if is_type_alias_type(cast_to): + cast_to = cast_to.__value__ # type: ignore[unreachable] + + # unwrap `Annotated[T, ...]` -> `T` + if cast_to and is_annotated_type(cast_to): + cast_to = extract_type_arg(cast_to, 0) + + origin = get_origin(cast_to) or cast_to + + if self._is_sse_stream: + if to: + if not is_stream_class_type(to): + raise TypeError(f"Expected custom parse type to be a subclass of {Stream} or {AsyncStream}") + + return cast( + _T, + to( + cast_to=extract_stream_chunk_type( + to, + failure_message="Expected custom stream type to be passed with a type argument, e.g. 
Stream[ChunkType]", + ), + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + if self._stream_cls: + return cast( + R, + self._stream_cls( + cast_to=extract_stream_chunk_type(self._stream_cls), + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + stream_cls = cast("type[Stream[Any]] | type[AsyncStream[Any]] | None", self._client._default_stream_cls) + if stream_cls is None: + raise MissingStreamClassError() + + return cast( + R, + stream_cls( + cast_to=cast_to, + response=self.http_response, + client=cast(Any, self._client), + ), + ) + + if cast_to is NoneType: + return cast(R, None) + + response = self.http_response + if cast_to == str: + return cast(R, response.text) + + if cast_to == bytes: + return cast(R, response.content) + + if cast_to == int: + return cast(R, int(response.text)) + + if cast_to == float: + return cast(R, float(response.text)) + + if cast_to == bool: + return cast(R, response.text.lower() == "true") + + if origin == APIResponse: + raise RuntimeError("Unexpected state - cast_to is `APIResponse`") + + if inspect.isclass(origin) and issubclass(origin, httpx.Response): + # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response + # and pass that class to our request functions. We cannot change the variance to be either + # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct + # the response class ourselves but that is something that should be supported directly in httpx + # as it would be easy to incorrectly construct the Response object due to the multitude of arguments. + if cast_to != httpx.Response: + raise ValueError(f"Subclasses of httpx.Response cannot be passed to `cast_to`") + return cast(R, response) + + if ( + inspect.isclass( + origin # pyright: ignore[reportUnknownArgumentType] + ) + and not issubclass(origin, BaseModel) + and issubclass(origin, pydantic.BaseModel) + ): + raise TypeError("Pydantic models must subclass our base model type, e.g. `from agentex import BaseModel`") + + if ( + cast_to is not object + and not origin is list + and not origin is dict + and not origin is Union + and not issubclass(origin, BaseModel) + ): + raise RuntimeError( + f"Unsupported type, expected {cast_to} to be a subclass of {BaseModel}, {dict}, {list}, {Union}, {NoneType}, {str} or {httpx.Response}." + ) + + # split is required to handle cases where additional information is included + # in the response, e.g. application/json; charset=utf-8 + content_type, *_ = response.headers.get("content-type", "*").split(";") + if not content_type.endswith("json"): + if is_basemodel(cast_to): + try: + data = response.json() + except Exception as exc: + log.debug("Could not read JSON from response data due to %s - %s", type(exc), exc) + else: + return self._client._process_response_data( + data=data, + cast_to=cast_to, # type: ignore + response=response, + ) + + if self._client._strict_response_validation: + raise APIResponseValidationError( + response=response, + message=f"Expected Content-Type response header to be `application/json` but received `{content_type}` instead.", + body=response.text, + ) + + # If the API responds with content that isn't JSON then we just return + # the (decoded) text without performing any parsing so that you can still + # handle the response however you need to. 
+ return response.text # type: ignore + + data = response.json() + + return self._client._process_response_data( + data=data, + cast_to=cast_to, # type: ignore + response=response, + ) + + +class APIResponse(BaseAPIResponse[R]): + @overload + def parse(self, *, to: type[_T]) -> _T: ... + + @overload + def parse(self) -> R: ... + + def parse(self, *, to: type[_T] | None = None) -> R | _T: + """Returns the rich python representation of this response's data. + + For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`. + + You can customise the type that the response is parsed into through + the `to` argument, e.g. + + ```py + from agentex import BaseModel + + + class MyModel(BaseModel): + foo: str + + + obj = response.parse(to=MyModel) + print(obj.foo) + ``` + + We support parsing: + - `BaseModel` + - `dict` + - `list` + - `Union` + - `str` + - `int` + - `float` + - `httpx.Response` + """ + cache_key = to if to is not None else self._cast_to + cached = self._parsed_by_type.get(cache_key) + if cached is not None: + return cached # type: ignore[no-any-return] + + if not self._is_sse_stream: + self.read() + + parsed = self._parse(to=to) + if is_given(self._options.post_parser): + parsed = self._options.post_parser(parsed) + + self._parsed_by_type[cache_key] = parsed + return parsed + + def read(self) -> bytes: + """Read and return the binary response content.""" + try: + return self.http_response.read() + except httpx.StreamConsumed as exc: + # The default error raised by httpx isn't very + # helpful in our case so we re-raise it with + # a different error message. + raise StreamAlreadyConsumed() from exc + + def text(self) -> str: + """Read and decode the response content into a string.""" + self.read() + return self.http_response.text + + def json(self) -> object: + """Read and decode the JSON response content.""" + self.read() + return self.http_response.json() + + def close(self) -> None: + """Close the response and release the connection. + + Automatically called if the response body is read to completion. + """ + self.http_response.close() + + def iter_bytes(self, chunk_size: int | None = None) -> Iterator[bytes]: + """ + A byte-iterator over the decoded response content. + + This automatically handles gzip, deflate and brotli encoded responses. + """ + for chunk in self.http_response.iter_bytes(chunk_size): + yield chunk + + def iter_text(self, chunk_size: int | None = None) -> Iterator[str]: + """A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + for chunk in self.http_response.iter_text(chunk_size): + yield chunk + + def iter_lines(self) -> Iterator[str]: + """Like `iter_text()` but will only yield chunks for each line""" + for chunk in self.http_response.iter_lines(): + yield chunk + + +class AsyncAPIResponse(BaseAPIResponse[R]): + @overload + async def parse(self, *, to: type[_T]) -> _T: ... + + @overload + async def parse(self) -> R: ... + + async def parse(self, *, to: type[_T] | None = None) -> R | _T: + """Returns the rich python representation of this response's data. + + For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`. + + You can customise the type that the response is parsed into through + the `to` argument, e.g. 
+ + ```py + from agentex import BaseModel + + + class MyModel(BaseModel): + foo: str + + + obj = response.parse(to=MyModel) + print(obj.foo) + ``` + + We support parsing: + - `BaseModel` + - `dict` + - `list` + - `Union` + - `str` + - `httpx.Response` + """ + cache_key = to if to is not None else self._cast_to + cached = self._parsed_by_type.get(cache_key) + if cached is not None: + return cached # type: ignore[no-any-return] + + if not self._is_sse_stream: + await self.read() + + parsed = self._parse(to=to) + if is_given(self._options.post_parser): + parsed = self._options.post_parser(parsed) + + self._parsed_by_type[cache_key] = parsed + return parsed + + async def read(self) -> bytes: + """Read and return the binary response content.""" + try: + return await self.http_response.aread() + except httpx.StreamConsumed as exc: + # the default error raised by httpx isn't very + # helpful in our case so we re-raise it with + # a different error message + raise StreamAlreadyConsumed() from exc + + async def text(self) -> str: + """Read and decode the response content into a string.""" + await self.read() + return self.http_response.text + + async def json(self) -> object: + """Read and decode the JSON response content.""" + await self.read() + return self.http_response.json() + + async def close(self) -> None: + """Close the response and release the connection. + + Automatically called if the response body is read to completion. + """ + await self.http_response.aclose() + + async def iter_bytes(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: + """ + A byte-iterator over the decoded response content. + + This automatically handles gzip, deflate and brotli encoded responses. + """ + async for chunk in self.http_response.aiter_bytes(chunk_size): + yield chunk + + async def iter_text(self, chunk_size: int | None = None) -> AsyncIterator[str]: + """A str-iterator over the decoded response content + that handles both gzip, deflate, etc but also detects the content's + string encoding. + """ + async for chunk in self.http_response.aiter_text(chunk_size): + yield chunk + + async def iter_lines(self) -> AsyncIterator[str]: + """Like `iter_text()` but will only yield chunks for each line""" + async for chunk in self.http_response.aiter_lines(): + yield chunk + + +class BinaryAPIResponse(APIResponse[bytes]): + """Subclass of APIResponse providing helpers for dealing with binary data. + + Note: If you want to stream the response data instead of eagerly reading it + all at once then you should use `.with_streaming_response` when making + the API request, e.g. `.with_streaming_response.get_binary_response()` + """ + + def write_to_file( + self, + file: str | os.PathLike[str], + ) -> None: + """Write the output to the given file. + + Accepts a filename or any path-like object, e.g. pathlib.Path + + Note: if you want to stream the data to the file instead of writing + all at once then you should use `.with_streaming_response` when making + the API request, e.g. `.with_streaming_response.get_binary_response()` + """ + with open(file, mode="wb") as f: + for data in self.iter_bytes(): + f.write(data) + + +class AsyncBinaryAPIResponse(AsyncAPIResponse[bytes]): + """Subclass of APIResponse providing helpers for dealing with binary data. + + Note: If you want to stream the response data instead of eagerly reading it + all at once then you should use `.with_streaming_response` when making + the API request, e.g. 
`.with_streaming_response.get_binary_response()` + """ + + async def write_to_file( + self, + file: str | os.PathLike[str], + ) -> None: + """Write the output to the given file. + + Accepts a filename or any path-like object, e.g. pathlib.Path + + Note: if you want to stream the data to the file instead of writing + all at once then you should use `.with_streaming_response` when making + the API request, e.g. `.with_streaming_response.get_binary_response()` + """ + path = anyio.Path(file) + async with await path.open(mode="wb") as f: + async for data in self.iter_bytes(): + await f.write(data) + + +class StreamedBinaryAPIResponse(APIResponse[bytes]): + def stream_to_file( + self, + file: str | os.PathLike[str], + *, + chunk_size: int | None = None, + ) -> None: + """Streams the output to the given file. + + Accepts a filename or any path-like object, e.g. pathlib.Path + """ + with open(file, mode="wb") as f: + for data in self.iter_bytes(chunk_size): + f.write(data) + + +class AsyncStreamedBinaryAPIResponse(AsyncAPIResponse[bytes]): + async def stream_to_file( + self, + file: str | os.PathLike[str], + *, + chunk_size: int | None = None, + ) -> None: + """Streams the output to the given file. + + Accepts a filename or any path-like object, e.g. pathlib.Path + """ + path = anyio.Path(file) + async with await path.open(mode="wb") as f: + async for data in self.iter_bytes(chunk_size): + await f.write(data) + + +class MissingStreamClassError(TypeError): + def __init__(self) -> None: + super().__init__( + "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `agentex._streaming` for reference", + ) + + +class StreamAlreadyConsumed(AgentexError): + """ + Attempted to read or stream content, but the content has already + been streamed. + + This can happen if you use a method like `.iter_lines()` and then attempt + to read th entire response body afterwards, e.g. + + ```py + response = await client.post(...) + async for line in response.iter_lines(): + ... # do something with `line` + + content = await response.read() + # ^ error + ``` + + If you want this behaviour you'll need to either manually accumulate the response + content or call `await response.read()` before iterating over the stream. + """ + + def __init__(self) -> None: + message = ( + "Attempted to read or stream some content, but the content has " + "already been streamed. " + "This could be due to attempting to stream the response " + "content more than once." + "\n\n" + "You can fix this by manually accumulating the response content while streaming " + "or by calling `.read()` before starting to stream." 
+ ) + super().__init__(message) + + +class ResponseContextManager(Generic[_APIResponseT]): + """Context manager for ensuring that a request is not made + until it is entered and that the response will always be closed + when the context manager exits + """ + + def __init__(self, request_func: Callable[[], _APIResponseT]) -> None: + self._request_func = request_func + self.__response: _APIResponseT | None = None + + def __enter__(self) -> _APIResponseT: + self.__response = self._request_func() + return self.__response + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if self.__response is not None: + self.__response.close() + + +class AsyncResponseContextManager(Generic[_AsyncAPIResponseT]): + """Context manager for ensuring that a request is not made + until it is entered and that the response will always be closed + when the context manager exits + """ + + def __init__(self, api_request: Awaitable[_AsyncAPIResponseT]) -> None: + self._api_request = api_request + self.__response: _AsyncAPIResponseT | None = None + + async def __aenter__(self) -> _AsyncAPIResponseT: + self.__response = await self._api_request + return self.__response + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if self.__response is not None: + await self.__response.close() + + +def to_streamed_response_wrapper(func: Callable[P, R]) -> Callable[P, ResponseContextManager[APIResponse[R]]]: + """Higher order function that takes one of our bound API methods and wraps it + to support streaming and returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> ResponseContextManager[APIResponse[R]]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + + kwargs["extra_headers"] = extra_headers + + make_request = functools.partial(func, *args, **kwargs) + + return ResponseContextManager(cast(Callable[[], APIResponse[R]], make_request)) + + return wrapped + + +def async_to_streamed_response_wrapper( + func: Callable[P, Awaitable[R]], +) -> Callable[P, AsyncResponseContextManager[AsyncAPIResponse[R]]]: + """Higher order function that takes one of our bound API methods and wraps it + to support streaming and returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncResponseContextManager[AsyncAPIResponse[R]]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + + kwargs["extra_headers"] = extra_headers + + make_request = func(*args, **kwargs) + + return AsyncResponseContextManager(cast(Awaitable[AsyncAPIResponse[R]], make_request)) + + return wrapped + + +def to_custom_streamed_response_wrapper( + func: Callable[P, object], + response_cls: type[_APIResponseT], +) -> Callable[P, ResponseContextManager[_APIResponseT]]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support streaming and returning the given response class directly. + + Note: the given `response_cls` *must* be concrete, e.g. 
`class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> ResponseContextManager[_APIResponseT]: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + make_request = functools.partial(func, *args, **kwargs) + + return ResponseContextManager(cast(Callable[[], _APIResponseT], make_request)) + + return wrapped + + +def async_to_custom_streamed_response_wrapper( + func: Callable[P, Awaitable[object]], + response_cls: type[_AsyncAPIResponseT], +) -> Callable[P, AsyncResponseContextManager[_AsyncAPIResponseT]]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support streaming and returning the given response class directly. + + Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncResponseContextManager[_AsyncAPIResponseT]: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "stream" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + make_request = func(*args, **kwargs) + + return AsyncResponseContextManager(cast(Awaitable[_AsyncAPIResponseT], make_request)) + + return wrapped + + +def to_raw_response_wrapper(func: Callable[P, R]) -> Callable[P, APIResponse[R]]: + """Higher order function that takes one of our bound API methods and wraps it + to support returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> APIResponse[R]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + + kwargs["extra_headers"] = extra_headers + + return cast(APIResponse[R], func(*args, **kwargs)) + + return wrapped + + +def async_to_raw_response_wrapper(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[AsyncAPIResponse[R]]]: + """Higher order function that takes one of our bound API methods and wraps it + to support returning the raw `APIResponse` object directly. + """ + + @functools.wraps(func) + async def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncAPIResponse[R]: + extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + + kwargs["extra_headers"] = extra_headers + + return cast(AsyncAPIResponse[R], await func(*args, **kwargs)) + + return wrapped + + +def to_custom_raw_response_wrapper( + func: Callable[P, object], + response_cls: type[_APIResponseT], +) -> Callable[P, _APIResponseT]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support returning the given response class directly. + + Note: the given `response_cls` *must* be concrete, e.g. 
`class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> _APIResponseT: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + return cast(_APIResponseT, func(*args, **kwargs)) + + return wrapped + + +def async_to_custom_raw_response_wrapper( + func: Callable[P, Awaitable[object]], + response_cls: type[_AsyncAPIResponseT], +) -> Callable[P, Awaitable[_AsyncAPIResponseT]]: + """Higher order function that takes one of our bound API methods and an `APIResponse` class + and wraps the method to support returning the given response class directly. + + Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])` + """ + + @functools.wraps(func) + def wrapped(*args: P.args, **kwargs: P.kwargs) -> Awaitable[_AsyncAPIResponseT]: + extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} + extra_headers[RAW_RESPONSE_HEADER] = "raw" + extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls + + kwargs["extra_headers"] = extra_headers + + return cast(Awaitable[_AsyncAPIResponseT], func(*args, **kwargs)) + + return wrapped + + +def extract_response_type(typ: type[BaseAPIResponse[Any]]) -> type: + """Given a type like `APIResponse[T]`, returns the generic type variable `T`. + + This also handles the case where a concrete subclass is given, e.g. + ```py + class MyResponse(APIResponse[bytes]): + ... + + extract_response_type(MyResponse) -> bytes + ``` + """ + return extract_type_var_from_base( + typ, + generic_bases=cast("tuple[type, ...]", (BaseAPIResponse, APIResponse, AsyncAPIResponse)), + index=0, + ) diff --git a/src/agentex/_streaming.py b/src/agentex/_streaming.py new file mode 100644 index 00000000..cdcb3280 --- /dev/null +++ b/src/agentex/_streaming.py @@ -0,0 +1,333 @@ +# Note: initially copied from https://github.com/florimondmanca/httpx-sse/blob/master/src/httpx_sse/_decoders.py +from __future__ import annotations + +import json +import inspect +from types import TracebackType +from typing import TYPE_CHECKING, Any, Generic, TypeVar, Iterator, AsyncIterator, cast +from typing_extensions import Self, Protocol, TypeGuard, override, get_origin, runtime_checkable + +import httpx + +from ._utils import extract_type_var_from_base + +if TYPE_CHECKING: + from ._client import Agentex, AsyncAgentex + + +_T = TypeVar("_T") + + +class Stream(Generic[_T]): + """Provides the core interface to iterate over a synchronous stream response.""" + + response: httpx.Response + + _decoder: SSEBytesDecoder + + def __init__( + self, + *, + cast_to: type[_T], + response: httpx.Response, + client: Agentex, + ) -> None: + self.response = response + self._cast_to = cast_to + self._client = client + self._decoder = client._make_sse_decoder() + self._iterator = self.__stream__() + + def __next__(self) -> _T: + return self._iterator.__next__() + + def __iter__(self) -> Iterator[_T]: + for item in self._iterator: + yield item + + def _iter_events(self) -> Iterator[ServerSentEvent]: + yield from self._decoder.iter_bytes(self.response.iter_bytes()) + + def __stream__(self) -> Iterator[_T]: + cast_to = cast(Any, self._cast_to) + response = self.response + process_data = self._client._process_response_data + iterator = self._iter_events() + + for sse in iterator: + yield process_data(data=sse.json(), cast_to=cast_to, 
response=response) + + # Ensure the entire stream is consumed + for _sse in iterator: + ... + + def __enter__(self) -> Self: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def close(self) -> None: + """ + Close the response and release the connection. + + Automatically called if the response body is read to completion. + """ + self.response.close() + + +class AsyncStream(Generic[_T]): + """Provides the core interface to iterate over an asynchronous stream response.""" + + response: httpx.Response + + _decoder: SSEDecoder | SSEBytesDecoder + + def __init__( + self, + *, + cast_to: type[_T], + response: httpx.Response, + client: AsyncAgentex, + ) -> None: + self.response = response + self._cast_to = cast_to + self._client = client + self._decoder = client._make_sse_decoder() + self._iterator = self.__stream__() + + async def __anext__(self) -> _T: + return await self._iterator.__anext__() + + async def __aiter__(self) -> AsyncIterator[_T]: + async for item in self._iterator: + yield item + + async def _iter_events(self) -> AsyncIterator[ServerSentEvent]: + async for sse in self._decoder.aiter_bytes(self.response.aiter_bytes()): + yield sse + + async def __stream__(self) -> AsyncIterator[_T]: + cast_to = cast(Any, self._cast_to) + response = self.response + process_data = self._client._process_response_data + iterator = self._iter_events() + + async for sse in iterator: + yield process_data(data=sse.json(), cast_to=cast_to, response=response) + + # Ensure the entire stream is consumed + async for _sse in iterator: + ... + + async def __aenter__(self) -> Self: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.close() + + async def close(self) -> None: + """ + Close the response and release the connection. + + Automatically called if the response body is read to completion. 
+ """ + await self.response.aclose() + + +class ServerSentEvent: + def __init__( + self, + *, + event: str | None = None, + data: str | None = None, + id: str | None = None, + retry: int | None = None, + ) -> None: + if data is None: + data = "" + + self._id = id + self._data = data + self._event = event or None + self._retry = retry + + @property + def event(self) -> str | None: + return self._event + + @property + def id(self) -> str | None: + return self._id + + @property + def retry(self) -> int | None: + return self._retry + + @property + def data(self) -> str: + return self._data + + def json(self) -> Any: + return json.loads(self.data) + + @override + def __repr__(self) -> str: + return f"ServerSentEvent(event={self.event}, data={self.data}, id={self.id}, retry={self.retry})" + + +class SSEDecoder: + _data: list[str] + _event: str | None + _retry: int | None + _last_event_id: str | None + + def __init__(self) -> None: + self._event = None + self._data = [] + self._last_event_id = None + self._retry = None + + def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]: + """Given an iterator that yields raw binary data, iterate over it & yield every event encountered""" + for chunk in self._iter_chunks(iterator): + # Split before decoding so splitlines() only uses \r and \n + for raw_line in chunk.splitlines(): + line = raw_line.decode("utf-8") + sse = self.decode(line) + if sse: + yield sse + + def _iter_chunks(self, iterator: Iterator[bytes]) -> Iterator[bytes]: + """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks""" + data = b"" + for chunk in iterator: + for line in chunk.splitlines(keepends=True): + data += line + if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")): + yield data + data = b"" + if data: + yield data + + async def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]: + """Given an iterator that yields raw binary data, iterate over it & yield every event encountered""" + async for chunk in self._aiter_chunks(iterator): + # Split before decoding so splitlines() only uses \r and \n + for raw_line in chunk.splitlines(): + line = raw_line.decode("utf-8") + sse = self.decode(line) + if sse: + yield sse + + async def _aiter_chunks(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[bytes]: + """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks""" + data = b"" + async for chunk in iterator: + for line in chunk.splitlines(keepends=True): + data += line + if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")): + yield data + data = b"" + if data: + yield data + + def decode(self, line: str) -> ServerSentEvent | None: + # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation # noqa: E501 + + if not line: + if not self._event and not self._data and not self._last_event_id and self._retry is None: + return None + + sse = ServerSentEvent( + event=self._event, + data="\n".join(self._data), + id=self._last_event_id, + retry=self._retry, + ) + + # NOTE: as per the SSE spec, do not reset last_event_id. 
+ self._event = None + self._data = [] + self._retry = None + + return sse + + if line.startswith(":"): + return None + + fieldname, _, value = line.partition(":") + + if value.startswith(" "): + value = value[1:] + + if fieldname == "event": + self._event = value + elif fieldname == "data": + self._data.append(value) + elif fieldname == "id": + if "\0" in value: + pass + else: + self._last_event_id = value + elif fieldname == "retry": + try: + self._retry = int(value) + except (TypeError, ValueError): + pass + else: + pass # Field is ignored. + + return None + + +@runtime_checkable +class SSEBytesDecoder(Protocol): + def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]: + """Given an iterator that yields raw binary data, iterate over it & yield every event encountered""" + ... + + def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]: + """Given an async iterator that yields raw binary data, iterate over it & yield every event encountered""" + ... + + +def is_stream_class_type(typ: type) -> TypeGuard[type[Stream[object]] | type[AsyncStream[object]]]: + """TypeGuard for determining whether or not the given type is a subclass of `Stream` / `AsyncStream`""" + origin = get_origin(typ) or typ + return inspect.isclass(origin) and issubclass(origin, (Stream, AsyncStream)) + + +def extract_stream_chunk_type( + stream_cls: type, + *, + failure_message: str | None = None, +) -> type: + """Given a type like `Stream[T]`, returns the generic type variable `T`. + + This also handles the case where a concrete subclass is given, e.g. + ```py + class MyStream(Stream[bytes]): + ... + + extract_stream_chunk_type(MyStream) -> bytes + ``` + """ + from ._base_client import Stream, AsyncStream + + return extract_type_var_from_base( + stream_cls, + index=0, + generic_bases=cast("tuple[type, ...]", (Stream, AsyncStream)), + failure_message=failure_message, + ) diff --git a/src/agentex/_types.py b/src/agentex/_types.py new file mode 100644 index 00000000..d01de4b1 --- /dev/null +++ b/src/agentex/_types.py @@ -0,0 +1,219 @@ +from __future__ import annotations + +from os import PathLike +from typing import ( + IO, + TYPE_CHECKING, + Any, + Dict, + List, + Type, + Tuple, + Union, + Mapping, + TypeVar, + Callable, + Optional, + Sequence, +) +from typing_extensions import Set, Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable + +import httpx +import pydantic +from httpx import URL, Proxy, Timeout, Response, BaseTransport, AsyncBaseTransport + +if TYPE_CHECKING: + from ._models import BaseModel + from ._response import APIResponse, AsyncAPIResponse + +Transport = BaseTransport +AsyncTransport = AsyncBaseTransport +Query = Mapping[str, object] +Body = object +AnyMapping = Mapping[str, object] +ModelT = TypeVar("ModelT", bound=pydantic.BaseModel) +_T = TypeVar("_T") + + +# Approximates httpx internal ProxiesTypes and RequestFiles types +# while adding support for `PathLike` instances +ProxiesDict = Dict["str | URL", Union[None, str, URL, Proxy]] +ProxiesTypes = Union[str, Proxy, ProxiesDict] +if TYPE_CHECKING: + Base64FileInput = Union[IO[bytes], PathLike[str]] + FileContent = Union[IO[bytes], bytes, PathLike[str]] +else: + Base64FileInput = Union[IO[bytes], PathLike] + FileContent = Union[IO[bytes], bytes, PathLike] # PathLike is not subscriptable in Python 3.8. 
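+# Illustrative `FileTypes` values (the filename and content type below are hypothetical):
+#   b"raw file content"
+#   ("report.pdf", open("report.pdf", "rb"))
+#   ("report.pdf", open("report.pdf", "rb"), "application/pdf")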
+FileTypes = Union[ + # file (or bytes) + FileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], FileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], FileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], +] +RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] + +# duplicate of the above but without our custom file support +HttpxFileContent = Union[IO[bytes], bytes] +HttpxFileTypes = Union[ + # file (or bytes) + HttpxFileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], HttpxFileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], HttpxFileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[Optional[str], HttpxFileContent, Optional[str], Mapping[str, str]], +] +HttpxRequestFiles = Union[Mapping[str, HttpxFileTypes], Sequence[Tuple[str, HttpxFileTypes]]] + +# Workaround to support (cast_to: Type[ResponseT]) -> ResponseT +# where ResponseT includes `None`. In order to support directly +# passing `None`, overloads would have to be defined for every +# method that uses `ResponseT` which would lead to an unacceptable +# amount of code duplication and make it unreadable. See _base_client.py +# for example usage. +# +# This unfortunately means that you will either have +# to import this type and pass it explicitly: +# +# from agentex import NoneType +# client.get('/foo', cast_to=NoneType) +# +# or build it yourself: +# +# client.get('/foo', cast_to=type(None)) +if TYPE_CHECKING: + NoneType: Type[None] +else: + NoneType = type(None) + + +class RequestOptions(TypedDict, total=False): + headers: Headers + max_retries: int + timeout: float | Timeout | None + params: Query + extra_json: AnyMapping + idempotency_key: str + follow_redirects: bool + + +# Sentinel class used until PEP 0661 is accepted +class NotGiven: + """ + A sentinel singleton class used to distinguish omitted keyword arguments + from those passed in with the value None (which may have different behavior). + + For example: + + ```py + def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ... + + + get(timeout=1) # 1s timeout + get(timeout=None) # No timeout + get() # Default timeout behavior, which may not be statically known at the method definition. 
+ ``` + """ + + def __bool__(self) -> Literal[False]: + return False + + @override + def __repr__(self) -> str: + return "NOT_GIVEN" + + +NotGivenOr = Union[_T, NotGiven] +NOT_GIVEN = NotGiven() + + +class Omit: + """In certain situations you need to be able to represent a case where a default value has + to be explicitly removed and `None` is not an appropriate substitute, for example: + + ```py + # as the default `Content-Type` header is `application/json` that will be sent + client.post("/upload/files", files={"file": b"my raw file content"}) + + # you can't explicitly override the header as it has to be dynamically generated + # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983' + client.post(..., headers={"Content-Type": "multipart/form-data"}) + + # instead you can remove the default `application/json` header by passing Omit + client.post(..., headers={"Content-Type": Omit()}) + ``` + """ + + def __bool__(self) -> Literal[False]: + return False + + +@runtime_checkable +class ModelBuilderProtocol(Protocol): + @classmethod + def build( + cls: type[_T], + *, + response: Response, + data: object, + ) -> _T: ... + + +Headers = Mapping[str, Union[str, Omit]] + + +class HeadersLikeProtocol(Protocol): + def get(self, __key: str) -> str | None: ... + + +HeadersLike = Union[Headers, HeadersLikeProtocol] + +ResponseT = TypeVar( + "ResponseT", + bound=Union[ + object, + str, + None, + "BaseModel", + List[Any], + Dict[str, Any], + Response, + ModelBuilderProtocol, + "APIResponse[Any]", + "AsyncAPIResponse[Any]", + ], +) + +StrBytesIntFloat = Union[str, bytes, int, float] + +# Note: copied from Pydantic +# https://github.com/pydantic/pydantic/blob/6f31f8f68ef011f84357330186f603ff295312fd/pydantic/main.py#L79 +IncEx: TypeAlias = Union[Set[int], Set[str], Mapping[int, Union["IncEx", bool]], Mapping[str, Union["IncEx", bool]]] + +PostParser = Callable[[Any], Any] + + +@runtime_checkable +class InheritsGeneric(Protocol): + """Represents a type that has inherited from `Generic` + + The `__orig_bases__` property can be used to determine the resolved + type variable for a given base class. 
+ """ + + __orig_bases__: tuple[_GenericAlias] + + +class _GenericAlias(Protocol): + __origin__: type[object] + + +class HttpxSendArgs(TypedDict, total=False): + auth: httpx.Auth + follow_redirects: bool diff --git a/src/agentex/_utils/__init__.py b/src/agentex/_utils/__init__.py new file mode 100644 index 00000000..d4fda26f --- /dev/null +++ b/src/agentex/_utils/__init__.py @@ -0,0 +1,57 @@ +from ._sync import asyncify as asyncify +from ._proxy import LazyProxy as LazyProxy +from ._utils import ( + flatten as flatten, + is_dict as is_dict, + is_list as is_list, + is_given as is_given, + is_tuple as is_tuple, + json_safe as json_safe, + lru_cache as lru_cache, + is_mapping as is_mapping, + is_tuple_t as is_tuple_t, + parse_date as parse_date, + is_iterable as is_iterable, + is_sequence as is_sequence, + coerce_float as coerce_float, + is_mapping_t as is_mapping_t, + removeprefix as removeprefix, + removesuffix as removesuffix, + extract_files as extract_files, + is_sequence_t as is_sequence_t, + required_args as required_args, + coerce_boolean as coerce_boolean, + coerce_integer as coerce_integer, + file_from_path as file_from_path, + parse_datetime as parse_datetime, + strip_not_given as strip_not_given, + deepcopy_minimal as deepcopy_minimal, + get_async_library as get_async_library, + maybe_coerce_float as maybe_coerce_float, + get_required_header as get_required_header, + maybe_coerce_boolean as maybe_coerce_boolean, + maybe_coerce_integer as maybe_coerce_integer, +) +from ._typing import ( + is_list_type as is_list_type, + is_union_type as is_union_type, + extract_type_arg as extract_type_arg, + is_iterable_type as is_iterable_type, + is_required_type as is_required_type, + is_annotated_type as is_annotated_type, + is_type_alias_type as is_type_alias_type, + strip_annotated_type as strip_annotated_type, + extract_type_var_from_base as extract_type_var_from_base, +) +from ._streams import consume_sync_iterator as consume_sync_iterator, consume_async_iterator as consume_async_iterator +from ._transform import ( + PropertyInfo as PropertyInfo, + transform as transform, + async_transform as async_transform, + maybe_transform as maybe_transform, + async_maybe_transform as async_maybe_transform, +) +from ._reflection import ( + function_has_argument as function_has_argument, + assert_signatures_in_sync as assert_signatures_in_sync, +) diff --git a/src/agentex/_utils/_logs.py b/src/agentex/_utils/_logs.py new file mode 100644 index 00000000..ecfc8eeb --- /dev/null +++ b/src/agentex/_utils/_logs.py @@ -0,0 +1,25 @@ +import os +import logging + +logger: logging.Logger = logging.getLogger("agentex") +httpx_logger: logging.Logger = logging.getLogger("httpx") + + +def _basic_config() -> None: + # e.g. 
[2023-10-05 14:12:26 - agentex._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK" + logging.basicConfig( + format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + + +def setup_logging() -> None: + env = os.environ.get("AGENTEX_LOG") + if env == "debug": + _basic_config() + logger.setLevel(logging.DEBUG) + httpx_logger.setLevel(logging.DEBUG) + elif env == "info": + _basic_config() + logger.setLevel(logging.INFO) + httpx_logger.setLevel(logging.INFO) diff --git a/src/agentex/_utils/_proxy.py b/src/agentex/_utils/_proxy.py new file mode 100644 index 00000000..0f239a33 --- /dev/null +++ b/src/agentex/_utils/_proxy.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import Generic, TypeVar, Iterable, cast +from typing_extensions import override + +T = TypeVar("T") + + +class LazyProxy(Generic[T], ABC): + """Implements data methods to pretend that an instance is another instance. + + This includes forwarding attribute access and other methods. + """ + + # Note: we have to special case proxies that themselves return proxies + # to support using a proxy as a catch-all for any random access, e.g. `proxy.foo.bar.baz` + + def __getattr__(self, attr: str) -> object: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return proxied # pyright: ignore + return getattr(proxied, attr) + + @override + def __repr__(self) -> str: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return proxied.__class__.__name__ + return repr(self.__get_proxied__()) + + @override + def __str__(self) -> str: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return proxied.__class__.__name__ + return str(proxied) + + @override + def __dir__(self) -> Iterable[str]: + proxied = self.__get_proxied__() + if isinstance(proxied, LazyProxy): + return [] + return proxied.__dir__() + + @property # type: ignore + @override + def __class__(self) -> type: # pyright: ignore + try: + proxied = self.__get_proxied__() + except Exception: + return type(self) + if issubclass(type(proxied), LazyProxy): + return type(proxied) + return proxied.__class__ + + def __get_proxied__(self) -> T: + return self.__load__() + + def __as_proxied__(self) -> T: + """Helper method that returns the current proxy, typed as the loaded object""" + return cast(T, self) + + @abstractmethod + def __load__(self) -> T: ... 
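To make the `LazyProxy` contract above concrete: a subclass only needs to implement `__load__`, and `__as_proxied__()` lets callers treat the proxy as if it were the loaded value while attribute access is forwarded through `__getattr__`. A minimal sketch of that pattern (the `SettingsProxy` class and the `SimpleNamespace` payload are purely illustrative, not part of the SDK):

```py
from types import SimpleNamespace

from agentex._utils import LazyProxy


class SettingsProxy(LazyProxy[SimpleNamespace]):
    """Illustrative proxy: nothing is loaded until an attribute is accessed."""

    def __load__(self) -> SimpleNamespace:
        # stand-in for an expensive import or I/O call
        return SimpleNamespace(debug=True)


settings = SettingsProxy().__as_proxied__()  # statically typed as SimpleNamespace
print(settings.debug)  # __load__ runs during this attribute access; prints True
```

The `ResourcesProxy` defined a little further down follows this exact pattern so that the `agentex.resources` module is only imported when it is first accessed.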
diff --git a/src/agentex/_utils/_reflection.py b/src/agentex/_utils/_reflection.py new file mode 100644 index 00000000..89aa712a --- /dev/null +++ b/src/agentex/_utils/_reflection.py @@ -0,0 +1,42 @@ +from __future__ import annotations + +import inspect +from typing import Any, Callable + + +def function_has_argument(func: Callable[..., Any], arg_name: str) -> bool: + """Returns whether or not the given function has a specific parameter""" + sig = inspect.signature(func) + return arg_name in sig.parameters + + +def assert_signatures_in_sync( + source_func: Callable[..., Any], + check_func: Callable[..., Any], + *, + exclude_params: set[str] = set(), +) -> None: + """Ensure that the signature of the second function matches the first.""" + + check_sig = inspect.signature(check_func) + source_sig = inspect.signature(source_func) + + errors: list[str] = [] + + for name, source_param in source_sig.parameters.items(): + if name in exclude_params: + continue + + custom_param = check_sig.parameters.get(name) + if not custom_param: + errors.append(f"the `{name}` param is missing") + continue + + if custom_param.annotation != source_param.annotation: + errors.append( + f"types for the `{name}` param do not match; source={repr(source_param.annotation)} checking={repr(custom_param.annotation)}" + ) + continue + + if errors: + raise AssertionError(f"{len(errors)} errors encountered when comparing signatures:\n\n" + "\n\n".join(errors)) diff --git a/src/agentex/_utils/_resources_proxy.py b/src/agentex/_utils/_resources_proxy.py new file mode 100644 index 00000000..6ab8fc88 --- /dev/null +++ b/src/agentex/_utils/_resources_proxy.py @@ -0,0 +1,24 @@ +from __future__ import annotations + +from typing import Any +from typing_extensions import override + +from ._proxy import LazyProxy + + +class ResourcesProxy(LazyProxy[Any]): + """A proxy for the `agentex.resources` module. + + This is used so that we can lazily import `agentex.resources` only when + needed *and* so that users can just import `agentex` and reference `agentex.resources` + """ + + @override + def __load__(self) -> Any: + import importlib + + mod = importlib.import_module("agentex.resources") + return mod + + +resources = ResourcesProxy().__as_proxied__() diff --git a/src/agentex/_utils/_streams.py b/src/agentex/_utils/_streams.py new file mode 100644 index 00000000..f4a0208f --- /dev/null +++ b/src/agentex/_utils/_streams.py @@ -0,0 +1,12 @@ +from typing import Any +from typing_extensions import Iterator, AsyncIterator + + +def consume_sync_iterator(iterator: Iterator[Any]) -> None: + for _ in iterator: + ... + + +async def consume_async_iterator(iterator: AsyncIterator[Any]) -> None: + async for _ in iterator: + ...
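`assert_signatures_in_sync` above is a small reflection helper: it walks the parameters of a source function and reports any that are missing from, or annotated differently in, the function being checked, raising a single `AssertionError` that lists every mismatch. A hypothetical sketch of how it behaves (the `create*` functions below are invented for illustration and are not part of the SDK):

```py
from agentex._utils import assert_signatures_in_sync


def create(name: str, *, timeout: int = 60, stream: bool = False) -> str: ...


def create_basic(name: str, *, timeout: int = 60) -> str: ...


# `stream` is intentionally absent from the second signature, so it is excluded;
# the remaining parameters match exactly and no error is raised.
assert_signatures_in_sync(create, create_basic, exclude_params={"stream"})


def create_drifted(name: bytes) -> str: ...


# the changed `name` annotation and the missing parameters are collected
# into one AssertionError describing every mismatch.
try:
    assert_signatures_in_sync(create, create_drifted)
except AssertionError as exc:
    print(exc)
```

Note that only parameters present on the source function are compared; extra parameters on the checked function are ignored.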
diff --git a/src/agentex/_utils/_sync.py b/src/agentex/_utils/_sync.py new file mode 100644 index 00000000..ad7ec71b --- /dev/null +++ b/src/agentex/_utils/_sync.py @@ -0,0 +1,86 @@ +from __future__ import annotations + +import sys +import asyncio +import functools +import contextvars +from typing import Any, TypeVar, Callable, Awaitable +from typing_extensions import ParamSpec + +import anyio +import sniffio +import anyio.to_thread + +T_Retval = TypeVar("T_Retval") +T_ParamSpec = ParamSpec("T_ParamSpec") + + +if sys.version_info >= (3, 9): + _asyncio_to_thread = asyncio.to_thread +else: + # backport of https://docs.python.org/3/library/asyncio-task.html#asyncio.to_thread + # for Python 3.8 support + async def _asyncio_to_thread( + func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs + ) -> Any: + """Asynchronously run function *func* in a separate thread. + + Any *args and **kwargs supplied for this function are directly passed + to *func*. Also, the current :class:`contextvars.Context` is propagated, + allowing context variables from the main thread to be accessed in the + separate thread. + + Returns a coroutine that can be awaited to get the eventual result of *func*. + """ + loop = asyncio.events.get_running_loop() + ctx = contextvars.copy_context() + func_call = functools.partial(ctx.run, func, *args, **kwargs) + return await loop.run_in_executor(None, func_call) + + +async def to_thread( + func: Callable[T_ParamSpec, T_Retval], /, *args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs +) -> T_Retval: + if sniffio.current_async_library() == "asyncio": + return await _asyncio_to_thread(func, *args, **kwargs) + + return await anyio.to_thread.run_sync( + functools.partial(func, *args, **kwargs), + ) + + +# inspired by `asyncer`, https://github.com/tiangolo/asyncer +def asyncify(function: Callable[T_ParamSpec, T_Retval]) -> Callable[T_ParamSpec, Awaitable[T_Retval]]: + """ + Take a blocking function and create an async one that receives the same + positional and keyword arguments. For Python version 3.9 and above, it uses + asyncio.to_thread to run the function in a separate thread. For Python version + 3.8, it uses a locally defined copy of the asyncio.to_thread function which was + introduced in Python 3.9. + + Usage: + + ```python + def blocking_func(arg1, arg2, kwarg1=None): + # blocking code + return result + + + result = await asyncify(blocking_func)(arg1, arg2, kwarg1=value1) + ``` + + ## Arguments + + `function`: a blocking regular callable (e.g. a function) + + ## Return + + An async function that takes the same positional and keyword arguments as the + original one, that when called runs the same original function in a thread worker + and returns the result.
+ """ + + async def wrapper(*args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs) -> T_Retval: + return await to_thread(function, *args, **kwargs) + + return wrapper diff --git a/src/agentex/_utils/_transform.py b/src/agentex/_utils/_transform.py new file mode 100644 index 00000000..b0cc20a7 --- /dev/null +++ b/src/agentex/_utils/_transform.py @@ -0,0 +1,447 @@ +from __future__ import annotations + +import io +import base64 +import pathlib +from typing import Any, Mapping, TypeVar, cast +from datetime import date, datetime +from typing_extensions import Literal, get_args, override, get_type_hints as _get_type_hints + +import anyio +import pydantic + +from ._utils import ( + is_list, + is_given, + lru_cache, + is_mapping, + is_iterable, +) +from .._files import is_base64_file_input +from ._typing import ( + is_list_type, + is_union_type, + extract_type_arg, + is_iterable_type, + is_required_type, + is_annotated_type, + strip_annotated_type, +) +from .._compat import get_origin, model_dump, is_typeddict + +_T = TypeVar("_T") + + +# TODO: support for drilling globals() and locals() +# TODO: ensure works correctly with forward references in all cases + + +PropertyFormat = Literal["iso8601", "base64", "custom"] + + +class PropertyInfo: + """Metadata class to be used in Annotated types to provide information about a given type. + + For example: + + class MyParams(TypedDict): + account_holder_name: Annotated[str, PropertyInfo(alias='accountHolderName')] + + This means that {'account_holder_name': 'Robert'} will be transformed to {'accountHolderName': 'Robert'} before being sent to the API. + """ + + alias: str | None + format: PropertyFormat | None + format_template: str | None + discriminator: str | None + + def __init__( + self, + *, + alias: str | None = None, + format: PropertyFormat | None = None, + format_template: str | None = None, + discriminator: str | None = None, + ) -> None: + self.alias = alias + self.format = format + self.format_template = format_template + self.discriminator = discriminator + + @override + def __repr__(self) -> str: + return f"{self.__class__.__name__}(alias='{self.alias}', format={self.format}, format_template='{self.format_template}', discriminator='{self.discriminator}')" + + +def maybe_transform( + data: object, + expected_type: object, +) -> Any | None: + """Wrapper over `transform()` that allows `None` to be passed. + + See `transform()` for more details. + """ + if data is None: + return None + return transform(data, expected_type) + + +# Wrapper over _transform_recursive providing fake types +def transform( + data: _T, + expected_type: object, +) -> _T: + """Transform dictionaries based off of type information from the given type, for example: + + ```py + class Params(TypedDict, total=False): + card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]] + + + transformed = transform({"card_id": ""}, Params) + # {'cardID': ''} + ``` + + Any keys / data that does not have type information given will be included as is. + + It should be noted that the transformations that this function does are not represented in the type system. + """ + transformed = _transform_recursive(data, annotation=cast(type, expected_type)) + return cast(_T, transformed) + + +@lru_cache(maxsize=8096) +def _get_annotated_type(type_: type) -> type | None: + """If the given type is an `Annotated` type then it is returned, if not `None` is returned. + + This also unwraps the type when applicable, e.g. 
`Required[Annotated[T, ...]]` + """ + if is_required_type(type_): + # Unwrap `Required[Annotated[T, ...]]` to `Annotated[T, ...]` + type_ = get_args(type_)[0] + + if is_annotated_type(type_): + return type_ + + return None + + +def _maybe_transform_key(key: str, type_: type) -> str: + """Transform the given `data` based on the annotations provided in `type_`. + + Note: this function only looks at `Annotated` types that contain `PropertyInfo` metadata. + """ + annotated_type = _get_annotated_type(type_) + if annotated_type is None: + # no `Annotated` definition for this type, no transformation needed + return key + + # ignore the first argument as it is the actual type + annotations = get_args(annotated_type)[1:] + for annotation in annotations: + if isinstance(annotation, PropertyInfo) and annotation.alias is not None: + return annotation.alias + + return key + + +def _no_transform_needed(annotation: type) -> bool: + return annotation == float or annotation == int + + +def _transform_recursive( + data: object, + *, + annotation: type, + inner_type: type | None = None, +) -> object: + """Transform the given data against the expected type. + + Args: + annotation: The direct type annotation given to the particular piece of data. + This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc + + inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type + is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in + the list can be transformed using the metadata from the container type. + + Defaults to the same value as the `annotation` argument. + """ + if inner_type is None: + inner_type = annotation + + stripped_type = strip_annotated_type(inner_type) + origin = get_origin(stripped_type) or stripped_type + if is_typeddict(stripped_type) and is_mapping(data): + return _transform_typeddict(data, stripped_type) + + if origin == dict and is_mapping(data): + items_type = get_args(stripped_type)[1] + return {key: _transform_recursive(value, annotation=items_type) for key, value in data.items()} + + if ( + # List[T] + (is_list_type(stripped_type) and is_list(data)) + # Iterable[T] + or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) + ): + # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually + # intended as an iterable, so we don't transform it. + if isinstance(data, dict): + return cast(object, data) + + inner_type = extract_type_arg(stripped_type, 0) + if _no_transform_needed(inner_type): + # for some types there is no need to transform anything, so we can get a small + # perf boost from skipping that work. + # + # but we still need to convert to a list to ensure the data is json-serializable + if is_list(data): + return data + return list(data) + + return [_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] + + if is_union_type(stripped_type): + # For union types we run the transformation against all subtypes to ensure that everything is transformed. + # + # TODO: there may be edge cases where the same normalized field name will transform to two different names + # in different subtypes. 
+ for subtype in get_args(stripped_type): + data = _transform_recursive(data, annotation=annotation, inner_type=subtype) + return data + + if isinstance(data, pydantic.BaseModel): + return model_dump(data, exclude_unset=True, mode="json") + + annotated_type = _get_annotated_type(annotation) + if annotated_type is None: + return data + + # ignore the first argument as it is the actual type + annotations = get_args(annotated_type)[1:] + for annotation in annotations: + if isinstance(annotation, PropertyInfo) and annotation.format is not None: + return _format_data(data, annotation.format, annotation.format_template) + + return data + + +def _format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object: + if isinstance(data, (date, datetime)): + if format_ == "iso8601": + return data.isoformat() + + if format_ == "custom" and format_template is not None: + return data.strftime(format_template) + + if format_ == "base64" and is_base64_file_input(data): + binary: str | bytes | None = None + + if isinstance(data, pathlib.Path): + binary = data.read_bytes() + elif isinstance(data, io.IOBase): + binary = data.read() + + if isinstance(binary, str): # type: ignore[unreachable] + binary = binary.encode() + + if not isinstance(binary, bytes): + raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}") + + return base64.b64encode(binary).decode("ascii") + + return data + + +def _transform_typeddict( + data: Mapping[str, object], + expected_type: type, +) -> Mapping[str, object]: + result: dict[str, object] = {} + annotations = get_type_hints(expected_type, include_extras=True) + for key, value in data.items(): + if not is_given(value): + # we don't need to include `NotGiven` values here as they'll + # be stripped out before the request is sent anyway + continue + + type_ = annotations.get(key) + if type_ is None: + # we do not have a type annotation for this field, leave it as is + result[key] = value + else: + result[_maybe_transform_key(key, type_)] = _transform_recursive(value, annotation=type_) + return result + + +async def async_maybe_transform( + data: object, + expected_type: object, +) -> Any | None: + """Wrapper over `async_transform()` that allows `None` to be passed. + + See `async_transform()` for more details. + """ + if data is None: + return None + return await async_transform(data, expected_type) + + +async def async_transform( + data: _T, + expected_type: object, +) -> _T: + """Transform dictionaries based off of type information from the given type, for example: + + ```py + class Params(TypedDict, total=False): + card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]] + + + transformed = transform({"card_id": ""}, Params) + # {'cardID': ''} + ``` + + Any keys / data that does not have type information given will be included as is. + + It should be noted that the transformations that this function does are not represented in the type system. + """ + transformed = await _async_transform_recursive(data, annotation=cast(type, expected_type)) + return cast(_T, transformed) + + +async def _async_transform_recursive( + data: object, + *, + annotation: type, + inner_type: type | None = None, +) -> object: + """Transform the given data against the expected type. + + Args: + annotation: The direct type annotation given to the particular piece of data. + This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc + + inner_type: If applicable, this is the "inside" type. 
This is useful in certain cases where the outside type + is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in + the list can be transformed using the metadata from the container type. + + Defaults to the same value as the `annotation` argument. + """ + if inner_type is None: + inner_type = annotation + + stripped_type = strip_annotated_type(inner_type) + origin = get_origin(stripped_type) or stripped_type + if is_typeddict(stripped_type) and is_mapping(data): + return await _async_transform_typeddict(data, stripped_type) + + if origin == dict and is_mapping(data): + items_type = get_args(stripped_type)[1] + return {key: _transform_recursive(value, annotation=items_type) for key, value in data.items()} + + if ( + # List[T] + (is_list_type(stripped_type) and is_list(data)) + # Iterable[T] + or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) + ): + # dicts are technically iterable, but it is an iterable on the keys of the dict and is not usually + # intended as an iterable, so we don't transform it. + if isinstance(data, dict): + return cast(object, data) + + inner_type = extract_type_arg(stripped_type, 0) + if _no_transform_needed(inner_type): + # for some types there is no need to transform anything, so we can get a small + # perf boost from skipping that work. + # + # but we still need to convert to a list to ensure the data is json-serializable + if is_list(data): + return data + return list(data) + + return [await _async_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] + + if is_union_type(stripped_type): + # For union types we run the transformation against all subtypes to ensure that everything is transformed. + # + # TODO: there may be edge cases where the same normalized field name will transform to two different names + # in different subtypes. 
+ for subtype in get_args(stripped_type): + data = await _async_transform_recursive(data, annotation=annotation, inner_type=subtype) + return data + + if isinstance(data, pydantic.BaseModel): + return model_dump(data, exclude_unset=True, mode="json") + + annotated_type = _get_annotated_type(annotation) + if annotated_type is None: + return data + + # ignore the first argument as it is the actual type + annotations = get_args(annotated_type)[1:] + for annotation in annotations: + if isinstance(annotation, PropertyInfo) and annotation.format is not None: + return await _async_format_data(data, annotation.format, annotation.format_template) + + return data + + +async def _async_format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object: + if isinstance(data, (date, datetime)): + if format_ == "iso8601": + return data.isoformat() + + if format_ == "custom" and format_template is not None: + return data.strftime(format_template) + + if format_ == "base64" and is_base64_file_input(data): + binary: str | bytes | None = None + + if isinstance(data, pathlib.Path): + binary = await anyio.Path(data).read_bytes() + elif isinstance(data, io.IOBase): + binary = data.read() + + if isinstance(binary, str): # type: ignore[unreachable] + binary = binary.encode() + + if not isinstance(binary, bytes): + raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}") + + return base64.b64encode(binary).decode("ascii") + + return data + + +async def _async_transform_typeddict( + data: Mapping[str, object], + expected_type: type, +) -> Mapping[str, object]: + result: dict[str, object] = {} + annotations = get_type_hints(expected_type, include_extras=True) + for key, value in data.items(): + if not is_given(value): + # we don't need to include `NotGiven` values here as they'll + # be stripped out before the request is sent anyway + continue + + type_ = annotations.get(key) + if type_ is None: + # we do not have a type annotation for this field, leave it as is + result[key] = value + else: + result[_maybe_transform_key(key, type_)] = await _async_transform_recursive(value, annotation=type_) + return result + + +@lru_cache(maxsize=8096) +def get_type_hints( + obj: Any, + globalns: dict[str, Any] | None = None, + localns: Mapping[str, Any] | None = None, + include_extras: bool = False, +) -> dict[str, Any]: + return _get_type_hints(obj, globalns=globalns, localns=localns, include_extras=include_extras) diff --git a/src/agentex/_utils/_typing.py b/src/agentex/_utils/_typing.py new file mode 100644 index 00000000..1bac9542 --- /dev/null +++ b/src/agentex/_utils/_typing.py @@ -0,0 +1,151 @@ +from __future__ import annotations + +import sys +import typing +import typing_extensions +from typing import Any, TypeVar, Iterable, cast +from collections import abc as _c_abc +from typing_extensions import ( + TypeIs, + Required, + Annotated, + get_args, + get_origin, +) + +from ._utils import lru_cache +from .._types import InheritsGeneric +from .._compat import is_union as _is_union + + +def is_annotated_type(typ: type) -> bool: + return get_origin(typ) == Annotated + + +def is_list_type(typ: type) -> bool: + return (get_origin(typ) or typ) == list + + +def is_iterable_type(typ: type) -> bool: + """If the given type is `typing.Iterable[T]`""" + origin = get_origin(typ) or typ + return origin == Iterable or origin == _c_abc.Iterable + + +def is_union_type(typ: type) -> bool: + return _is_union(get_origin(typ)) + + +def is_required_type(typ: type) -> bool: + return get_origin(typ) 
== Required + + +def is_typevar(typ: type) -> bool: + # type ignore is required because type checkers + # think this expression will always return False + return type(typ) == TypeVar # type: ignore + + +_TYPE_ALIAS_TYPES: tuple[type[typing_extensions.TypeAliasType], ...] = (typing_extensions.TypeAliasType,) +if sys.version_info >= (3, 12): + _TYPE_ALIAS_TYPES = (*_TYPE_ALIAS_TYPES, typing.TypeAliasType) + + +def is_type_alias_type(tp: Any, /) -> TypeIs[typing_extensions.TypeAliasType]: + """Return whether the provided argument is an instance of `TypeAliasType`. + + ```python + type Int = int + is_type_alias_type(Int) + # > True + Str = TypeAliasType("Str", str) + is_type_alias_type(Str) + # > True + ``` + """ + return isinstance(tp, _TYPE_ALIAS_TYPES) + + +# Extracts T from Annotated[T, ...] or from Required[Annotated[T, ...]] +@lru_cache(maxsize=8096) +def strip_annotated_type(typ: type) -> type: + if is_required_type(typ) or is_annotated_type(typ): + return strip_annotated_type(cast(type, get_args(typ)[0])) + + return typ + + +def extract_type_arg(typ: type, index: int) -> type: + args = get_args(typ) + try: + return cast(type, args[index]) + except IndexError as err: + raise RuntimeError(f"Expected type {typ} to have a type argument at index {index} but it did not") from err + + +def extract_type_var_from_base( + typ: type, + *, + generic_bases: tuple[type, ...], + index: int, + failure_message: str | None = None, +) -> type: + """Given a type like `Foo[T]`, returns the generic type variable `T`. + + This also handles the case where a concrete subclass is given, e.g. + ```py + class MyResponse(Foo[bytes]): + ... + + extract_type_var(MyResponse, bases=(Foo,), index=0) -> bytes + ``` + + And where a generic subclass is given: + ```py + _T = TypeVar('_T') + class MyResponse(Foo[_T]): + ... + + extract_type_var(MyResponse[bytes], bases=(Foo,), index=0) -> bytes + ``` + """ + cls = cast(object, get_origin(typ) or typ) + if cls in generic_bases: # pyright: ignore[reportUnnecessaryContains] + # we're given the class directly + return extract_type_arg(typ, index) + + # if a subclass is given + # --- + # this is needed as __orig_bases__ is not present in the typeshed stubs + # because it is intended to be for internal use only, however there does + # not seem to be a way to resolve generic TypeVars for inherited subclasses + # without using it. + if isinstance(cls, InheritsGeneric): + target_base_class: Any | None = None + for base in cls.__orig_bases__: + if base.__origin__ in generic_bases: + target_base_class = base + break + + if target_base_class is None: + raise RuntimeError( + "Could not find the generic base class;\n" + "This should never happen;\n" + f"Does {cls} inherit from one of {generic_bases} ?" + ) + + extracted = extract_type_arg(target_base_class, index) + if is_typevar(extracted): + # If the extracted type argument is itself a type variable + # then that means the subclass itself is generic, so we have + # to resolve the type argument from the class itself, not + # the base class. + # + # Note: if there is more than 1 type argument, the subclass could + # change the ordering of the type arguments, this is not currently + # supported. 
+ return extract_type_arg(typ, index) + + return extracted + + raise RuntimeError(failure_message or f"Could not resolve inner type variable at index {index} for {typ}") diff --git a/src/agentex/_utils/_utils.py b/src/agentex/_utils/_utils.py new file mode 100644 index 00000000..ea3cf3f2 --- /dev/null +++ b/src/agentex/_utils/_utils.py @@ -0,0 +1,422 @@ +from __future__ import annotations + +import os +import re +import inspect +import functools +from typing import ( + Any, + Tuple, + Mapping, + TypeVar, + Callable, + Iterable, + Sequence, + cast, + overload, +) +from pathlib import Path +from datetime import date, datetime +from typing_extensions import TypeGuard + +import sniffio + +from .._types import NotGiven, FileTypes, NotGivenOr, HeadersLike +from .._compat import parse_date as parse_date, parse_datetime as parse_datetime + +_T = TypeVar("_T") +_TupleT = TypeVar("_TupleT", bound=Tuple[object, ...]) +_MappingT = TypeVar("_MappingT", bound=Mapping[str, object]) +_SequenceT = TypeVar("_SequenceT", bound=Sequence[object]) +CallableT = TypeVar("CallableT", bound=Callable[..., Any]) + + +def flatten(t: Iterable[Iterable[_T]]) -> list[_T]: + return [item for sublist in t for item in sublist] + + +def extract_files( + # TODO: this needs to take Dict but variance issues..... + # create protocol type ? + query: Mapping[str, object], + *, + paths: Sequence[Sequence[str]], +) -> list[tuple[str, FileTypes]]: + """Recursively extract files from the given dictionary based on specified paths. + + A path may look like this ['foo', 'files', '', 'data']. + + Note: this mutates the given dictionary. + """ + files: list[tuple[str, FileTypes]] = [] + for path in paths: + files.extend(_extract_items(query, path, index=0, flattened_key=None)) + return files + + +def _extract_items( + obj: object, + path: Sequence[str], + *, + index: int, + flattened_key: str | None, +) -> list[tuple[str, FileTypes]]: + try: + key = path[index] + except IndexError: + if isinstance(obj, NotGiven): + # no value was provided - we can safely ignore + return [] + + # cyclical import + from .._files import assert_is_file_content + + # We have exhausted the path, return the entry we found. + assert flattened_key is not None + + if is_list(obj): + files: list[tuple[str, FileTypes]] = [] + for entry in obj: + assert_is_file_content(entry, key=flattened_key + "[]" if flattened_key else "") + files.append((flattened_key + "[]", cast(FileTypes, entry))) + return files + + assert_is_file_content(obj, key=flattened_key) + return [(flattened_key, cast(FileTypes, obj))] + + index += 1 + if is_dict(obj): + try: + # We are at the last entry in the path so we must remove the field + if (len(path)) == index: + item = obj.pop(key) + else: + item = obj[key] + except KeyError: + # Key was not present in the dictionary, this is not indicative of an error + # as the given path may not point to a required field. We also do not want + # to enforce required fields as the API may differ from the spec in some cases. + return [] + if flattened_key is None: + flattened_key = key + else: + flattened_key += f"[{key}]" + return _extract_items( + item, + path, + index=index, + flattened_key=flattened_key, + ) + elif is_list(obj): + if key != "": + return [] + + return flatten( + [ + _extract_items( + item, + path, + index=index, + flattened_key=flattened_key + "[]" if flattened_key is not None else "[]", + ) + for item in obj + ] + ) + + # Something unexpected was passed, just ignore it. 
+ return [] + + +def is_given(obj: NotGivenOr[_T]) -> TypeGuard[_T]: + return not isinstance(obj, NotGiven) + + +# Type safe methods for narrowing types with TypeVars. +# The default narrowing for isinstance(obj, dict) is dict[unknown, unknown], +# however this causes Pyright to rightfully report errors. As we know we don't +# care about the contained types, we can safely use `object` in its place. +# +# There are two separate functions defined, `is_*` and `is_*_t` for different use cases. +# `is_*` is for when you're dealing with an unknown input +# `is_*_t` is for when you're narrowing a known union type to a specific subset + + +def is_tuple(obj: object) -> TypeGuard[tuple[object, ...]]: + return isinstance(obj, tuple) + + +def is_tuple_t(obj: _TupleT | object) -> TypeGuard[_TupleT]: + return isinstance(obj, tuple) + + +def is_sequence(obj: object) -> TypeGuard[Sequence[object]]: + return isinstance(obj, Sequence) + + +def is_sequence_t(obj: _SequenceT | object) -> TypeGuard[_SequenceT]: + return isinstance(obj, Sequence) + + +def is_mapping(obj: object) -> TypeGuard[Mapping[str, object]]: + return isinstance(obj, Mapping) + + +def is_mapping_t(obj: _MappingT | object) -> TypeGuard[_MappingT]: + return isinstance(obj, Mapping) + + +def is_dict(obj: object) -> TypeGuard[dict[object, object]]: + return isinstance(obj, dict) + + +def is_list(obj: object) -> TypeGuard[list[object]]: + return isinstance(obj, list) + + +def is_iterable(obj: object) -> TypeGuard[Iterable[object]]: + return isinstance(obj, Iterable) + + +def deepcopy_minimal(item: _T) -> _T: + """Minimal reimplementation of copy.deepcopy() that will only copy certain object types: + + - mappings, e.g. `dict` + - list + + This is done for performance reasons. + """ + if is_mapping(item): + return cast(_T, {k: deepcopy_minimal(v) for k, v in item.items()}) + if is_list(item): + return cast(_T, [deepcopy_minimal(entry) for entry in item]) + return item + + +# copied from https://github.com/Rapptz/RoboDanny +def human_join(seq: Sequence[str], *, delim: str = ", ", final: str = "or") -> str: + size = len(seq) + if size == 0: + return "" + + if size == 1: + return seq[0] + + if size == 2: + return f"{seq[0]} {final} {seq[1]}" + + return delim.join(seq[:-1]) + f" {final} {seq[-1]}" + + +def quote(string: str) -> str: + """Add single quotation marks around the given string. Does *not* do any escaping.""" + return f"'{string}'" + + +def required_args(*variants: Sequence[str]) -> Callable[[CallableT], CallableT]: + """Decorator to enforce a given set of arguments or variants of arguments are passed to the decorated function. + + Useful for enforcing runtime validation of overloaded functions. + + Example usage: + ```py + @overload + def foo(*, a: str) -> str: ... + + + @overload + def foo(*, b: bool) -> str: ... + + + # This enforces the same constraints that a static type checker would + # i.e. that either a or b must be passed to the function + @required_args(["a"], ["b"]) + def foo(*, a: str | None = None, b: bool | None = None) -> str: ...
+ ``` + """ + + def inner(func: CallableT) -> CallableT: + params = inspect.signature(func).parameters + positional = [ + name + for name, param in params.items() + if param.kind + in { + param.POSITIONAL_ONLY, + param.POSITIONAL_OR_KEYWORD, + } + ] + + @functools.wraps(func) + def wrapper(*args: object, **kwargs: object) -> object: + given_params: set[str] = set() + for i, _ in enumerate(args): + try: + given_params.add(positional[i]) + except IndexError: + raise TypeError( + f"{func.__name__}() takes {len(positional)} argument(s) but {len(args)} were given" + ) from None + + for key in kwargs.keys(): + given_params.add(key) + + for variant in variants: + matches = all((param in given_params for param in variant)) + if matches: + break + else: # no break + if len(variants) > 1: + variations = human_join( + ["(" + human_join([quote(arg) for arg in variant], final="and") + ")" for variant in variants] + ) + msg = f"Missing required arguments; Expected either {variations} arguments to be given" + else: + assert len(variants) > 0 + + # TODO: this error message is not deterministic + missing = list(set(variants[0]) - given_params) + if len(missing) > 1: + msg = f"Missing required arguments: {human_join([quote(arg) for arg in missing])}" + else: + msg = f"Missing required argument: {quote(missing[0])}" + raise TypeError(msg) + return func(*args, **kwargs) + + return wrapper # type: ignore + + return inner + + +_K = TypeVar("_K") +_V = TypeVar("_V") + + +@overload +def strip_not_given(obj: None) -> None: ... + + +@overload +def strip_not_given(obj: Mapping[_K, _V | NotGiven]) -> dict[_K, _V]: ... + + +@overload +def strip_not_given(obj: object) -> object: ... + + +def strip_not_given(obj: object | None) -> object: + """Remove all top-level keys where their values are instances of `NotGiven`""" + if obj is None: + return None + + if not is_mapping(obj): + return obj + + return {key: value for key, value in obj.items() if not isinstance(value, NotGiven)} + + +def coerce_integer(val: str) -> int: + return int(val, base=10) + + +def coerce_float(val: str) -> float: + return float(val) + + +def coerce_boolean(val: str) -> bool: + return val == "true" or val == "1" or val == "on" + + +def maybe_coerce_integer(val: str | None) -> int | None: + if val is None: + return None + return coerce_integer(val) + + +def maybe_coerce_float(val: str | None) -> float | None: + if val is None: + return None + return coerce_float(val) + + +def maybe_coerce_boolean(val: str | None) -> bool | None: + if val is None: + return None + return coerce_boolean(val) + + +def removeprefix(string: str, prefix: str) -> str: + """Remove a prefix from a string. + + Backport of `str.removeprefix` for Python < 3.9 + """ + if string.startswith(prefix): + return string[len(prefix) :] + return string + + +def removesuffix(string: str, suffix: str) -> str: + """Remove a suffix from a string. 
+ + Backport of `str.removesuffix` for Python < 3.9 + """ + if string.endswith(suffix): + return string[: -len(suffix)] + return string + + +def file_from_path(path: str) -> FileTypes: + contents = Path(path).read_bytes() + file_name = os.path.basename(path) + return (file_name, contents) + + +def get_required_header(headers: HeadersLike, header: str) -> str: + lower_header = header.lower() + if is_mapping_t(headers): + # mypy doesn't understand the type narrowing here + for k, v in headers.items(): # type: ignore + if k.lower() == lower_header and isinstance(v, str): + return v + + # to deal with the case where the header looks like Stainless-Event-Id + intercaps_header = re.sub(r"([^\w])(\w)", lambda pat: pat.group(1) + pat.group(2).upper(), header.capitalize()) + + for normalized_header in [header, lower_header, header.upper(), intercaps_header]: + value = headers.get(normalized_header) + if value: + return value + + raise ValueError(f"Could not find {header} header") + + +def get_async_library() -> str: + try: + return sniffio.current_async_library() + except Exception: + return "false" + + +def lru_cache(*, maxsize: int | None = 128) -> Callable[[CallableT], CallableT]: + """A version of functools.lru_cache that retains the type signature + for the wrapped function arguments. + """ + wrapper = functools.lru_cache( # noqa: TID251 + maxsize=maxsize, + ) + return cast(Any, wrapper) # type: ignore[no-any-return] + + +def json_safe(data: object) -> object: + """Translates a mapping / sequence recursively in the same fashion + as `pydantic` v2's `model_dump(mode="json")`. + """ + if is_mapping(data): + return {json_safe(key): json_safe(value) for key, value in data.items()} + + if is_iterable(data) and not isinstance(data, (str, bytes, bytearray)): + return [json_safe(item) for item in data] + + if isinstance(data, (datetime, date)): + return data.isoformat() + + return data diff --git a/src/agentex/_version.py b/src/agentex/_version.py new file mode 100644 index 00000000..3b76e3fe --- /dev/null +++ b/src/agentex/_version.py @@ -0,0 +1,4 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +__title__ = "agentex" +__version__ = "0.0.1-alpha.1" # x-release-please-version diff --git a/src/agentex/lib/.keep b/src/agentex/lib/.keep new file mode 100644 index 00000000..5e2c99fd --- /dev/null +++ b/src/agentex/lib/.keep @@ -0,0 +1,4 @@ +File generated from our OpenAPI spec by Stainless. + +This directory can be used to store custom files to expand the SDK. +It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. \ No newline at end of file diff --git a/.circleci/config.yml b/src/agentex/py.typed similarity index 100% rename from .circleci/config.yml rename to src/agentex/py.typed diff --git a/src/agentex/resources/__init__.py b/src/agentex/resources/__init__.py new file mode 100644 index 00000000..d4758f92 --- /dev/null +++ b/src/agentex/resources/__init__.py @@ -0,0 +1,117 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from .echo import ( + EchoResource, + AsyncEchoResource, + EchoResourceWithRawResponse, + AsyncEchoResourceWithRawResponse, + EchoResourceWithStreamingResponse, + AsyncEchoResourceWithStreamingResponse, +) +from .spans import ( + SpansResource, + AsyncSpansResource, + SpansResourceWithRawResponse, + AsyncSpansResourceWithRawResponse, + SpansResourceWithStreamingResponse, + AsyncSpansResourceWithStreamingResponse, +) +from .tasks import ( + TasksResource, + AsyncTasksResource, + TasksResourceWithRawResponse, + AsyncTasksResourceWithRawResponse, + TasksResourceWithStreamingResponse, + AsyncTasksResourceWithStreamingResponse, +) +from .agents import ( + AgentsResource, + AsyncAgentsResource, + AgentsResourceWithRawResponse, + AsyncAgentsResourceWithRawResponse, + AgentsResourceWithStreamingResponse, + AsyncAgentsResourceWithStreamingResponse, +) +from .events import ( + EventsResource, + AsyncEventsResource, + EventsResourceWithRawResponse, + AsyncEventsResourceWithRawResponse, + EventsResourceWithStreamingResponse, + AsyncEventsResourceWithStreamingResponse, +) +from .states import ( + StatesResource, + AsyncStatesResource, + StatesResourceWithRawResponse, + AsyncStatesResourceWithRawResponse, + StatesResourceWithStreamingResponse, + AsyncStatesResourceWithStreamingResponse, +) +from .tracker import ( + TrackerResource, + AsyncTrackerResource, + TrackerResourceWithRawResponse, + AsyncTrackerResourceWithRawResponse, + TrackerResourceWithStreamingResponse, + AsyncTrackerResourceWithStreamingResponse, +) +from .messages import ( + MessagesResource, + AsyncMessagesResource, + MessagesResourceWithRawResponse, + AsyncMessagesResourceWithRawResponse, + MessagesResourceWithStreamingResponse, + AsyncMessagesResourceWithStreamingResponse, +) + +__all__ = [ + "EchoResource", + "AsyncEchoResource", + "EchoResourceWithRawResponse", + "AsyncEchoResourceWithRawResponse", + "EchoResourceWithStreamingResponse", + "AsyncEchoResourceWithStreamingResponse", + "AgentsResource", + "AsyncAgentsResource", + "AgentsResourceWithRawResponse", + "AsyncAgentsResourceWithRawResponse", + "AgentsResourceWithStreamingResponse", + "AsyncAgentsResourceWithStreamingResponse", + "TasksResource", + "AsyncTasksResource", + "TasksResourceWithRawResponse", + "AsyncTasksResourceWithRawResponse", + "TasksResourceWithStreamingResponse", + "AsyncTasksResourceWithStreamingResponse", + "MessagesResource", + "AsyncMessagesResource", + "MessagesResourceWithRawResponse", + "AsyncMessagesResourceWithRawResponse", + "MessagesResourceWithStreamingResponse", + "AsyncMessagesResourceWithStreamingResponse", + "SpansResource", + "AsyncSpansResource", + "SpansResourceWithRawResponse", + "AsyncSpansResourceWithRawResponse", + "SpansResourceWithStreamingResponse", + "AsyncSpansResourceWithStreamingResponse", + "StatesResource", + "AsyncStatesResource", + "StatesResourceWithRawResponse", + "AsyncStatesResourceWithRawResponse", + "StatesResourceWithStreamingResponse", + "AsyncStatesResourceWithStreamingResponse", + "EventsResource", + "AsyncEventsResource", + "EventsResourceWithRawResponse", + "AsyncEventsResourceWithRawResponse", + "EventsResourceWithStreamingResponse", + "AsyncEventsResourceWithStreamingResponse", + "TrackerResource", + "AsyncTrackerResource", + "TrackerResourceWithRawResponse", + "AsyncTrackerResourceWithRawResponse", + "TrackerResourceWithStreamingResponse", + "AsyncTrackerResourceWithStreamingResponse", +] diff --git a/src/agentex/resources/agents/__init__.py b/src/agentex/resources/agents/__init__.py new file mode 100644 index 
00000000..374345e0 --- /dev/null +++ b/src/agentex/resources/agents/__init__.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .name import ( + NameResource, + AsyncNameResource, + NameResourceWithRawResponse, + AsyncNameResourceWithRawResponse, + NameResourceWithStreamingResponse, + AsyncNameResourceWithStreamingResponse, +) +from .agents import ( + AgentsResource, + AsyncAgentsResource, + AgentsResourceWithRawResponse, + AsyncAgentsResourceWithRawResponse, + AgentsResourceWithStreamingResponse, + AsyncAgentsResourceWithStreamingResponse, +) + +__all__ = [ + "NameResource", + "AsyncNameResource", + "NameResourceWithRawResponse", + "AsyncNameResourceWithRawResponse", + "NameResourceWithStreamingResponse", + "AsyncNameResourceWithStreamingResponse", + "AgentsResource", + "AsyncAgentsResource", + "AgentsResourceWithRawResponse", + "AsyncAgentsResourceWithRawResponse", + "AgentsResourceWithStreamingResponse", + "AsyncAgentsResourceWithStreamingResponse", +] diff --git a/src/agentex/resources/agents/agents.py b/src/agentex/resources/agents/agents.py new file mode 100644 index 00000000..15b013d1 --- /dev/null +++ b/src/agentex/resources/agents/agents.py @@ -0,0 +1,469 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union, Optional +from typing_extensions import Literal + +import httpx + +from .name import ( + NameResource, + AsyncNameResource, + NameResourceWithRawResponse, + AsyncNameResourceWithRawResponse, + NameResourceWithStreamingResponse, + AsyncNameResourceWithStreamingResponse, +) +from ...types import agent_rpc_params, agent_list_params +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ...types.agent import Agent +from ..._base_client import make_request_options +from ...types.agent_list_response import AgentListResponse + +__all__ = ["AgentsResource", "AsyncAgentsResource"] + + +class AgentsResource(SyncAPIResource): + @cached_property + def name(self) -> NameResource: + return NameResource(self._client) + + @cached_property + def with_raw_response(self) -> AgentsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return AgentsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AgentsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return AgentsResourceWithStreamingResponse(self) + + def retrieve( + self, + agent_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Agent: + """ + Get an agent by its unique ID. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not agent_id: + raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}") + return self._get( + f"/agents/{agent_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Agent, + ) + + def list( + self, + *, + task_id: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AgentListResponse: + """ + List all registered agents, optionally filtered by query parameters. + + Args: + task_id: Task ID + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/agents", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform({"task_id": task_id}, agent_list_params.AgentListParams), + ), + cast_to=AgentListResponse, + ) + + def delete( + self, + agent_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Agent: + """ + Delete an agent by its unique ID. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not agent_id: + raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}") + return self._delete( + f"/agents/{agent_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Agent, + ) + + def rpc( + self, + agent_id: str, + *, + method: Literal["event/send", "task/create", "message/send", "task/cancel"], + params: agent_rpc_params.Params, + id: Union[int, str, None] | NotGiven = NOT_GIVEN, + jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> object: + """ + Handle JSON-RPC requests for an agent by its unique ID. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not agent_id: + raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}") + return self._post( + f"/agents/{agent_id}/rpc", + body=maybe_transform( + { + "method": method, + "params": params, + "id": id, + "jsonrpc": jsonrpc, + }, + agent_rpc_params.AgentRpcParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=object, + ) + + +class AsyncAgentsResource(AsyncAPIResource): + @cached_property + def name(self) -> AsyncNameResource: + return AsyncNameResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncAgentsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return AsyncAgentsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncAgentsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return AsyncAgentsResourceWithStreamingResponse(self) + + async def retrieve( + self, + agent_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Agent: + """ + Get an agent by its unique ID. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not agent_id: + raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}") + return await self._get( + f"/agents/{agent_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Agent, + ) + + async def list( + self, + *, + task_id: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AgentListResponse: + """ + List all registered agents, optionally filtered by query parameters. 
+ + Args: + task_id: Task ID + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/agents", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform({"task_id": task_id}, agent_list_params.AgentListParams), + ), + cast_to=AgentListResponse, + ) + + async def delete( + self, + agent_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Agent: + """ + Delete an agent by its unique ID. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not agent_id: + raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}") + return await self._delete( + f"/agents/{agent_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Agent, + ) + + async def rpc( + self, + agent_id: str, + *, + method: Literal["event/send", "task/create", "message/send", "task/cancel"], + params: agent_rpc_params.Params, + id: Union[int, str, None] | NotGiven = NOT_GIVEN, + jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> object: + """ + Handle JSON-RPC requests for an agent by its unique ID. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not agent_id: + raise ValueError(f"Expected a non-empty value for `agent_id` but received {agent_id!r}") + return await self._post( + f"/agents/{agent_id}/rpc", + body=await async_maybe_transform( + { + "method": method, + "params": params, + "id": id, + "jsonrpc": jsonrpc, + }, + agent_rpc_params.AgentRpcParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=object, + ) + + +class AgentsResourceWithRawResponse: + def __init__(self, agents: AgentsResource) -> None: + self._agents = agents + + self.retrieve = to_raw_response_wrapper( + agents.retrieve, + ) + self.list = to_raw_response_wrapper( + agents.list, + ) + self.delete = to_raw_response_wrapper( + agents.delete, + ) + self.rpc = to_raw_response_wrapper( + agents.rpc, + ) + + @cached_property + def name(self) -> NameResourceWithRawResponse: + return NameResourceWithRawResponse(self._agents.name) + + +class AsyncAgentsResourceWithRawResponse: + def __init__(self, agents: AsyncAgentsResource) -> None: + self._agents = agents + + self.retrieve = async_to_raw_response_wrapper( + agents.retrieve, + ) + self.list = async_to_raw_response_wrapper( + agents.list, + ) + self.delete = async_to_raw_response_wrapper( + agents.delete, + ) + self.rpc = async_to_raw_response_wrapper( + agents.rpc, + ) + + @cached_property + def name(self) -> AsyncNameResourceWithRawResponse: + return AsyncNameResourceWithRawResponse(self._agents.name) + + +class AgentsResourceWithStreamingResponse: + def __init__(self, agents: AgentsResource) -> None: + self._agents = agents + + self.retrieve = to_streamed_response_wrapper( + agents.retrieve, + ) + self.list = to_streamed_response_wrapper( + agents.list, + ) + self.delete = to_streamed_response_wrapper( + agents.delete, + ) + self.rpc = to_streamed_response_wrapper( + agents.rpc, + ) + + @cached_property + def name(self) -> NameResourceWithStreamingResponse: + return NameResourceWithStreamingResponse(self._agents.name) + + +class AsyncAgentsResourceWithStreamingResponse: + def __init__(self, agents: AsyncAgentsResource) -> None: + self._agents = agents + + self.retrieve = async_to_streamed_response_wrapper( + agents.retrieve, + ) + self.list = async_to_streamed_response_wrapper( + agents.list, + ) + self.delete = async_to_streamed_response_wrapper( + agents.delete, + ) + self.rpc = async_to_streamed_response_wrapper( + agents.rpc, + ) + + @cached_property + def name(self) -> AsyncNameResourceWithStreamingResponse: + return AsyncNameResourceWithStreamingResponse(self._agents.name) diff --git a/src/agentex/resources/agents/name.py b/src/agentex/resources/agents/name.py new file mode 100644 index 00000000..8906f03e --- /dev/null +++ b/src/agentex/resources/agents/name.py @@ -0,0 +1,350 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Union +from typing_extensions import Literal + +import httpx + +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ...types.agent import Agent +from ..._base_client import make_request_options +from ...types.agents import name_rpc_params + +__all__ = ["NameResource", "AsyncNameResource"] + + +class NameResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> NameResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return NameResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> NameResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return NameResourceWithStreamingResponse(self) + + def retrieve( + self, + agent_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Agent: + """ + Get an agent by its unique name. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not agent_name: + raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}") + return self._get( + f"/agents/name/{agent_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Agent, + ) + + def delete( + self, + agent_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Agent: + """ + Delete an agent by its unique name. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not agent_name: + raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}") + return self._delete( + f"/agents/name/{agent_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Agent, + ) + + def rpc( + self, + agent_name: str, + *, + method: Literal["event/send", "task/create", "message/send", "task/cancel"], + params: name_rpc_params.Params, + id: Union[int, str, None] | NotGiven = NOT_GIVEN, + jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> object: + """ + Handle JSON-RPC requests for an agent by its unique name. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not agent_name: + raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}") + return self._post( + f"/agents/name/{agent_name}/rpc", + body=maybe_transform( + { + "method": method, + "params": params, + "id": id, + "jsonrpc": jsonrpc, + }, + name_rpc_params.NameRpcParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=object, + ) + + +class AsyncNameResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncNameResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return AsyncNameResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncNameResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return AsyncNameResourceWithStreamingResponse(self) + + async def retrieve( + self, + agent_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Agent: + """ + Get an agent by its unique name. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not agent_name: + raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}") + return await self._get( + f"/agents/name/{agent_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Agent, + ) + + async def delete( + self, + agent_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Agent: + """ + Delete an agent by its unique name. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not agent_name: + raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}") + return await self._delete( + f"/agents/name/{agent_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Agent, + ) + + async def rpc( + self, + agent_name: str, + *, + method: Literal["event/send", "task/create", "message/send", "task/cancel"], + params: name_rpc_params.Params, + id: Union[int, str, None] | NotGiven = NOT_GIVEN, + jsonrpc: Literal["2.0"] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> object: + """ + Handle JSON-RPC requests for an agent by its unique name. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not agent_name: + raise ValueError(f"Expected a non-empty value for `agent_name` but received {agent_name!r}") + return await self._post( + f"/agents/name/{agent_name}/rpc", + body=await async_maybe_transform( + { + "method": method, + "params": params, + "id": id, + "jsonrpc": jsonrpc, + }, + name_rpc_params.NameRpcParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=object, + ) + + +class NameResourceWithRawResponse: + def __init__(self, name: NameResource) -> None: + self._name = name + + self.retrieve = to_raw_response_wrapper( + name.retrieve, + ) + self.delete = to_raw_response_wrapper( + name.delete, + ) + self.rpc = to_raw_response_wrapper( + name.rpc, + ) + + +class AsyncNameResourceWithRawResponse: + def __init__(self, name: AsyncNameResource) -> None: + self._name = name + + self.retrieve = async_to_raw_response_wrapper( + name.retrieve, + ) + self.delete = async_to_raw_response_wrapper( + name.delete, + ) + self.rpc = async_to_raw_response_wrapper( + name.rpc, + ) + + +class NameResourceWithStreamingResponse: + def __init__(self, name: NameResource) -> None: + self._name = name + + self.retrieve = to_streamed_response_wrapper( + name.retrieve, + ) + self.delete = to_streamed_response_wrapper( + name.delete, + ) + self.rpc = to_streamed_response_wrapper( + name.rpc, + ) + + +class AsyncNameResourceWithStreamingResponse: + def __init__(self, name: AsyncNameResource) -> None: + self._name = name + + self.retrieve = async_to_streamed_response_wrapper( + name.retrieve, + ) + self.delete = async_to_streamed_response_wrapper( + name.delete, + ) + self.rpc = async_to_streamed_response_wrapper( + name.rpc, + ) diff --git a/src/agentex/resources/echo.py b/src/agentex/resources/echo.py new file mode 100644 index 00000000..5966357a --- /dev/null +++ b/src/agentex/resources/echo.py @@ -0,0 +1,162 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from ..types import echo_send_params +from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from .._utils import maybe_transform, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from .._base_client import make_request_options + +__all__ = ["EchoResource", "AsyncEchoResource"] + + +class EchoResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> EchoResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return EchoResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> EchoResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return EchoResourceWithStreamingResponse(self) + + def send( + self, + *, + message: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> object: + """ + Echo + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/echo", + body=maybe_transform({"message": message}, echo_send_params.EchoSendParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=object, + ) + + +class AsyncEchoResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncEchoResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return AsyncEchoResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncEchoResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return AsyncEchoResourceWithStreamingResponse(self) + + async def send( + self, + *, + message: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
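+        # NOT_GIVEN is the SDK's sentinel for "argument not supplied": such values are
+        # omitted from the request entirely, whereas passing None explicitly is forwarded
+        # and is not equivalent to leaving the argument unset.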
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> object: + """ + Echo + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/echo", + body=await async_maybe_transform({"message": message}, echo_send_params.EchoSendParams), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=object, + ) + + +class EchoResourceWithRawResponse: + def __init__(self, echo: EchoResource) -> None: + self._echo = echo + + self.send = to_raw_response_wrapper( + echo.send, + ) + + +class AsyncEchoResourceWithRawResponse: + def __init__(self, echo: AsyncEchoResource) -> None: + self._echo = echo + + self.send = async_to_raw_response_wrapper( + echo.send, + ) + + +class EchoResourceWithStreamingResponse: + def __init__(self, echo: EchoResource) -> None: + self._echo = echo + + self.send = to_streamed_response_wrapper( + echo.send, + ) + + +class AsyncEchoResourceWithStreamingResponse: + def __init__(self, echo: AsyncEchoResource) -> None: + self._echo = echo + + self.send = async_to_streamed_response_wrapper( + echo.send, + ) diff --git a/src/agentex/resources/events.py b/src/agentex/resources/events.py new file mode 100644 index 00000000..79f6fa1b --- /dev/null +++ b/src/agentex/resources/events.py @@ -0,0 +1,294 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional + +import httpx + +from ..types import event_list_params +from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from .._utils import maybe_transform, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..types.event import Event +from .._base_client import make_request_options +from ..types.event_list_response import EventListResponse + +__all__ = ["EventsResource", "AsyncEventsResource"] + + +class EventsResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> EventsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return EventsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> EventsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return EventsResourceWithStreamingResponse(self) + + def retrieve( + self, + event_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Event: + """ + Get Event + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not event_id: + raise ValueError(f"Expected a non-empty value for `event_id` but received {event_id!r}") + return self._get( + f"/events/{event_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Event, + ) + + def list( + self, + *, + agent_id: str, + task_id: str, + last_processed_event_id: Optional[str] | NotGiven = NOT_GIVEN, + limit: Optional[int] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EventListResponse: + """ + List events for a specific task and agent. + + Optionally filter for events after a specific sequence ID. Results are ordered + by sequence_id. + + Args: + agent_id: The agent ID to filter events by + + task_id: The task ID to filter events by + + last_processed_event_id: Optional event ID to get events after this ID + + limit: Optional limit on number of results + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/events", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "agent_id": agent_id, + "task_id": task_id, + "last_processed_event_id": last_processed_event_id, + "limit": limit, + }, + event_list_params.EventListParams, + ), + ), + cast_to=EventListResponse, + ) + + +class AsyncEventsResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncEventsResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return AsyncEventsResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncEventsResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return AsyncEventsResourceWithStreamingResponse(self) + + async def retrieve( + self, + event_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Event: + """ + Get Event + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not event_id: + raise ValueError(f"Expected a non-empty value for `event_id` but received {event_id!r}") + return await self._get( + f"/events/{event_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Event, + ) + + async def list( + self, + *, + agent_id: str, + task_id: str, + last_processed_event_id: Optional[str] | NotGiven = NOT_GIVEN, + limit: Optional[int] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> EventListResponse: + """ + List events for a specific task and agent. + + Optionally filter for events after a specific sequence ID. Results are ordered + by sequence_id. + + Args: + agent_id: The agent ID to filter events by + + task_id: The task ID to filter events by + + last_processed_event_id: Optional event ID to get events after this ID + + limit: Optional limit on number of results + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/events", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "agent_id": agent_id, + "task_id": task_id, + "last_processed_event_id": last_processed_event_id, + "limit": limit, + }, + event_list_params.EventListParams, + ), + ), + cast_to=EventListResponse, + ) + + +class EventsResourceWithRawResponse: + def __init__(self, events: EventsResource) -> None: + self._events = events + + self.retrieve = to_raw_response_wrapper( + events.retrieve, + ) + self.list = to_raw_response_wrapper( + events.list, + ) + + +class AsyncEventsResourceWithRawResponse: + def __init__(self, events: AsyncEventsResource) -> None: + self._events = events + + self.retrieve = async_to_raw_response_wrapper( + events.retrieve, + ) + self.list = async_to_raw_response_wrapper( + events.list, + ) + + +class EventsResourceWithStreamingResponse: + def __init__(self, events: EventsResource) -> None: + self._events = events + + self.retrieve = to_streamed_response_wrapper( + events.retrieve, + ) + self.list = to_streamed_response_wrapper( + events.list, + ) + + +class AsyncEventsResourceWithStreamingResponse: + def __init__(self, events: AsyncEventsResource) -> None: + self._events = events + + self.retrieve = async_to_streamed_response_wrapper( + events.retrieve, + ) + self.list = async_to_streamed_response_wrapper( + events.list, + ) diff --git a/src/agentex/resources/messages/__init__.py 
b/src/agentex/resources/messages/__init__.py new file mode 100644 index 00000000..389b8cc1 --- /dev/null +++ b/src/agentex/resources/messages/__init__.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from .batch import ( + BatchResource, + AsyncBatchResource, + BatchResourceWithRawResponse, + AsyncBatchResourceWithRawResponse, + BatchResourceWithStreamingResponse, + AsyncBatchResourceWithStreamingResponse, +) +from .messages import ( + MessagesResource, + AsyncMessagesResource, + MessagesResourceWithRawResponse, + AsyncMessagesResourceWithRawResponse, + MessagesResourceWithStreamingResponse, + AsyncMessagesResourceWithStreamingResponse, +) + +__all__ = [ + "BatchResource", + "AsyncBatchResource", + "BatchResourceWithRawResponse", + "AsyncBatchResourceWithRawResponse", + "BatchResourceWithStreamingResponse", + "AsyncBatchResourceWithStreamingResponse", + "MessagesResource", + "AsyncMessagesResource", + "MessagesResourceWithRawResponse", + "AsyncMessagesResourceWithRawResponse", + "MessagesResourceWithStreamingResponse", + "AsyncMessagesResourceWithStreamingResponse", +] diff --git a/src/agentex/resources/messages/batch.py b/src/agentex/resources/messages/batch.py new file mode 100644 index 00000000..478c2df5 --- /dev/null +++ b/src/agentex/resources/messages/batch.py @@ -0,0 +1,270 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, Iterable + +import httpx + +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._base_client import make_request_options +from ...types.messages import batch_create_params, batch_update_params +from ...types.messages.batch_create_response import BatchCreateResponse +from ...types.messages.batch_update_response import BatchUpdateResponse + +__all__ = ["BatchResource", "AsyncBatchResource"] + + +class BatchResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> BatchResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return BatchResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> BatchResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return BatchResourceWithStreamingResponse(self) + + def create( + self, + *, + contents: Iterable[batch_create_params.Content], + task_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> BatchCreateResponse: + """ + Batch Create Messages + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/messages/batch", + body=maybe_transform( + { + "contents": contents, + "task_id": task_id, + }, + batch_create_params.BatchCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BatchCreateResponse, + ) + + def update( + self, + *, + task_id: str, + updates: Dict[str, batch_update_params.Updates], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> BatchUpdateResponse: + """ + Batch Update Messages + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._put( + "/messages/batch", + body=maybe_transform( + { + "task_id": task_id, + "updates": updates, + }, + batch_update_params.BatchUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BatchUpdateResponse, + ) + + +class AsyncBatchResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncBatchResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return AsyncBatchResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncBatchResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return AsyncBatchResourceWithStreamingResponse(self) + + async def create( + self, + *, + contents: Iterable[batch_create_params.Content], + task_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> BatchCreateResponse: + """ + Batch Create Messages + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/messages/batch", + body=await async_maybe_transform( + { + "contents": contents, + "task_id": task_id, + }, + batch_create_params.BatchCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BatchCreateResponse, + ) + + async def update( + self, + *, + task_id: str, + updates: Dict[str, batch_update_params.Updates], + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> BatchUpdateResponse: + """ + Batch Update Messages + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._put( + "/messages/batch", + body=await async_maybe_transform( + { + "task_id": task_id, + "updates": updates, + }, + batch_update_params.BatchUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=BatchUpdateResponse, + ) + + +class BatchResourceWithRawResponse: + def __init__(self, batch: BatchResource) -> None: + self._batch = batch + + self.create = to_raw_response_wrapper( + batch.create, + ) + self.update = to_raw_response_wrapper( + batch.update, + ) + + +class AsyncBatchResourceWithRawResponse: + def __init__(self, batch: AsyncBatchResource) -> None: + self._batch = batch + + self.create = async_to_raw_response_wrapper( + batch.create, + ) + self.update = async_to_raw_response_wrapper( + batch.update, + ) + + +class BatchResourceWithStreamingResponse: + def __init__(self, batch: BatchResource) -> None: + self._batch = batch + + self.create = to_streamed_response_wrapper( + batch.create, + ) + self.update = to_streamed_response_wrapper( + batch.update, + ) + + +class AsyncBatchResourceWithStreamingResponse: + def __init__(self, batch: AsyncBatchResource) -> None: + self._batch = batch + + self.create = async_to_streamed_response_wrapper( + batch.create, + ) + self.update = async_to_streamed_response_wrapper( + batch.update, + ) diff --git a/src/agentex/resources/messages/messages.py b/src/agentex/resources/messages/messages.py new file mode 100644 index 00000000..7745cf76 --- /dev/null +++ b/src/agentex/resources/messages/messages.py @@ -0,0 +1,491 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
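The `BatchResource` above issues bulk writes against `/messages/batch`, and it is reached through the messages resource defined just below (`MessagesResource.batch`). A rough sketch, assuming an `Agentex` client export; the task ID and the payload dictionaries are placeholders whose real schemas are `batch_create_params.Content` and `batch_update_params.Updates`:

from agentex import Agentex  # assumed top-level client export

client = Agentex()

# Create several messages for one task in a single POST, then patch one of them
# (keyed by message ID) in a single PUT.
created = client.messages.batch.create(
    task_id="task_123",  # hypothetical task ID
    contents=[{"content": "first"}, {"content": "second"}],  # placeholder Content items
)
updated = client.messages.batch.update(
    task_id="task_123",
    updates={"msg_1": {"content": "edited"}},  # placeholder Updates mapping
)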
+ +from __future__ import annotations + +from typing import Optional + +import httpx + +from .batch import ( + BatchResource, + AsyncBatchResource, + BatchResourceWithRawResponse, + AsyncBatchResourceWithRawResponse, + BatchResourceWithStreamingResponse, + AsyncBatchResourceWithStreamingResponse, +) +from ...types import StreamingStatus, message_list_params, message_create_params, message_update_params +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._utils import maybe_transform, async_maybe_transform +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._base_client import make_request_options +from ...types.task_message import TaskMessage +from ...types.streaming_status import StreamingStatus +from ...types.message_list_response import MessageListResponse + +__all__ = ["MessagesResource", "AsyncMessagesResource"] + + +class MessagesResource(SyncAPIResource): + @cached_property + def batch(self) -> BatchResource: + return BatchResource(self._client) + + @cached_property + def with_raw_response(self) -> MessagesResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return MessagesResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> MessagesResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return MessagesResourceWithStreamingResponse(self) + + def create( + self, + *, + content: message_create_params.Content, + task_id: str, + streaming_status: Optional[StreamingStatus] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskMessage: + """ + Create Message + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/messages", + body=maybe_transform( + { + "content": content, + "task_id": task_id, + "streaming_status": streaming_status, + }, + message_create_params.MessageCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskMessage, + ) + + def retrieve( + self, + message_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskMessage: + """ + Get Message + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not message_id: + raise ValueError(f"Expected a non-empty value for `message_id` but received {message_id!r}") + return self._get( + f"/messages/{message_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskMessage, + ) + + def update( + self, + message_id: str, + *, + content: message_update_params.Content, + task_id: str, + streaming_status: Optional[StreamingStatus] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskMessage: + """ + Update Message + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not message_id: + raise ValueError(f"Expected a non-empty value for `message_id` but received {message_id!r}") + return self._put( + f"/messages/{message_id}", + body=maybe_transform( + { + "content": content, + "task_id": task_id, + "streaming_status": streaming_status, + }, + message_update_params.MessageUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskMessage, + ) + + def list( + self, + *, + task_id: str, + limit: Optional[int] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> MessageListResponse: + """ + List Messages + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/messages", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "task_id": task_id, + "limit": limit, + }, + message_list_params.MessageListParams, + ), + ), + cast_to=MessageListResponse, + ) + + +class AsyncMessagesResource(AsyncAPIResource): + @cached_property + def batch(self) -> AsyncBatchResource: + return AsyncBatchResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncMessagesResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return AsyncMessagesResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncMessagesResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return AsyncMessagesResourceWithStreamingResponse(self) + + async def create( + self, + *, + content: message_create_params.Content, + task_id: str, + streaming_status: Optional[StreamingStatus] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskMessage: + """ + Create Message + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/messages", + body=await async_maybe_transform( + { + "content": content, + "task_id": task_id, + "streaming_status": streaming_status, + }, + message_create_params.MessageCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskMessage, + ) + + async def retrieve( + self, + message_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskMessage: + """ + Get Message + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not message_id: + raise ValueError(f"Expected a non-empty value for `message_id` but received {message_id!r}") + return await self._get( + f"/messages/{message_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskMessage, + ) + + async def update( + self, + message_id: str, + *, + content: message_update_params.Content, + task_id: str, + streaming_status: Optional[StreamingStatus] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskMessage: + """ + Update Message + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not message_id: + raise ValueError(f"Expected a non-empty value for `message_id` but received {message_id!r}") + return await self._put( + f"/messages/{message_id}", + body=await async_maybe_transform( + { + "content": content, + "task_id": task_id, + "streaming_status": streaming_status, + }, + message_update_params.MessageUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskMessage, + ) + + async def list( + self, + *, + task_id: str, + limit: Optional[int] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> MessageListResponse: + """ + List Messages + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/messages", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "task_id": task_id, + "limit": limit, + }, + message_list_params.MessageListParams, + ), + ), + cast_to=MessageListResponse, + ) + + +class MessagesResourceWithRawResponse: + def __init__(self, messages: MessagesResource) -> None: + self._messages = messages + + self.create = to_raw_response_wrapper( + messages.create, + ) + self.retrieve = to_raw_response_wrapper( + messages.retrieve, + ) + self.update = to_raw_response_wrapper( + messages.update, + ) + self.list = to_raw_response_wrapper( + messages.list, + ) + + @cached_property + def batch(self) -> BatchResourceWithRawResponse: + return BatchResourceWithRawResponse(self._messages.batch) + + +class AsyncMessagesResourceWithRawResponse: + def __init__(self, messages: AsyncMessagesResource) -> None: + self._messages = messages + + self.create = async_to_raw_response_wrapper( + messages.create, + ) + self.retrieve = async_to_raw_response_wrapper( + messages.retrieve, + ) + self.update = async_to_raw_response_wrapper( + messages.update, + ) + self.list = async_to_raw_response_wrapper( + messages.list, + ) + + @cached_property + def batch(self) -> AsyncBatchResourceWithRawResponse: + return AsyncBatchResourceWithRawResponse(self._messages.batch) + + +class MessagesResourceWithStreamingResponse: + def __init__(self, messages: MessagesResource) -> None: + self._messages = messages + + self.create = to_streamed_response_wrapper( + messages.create, + ) + self.retrieve = to_streamed_response_wrapper( + messages.retrieve, + ) + self.update = to_streamed_response_wrapper( + messages.update, + ) + self.list = to_streamed_response_wrapper( + messages.list, + ) + + @cached_property + def batch(self) -> BatchResourceWithStreamingResponse: + return BatchResourceWithStreamingResponse(self._messages.batch) + + +class AsyncMessagesResourceWithStreamingResponse: + def __init__(self, messages: AsyncMessagesResource) -> None: + self._messages = messages + + self.create = async_to_streamed_response_wrapper( + messages.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + messages.retrieve, + ) + self.update = async_to_streamed_response_wrapper( + messages.update, + ) + self.list = async_to_streamed_response_wrapper( + messages.list, + ) + + @cached_property + def batch(self) -> AsyncBatchResourceWithStreamingResponse: + return AsyncBatchResourceWithStreamingResponse(self._messages.batch) diff --git a/src/agentex/resources/spans.py b/src/agentex/resources/spans.py new file mode 100644 index 00000000..00327697 --- /dev/null +++ b/src/agentex/resources/spans.py @@ -0,0 +1,557 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
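Putting the messages resource above together: a synchronous sketch of the create/retrieve/update/list round trip, plus the `.with_raw_response` prefix documented on the resource. The `Agentex` client class, the literal IDs, and the content dictionaries are illustrative assumptions; the real content schemas live in `message_create_params` and `message_update_params`:

from agentex import Agentex  # assumed top-level client export

client = Agentex()

# Basic CRUD against /messages; payload dicts are placeholders for the real schemas.
message = client.messages.create(task_id="task_123", content={"text": "hi"})
client.messages.retrieve("msg_123")  # hypothetical message ID
client.messages.update("msg_123", task_id="task_123", content={"text": "hi again"})
recent = client.messages.list(task_id="task_123", limit=10)

# Prefixing a call with .with_raw_response returns the raw response object
# instead of the parsed TaskMessage model.
raw = client.messages.with_raw_response.retrieve("msg_123")

The async resource mirrors these methods one-for-one as awaitables, and `.with_streaming_response` behaves the same way without eagerly reading the response body.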
+ +from __future__ import annotations + +from typing import Dict, Union, Iterable, Optional +from datetime import datetime + +import httpx + +from ..types import span_list_params, span_create_params, span_update_params +from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from .._utils import maybe_transform, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..types.span import Span +from .._base_client import make_request_options +from ..types.span_list_response import SpanListResponse + +__all__ = ["SpansResource", "AsyncSpansResource"] + + +class SpansResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> SpansResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return SpansResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> SpansResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return SpansResourceWithStreamingResponse(self) + + def create( + self, + *, + name: str, + start_time: Union[str, datetime], + trace_id: str, + id: Optional[str] | NotGiven = NOT_GIVEN, + data: Union[Dict[str, object], Iterable[Dict[str, object]], None] | NotGiven = NOT_GIVEN, + end_time: Union[str, datetime, None] | NotGiven = NOT_GIVEN, + input: Union[Dict[str, object], Iterable[Dict[str, object]], None] | NotGiven = NOT_GIVEN, + output: Union[Dict[str, object], Iterable[Dict[str, object]], None] | NotGiven = NOT_GIVEN, + parent_id: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Span: + """ + Create a new span with the provided parameters + + Args: + name: Name that describes what operation this span represents + + start_time: The time the span started + + trace_id: Unique identifier for the trace this span belongs to + + id: Unique identifier for the span. If not provided, an ID will be generated. 
+ + data: Any additional metadata or context for the span + + end_time: The time the span ended + + input: Input parameters or data for the operation + + output: Output data resulting from the operation + + parent_id: ID of the parent span if this is a child span in a trace + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/spans", + body=maybe_transform( + { + "name": name, + "start_time": start_time, + "trace_id": trace_id, + "id": id, + "data": data, + "end_time": end_time, + "input": input, + "output": output, + "parent_id": parent_id, + }, + span_create_params.SpanCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Span, + ) + + def retrieve( + self, + span_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Span: + """ + Get a span by ID + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not span_id: + raise ValueError(f"Expected a non-empty value for `span_id` but received {span_id!r}") + return self._get( + f"/spans/{span_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Span, + ) + + def update( + self, + span_id: str, + *, + data: Union[Dict[str, object], Iterable[Dict[str, object]], None] | NotGiven = NOT_GIVEN, + end_time: Union[str, datetime, None] | NotGiven = NOT_GIVEN, + input: Union[Dict[str, object], Iterable[Dict[str, object]], None] | NotGiven = NOT_GIVEN, + name: Optional[str] | NotGiven = NOT_GIVEN, + output: Union[Dict[str, object], Iterable[Dict[str, object]], None] | NotGiven = NOT_GIVEN, + parent_id: Optional[str] | NotGiven = NOT_GIVEN, + start_time: Union[str, datetime, None] | NotGiven = NOT_GIVEN, + trace_id: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Span: + """ + Update a span with the provided output data and mark it as complete + + Args: + data: Any additional metadata or context for the span + + end_time: The time the span ended + + input: Input parameters or data for the operation + + name: Name that describes what operation this span represents + + output: Output data resulting from the operation + + parent_id: ID of the parent span if this is a child span in a trace + + start_time: The time the span started + + trace_id: Unique identifier for the trace this span belongs to + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not span_id: + raise ValueError(f"Expected a non-empty value for `span_id` but received {span_id!r}") + return self._patch( + f"/spans/{span_id}", + body=maybe_transform( + { + "data": data, + "end_time": end_time, + "input": input, + "name": name, + "output": output, + "parent_id": parent_id, + "start_time": start_time, + "trace_id": trace_id, + }, + span_update_params.SpanUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Span, + ) + + def list( + self, + *, + trace_id: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SpanListResponse: + """ + List all spans for a given trace ID + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/spans", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform({"trace_id": trace_id}, span_list_params.SpanListParams), + ), + cast_to=SpanListResponse, + ) + + +class AsyncSpansResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncSpansResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return AsyncSpansResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncSpansResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return AsyncSpansResourceWithStreamingResponse(self) + + async def create( + self, + *, + name: str, + start_time: Union[str, datetime], + trace_id: str, + id: Optional[str] | NotGiven = NOT_GIVEN, + data: Union[Dict[str, object], Iterable[Dict[str, object]], None] | NotGiven = NOT_GIVEN, + end_time: Union[str, datetime, None] | NotGiven = NOT_GIVEN, + input: Union[Dict[str, object], Iterable[Dict[str, object]], None] | NotGiven = NOT_GIVEN, + output: Union[Dict[str, object], Iterable[Dict[str, object]], None] | NotGiven = NOT_GIVEN, + parent_id: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Span: + """ + Create a new span with the provided parameters + + Args: + name: Name that describes what operation this span represents + + start_time: The time the span started + + trace_id: Unique identifier for the trace this span belongs to + + id: Unique identifier for the span. If not provided, an ID will be generated. + + data: Any additional metadata or context for the span + + end_time: The time the span ended + + input: Input parameters or data for the operation + + output: Output data resulting from the operation + + parent_id: ID of the parent span if this is a child span in a trace + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/spans", + body=await async_maybe_transform( + { + "name": name, + "start_time": start_time, + "trace_id": trace_id, + "id": id, + "data": data, + "end_time": end_time, + "input": input, + "output": output, + "parent_id": parent_id, + }, + span_create_params.SpanCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Span, + ) + + async def retrieve( + self, + span_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Span: + """ + Get a span by ID + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not span_id: + raise ValueError(f"Expected a non-empty value for `span_id` but received {span_id!r}") + return await self._get( + f"/spans/{span_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Span, + ) + + async def update( + self, + span_id: str, + *, + data: Union[Dict[str, object], Iterable[Dict[str, object]], None] | NotGiven = NOT_GIVEN, + end_time: Union[str, datetime, None] | NotGiven = NOT_GIVEN, + input: Union[Dict[str, object], Iterable[Dict[str, object]], None] | NotGiven = NOT_GIVEN, + name: Optional[str] | NotGiven = NOT_GIVEN, + output: Union[Dict[str, object], Iterable[Dict[str, object]], None] | NotGiven = NOT_GIVEN, + parent_id: Optional[str] | NotGiven = NOT_GIVEN, + start_time: Union[str, datetime, None] | NotGiven = NOT_GIVEN, + trace_id: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Span: + """ + Update a span with the provided output data and mark it as complete + + Args: + data: Any additional metadata or context for the span + + end_time: The time the span ended + + input: Input parameters or data for the operation + + name: Name that describes what operation this span represents + + output: Output data resulting from the operation + + parent_id: ID of the parent span if this is a child span in a trace + + start_time: The time the span started + + trace_id: Unique identifier for the trace this span belongs to + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not span_id: + raise ValueError(f"Expected a non-empty value for `span_id` but received {span_id!r}") + return await self._patch( + f"/spans/{span_id}", + body=await async_maybe_transform( + { + "data": data, + "end_time": end_time, + "input": input, + "name": name, + "output": output, + "parent_id": parent_id, + "start_time": start_time, + "trace_id": trace_id, + }, + span_update_params.SpanUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Span, + ) + + async def list( + self, + *, + trace_id: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> SpanListResponse: + """ + List all spans for a given trace ID + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/spans", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform({"trace_id": trace_id}, span_list_params.SpanListParams), + ), + cast_to=SpanListResponse, + ) + + +class SpansResourceWithRawResponse: + def __init__(self, spans: SpansResource) -> None: + self._spans = spans + + self.create = to_raw_response_wrapper( + spans.create, + ) + self.retrieve = to_raw_response_wrapper( + spans.retrieve, + ) + self.update = to_raw_response_wrapper( + spans.update, + ) + self.list = to_raw_response_wrapper( + spans.list, + ) + + +class AsyncSpansResourceWithRawResponse: + def __init__(self, spans: AsyncSpansResource) -> None: + self._spans = spans + + self.create = async_to_raw_response_wrapper( + spans.create, + ) + self.retrieve = async_to_raw_response_wrapper( + spans.retrieve, + ) + self.update = async_to_raw_response_wrapper( + spans.update, + ) + self.list = async_to_raw_response_wrapper( + spans.list, + ) + + +class SpansResourceWithStreamingResponse: + def __init__(self, spans: SpansResource) -> None: + self._spans = spans + + self.create = to_streamed_response_wrapper( + spans.create, + ) + self.retrieve = to_streamed_response_wrapper( + spans.retrieve, + ) + self.update = to_streamed_response_wrapper( + spans.update, + ) + self.list = to_streamed_response_wrapper( + spans.list, + ) + + +class AsyncSpansResourceWithStreamingResponse: + def __init__(self, spans: AsyncSpansResource) -> None: + self._spans = spans + + self.create = async_to_streamed_response_wrapper( + spans.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + spans.retrieve, + ) + self.update = async_to_streamed_response_wrapper( + spans.update, + ) + self.list = async_to_streamed_response_wrapper( + spans.list, + ) diff --git a/src/agentex/resources/states.py b/src/agentex/resources/states.py new file mode 100644 index 00000000..6147890a --- /dev/null +++ b/src/agentex/resources/states.py @@ -0,0 +1,544 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Dict, Optional + +import httpx + +from ..types import state_list_params, state_create_params, state_update_params +from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from .._utils import maybe_transform, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..types.state import State +from .._base_client import make_request_options +from ..types.state_list_response import StateListResponse + +__all__ = ["StatesResource", "AsyncStatesResource"] + + +class StatesResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> StatesResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return StatesResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> StatesResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return StatesResourceWithStreamingResponse(self) + + def create( + self, + *, + agent_id: str, + state: Dict[str, object], + task_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> State: + """ + Create Task State + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._post( + "/states", + body=maybe_transform( + { + "agent_id": agent_id, + "state": state, + "task_id": task_id, + }, + state_create_params.StateCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=State, + ) + + def retrieve( + self, + state_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> State: + """ + Get a state by its unique state ID. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not state_id: + raise ValueError(f"Expected a non-empty value for `state_id` but received {state_id!r}") + return self._get( + f"/states/{state_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=State, + ) + + def update( + self, + state_id: str, + *, + agent_id: str, + state: Dict[str, object], + task_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> State: + """ + Update Task State + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not state_id: + raise ValueError(f"Expected a non-empty value for `state_id` but received {state_id!r}") + return self._put( + f"/states/{state_id}", + body=maybe_transform( + { + "agent_id": agent_id, + "state": state, + "task_id": task_id, + }, + state_update_params.StateUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=State, + ) + + def list( + self, + *, + agent_id: Optional[str] | NotGiven = NOT_GIVEN, + task_id: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> StateListResponse: + """ + List all states, optionally filtered by query parameters. + + Args: + agent_id: Agent ID + + task_id: Task ID + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/states", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "agent_id": agent_id, + "task_id": task_id, + }, + state_list_params.StateListParams, + ), + ), + cast_to=StateListResponse, + ) + + def delete( + self, + state_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> State: + """ + Delete Task State + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not state_id: + raise ValueError(f"Expected a non-empty value for `state_id` but received {state_id!r}") + return self._delete( + f"/states/{state_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=State, + ) + + +class AsyncStatesResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncStatesResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return AsyncStatesResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncStatesResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return AsyncStatesResourceWithStreamingResponse(self) + + async def create( + self, + *, + agent_id: str, + state: Dict[str, object], + task_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> State: + """ + Create Task State + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._post( + "/states", + body=await async_maybe_transform( + { + "agent_id": agent_id, + "state": state, + "task_id": task_id, + }, + state_create_params.StateCreateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=State, + ) + + async def retrieve( + self, + state_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> State: + """ + Get a state by its unique state ID. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not state_id: + raise ValueError(f"Expected a non-empty value for `state_id` but received {state_id!r}") + return await self._get( + f"/states/{state_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=State, + ) + + async def update( + self, + state_id: str, + *, + agent_id: str, + state: Dict[str, object], + task_id: str, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> State: + """ + Update Task State + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not state_id: + raise ValueError(f"Expected a non-empty value for `state_id` but received {state_id!r}") + return await self._put( + f"/states/{state_id}", + body=await async_maybe_transform( + { + "agent_id": agent_id, + "state": state, + "task_id": task_id, + }, + state_update_params.StateUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=State, + ) + + async def list( + self, + *, + agent_id: Optional[str] | NotGiven = NOT_GIVEN, + task_id: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> StateListResponse: + """ + List all states, optionally filtered by query parameters. + + Args: + agent_id: Agent ID + + task_id: Task ID + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/states", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "agent_id": agent_id, + "task_id": task_id, + }, + state_list_params.StateListParams, + ), + ), + cast_to=StateListResponse, + ) + + async def delete( + self, + state_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> State: + """ + Delete Task State + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not state_id: + raise ValueError(f"Expected a non-empty value for `state_id` but received {state_id!r}") + return await self._delete( + f"/states/{state_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=State, + ) + + +class StatesResourceWithRawResponse: + def __init__(self, states: StatesResource) -> None: + self._states = states + + self.create = to_raw_response_wrapper( + states.create, + ) + self.retrieve = to_raw_response_wrapper( + states.retrieve, + ) + self.update = to_raw_response_wrapper( + states.update, + ) + self.list = to_raw_response_wrapper( + states.list, + ) + self.delete = to_raw_response_wrapper( + states.delete, + ) + + +class AsyncStatesResourceWithRawResponse: + def __init__(self, states: AsyncStatesResource) -> None: + self._states = states + + self.create = async_to_raw_response_wrapper( + states.create, + ) + self.retrieve = async_to_raw_response_wrapper( + states.retrieve, + ) + self.update = async_to_raw_response_wrapper( + states.update, + ) + self.list = async_to_raw_response_wrapper( + states.list, + ) + self.delete = async_to_raw_response_wrapper( + states.delete, + ) + + +class StatesResourceWithStreamingResponse: + def __init__(self, states: StatesResource) -> None: + self._states = states + + self.create = to_streamed_response_wrapper( + states.create, + ) + self.retrieve = to_streamed_response_wrapper( + states.retrieve, + ) + self.update = to_streamed_response_wrapper( + states.update, + ) + self.list = to_streamed_response_wrapper( + states.list, + ) + self.delete = to_streamed_response_wrapper( + states.delete, + ) + + +class AsyncStatesResourceWithStreamingResponse: + def __init__(self, states: AsyncStatesResource) -> None: + self._states = states + + self.create = async_to_streamed_response_wrapper( + states.create, + ) + self.retrieve = async_to_streamed_response_wrapper( + states.retrieve, + ) + self.update = async_to_streamed_response_wrapper( + states.update, + ) + self.list = async_to_streamed_response_wrapper( + states.list, + ) + self.delete = async_to_streamed_response_wrapper( + states.delete, + ) diff --git a/src/agentex/resources/tasks/__init__.py b/src/agentex/resources/tasks/__init__.py new file mode 100644 index 00000000..5a9c81b0 --- /dev/null +++ b/src/agentex/resources/tasks/__init__.py @@ -0,0 +1,33 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from .name import ( + NameResource, + AsyncNameResource, + NameResourceWithRawResponse, + AsyncNameResourceWithRawResponse, + NameResourceWithStreamingResponse, + AsyncNameResourceWithStreamingResponse, +) +from .tasks import ( + TasksResource, + AsyncTasksResource, + TasksResourceWithRawResponse, + AsyncTasksResourceWithRawResponse, + TasksResourceWithStreamingResponse, + AsyncTasksResourceWithStreamingResponse, +) + +__all__ = [ + "NameResource", + "AsyncNameResource", + "NameResourceWithRawResponse", + "AsyncNameResourceWithRawResponse", + "NameResourceWithStreamingResponse", + "AsyncNameResourceWithStreamingResponse", + "TasksResource", + "AsyncTasksResource", + "TasksResourceWithRawResponse", + "AsyncTasksResourceWithRawResponse", + "TasksResourceWithStreamingResponse", + "AsyncTasksResourceWithStreamingResponse", +] diff --git a/src/agentex/resources/tasks/name.py b/src/agentex/resources/tasks/name.py new file mode 100644 index 00000000..7cef42eb --- /dev/null +++ b/src/agentex/resources/tasks/name.py @@ -0,0 +1,324 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._streaming import Stream, AsyncStream +from ...types.task import Task +from ..._base_client import make_request_options + +__all__ = ["NameResource", "AsyncNameResource"] + + +class NameResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> NameResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return NameResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> NameResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return NameResourceWithStreamingResponse(self) + + def retrieve( + self, + task_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Task: + """ + Get a task by its unique name. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_name: + raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}") + return self._get( + f"/tasks/name/{task_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Task, + ) + + def delete( + self, + task_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Task: + """ + Delete a task by its unique name. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_name: + raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}") + return self._delete( + f"/tasks/name/{task_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Task, + ) + + def stream_events( + self, + task_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Stream[object]: + """ + Stream events for a task by its unique name. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_name: + raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}") + return self._get( + f"/tasks/name/{task_name}/stream", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=object, + stream=True, + stream_cls=Stream[object], + ) + + +class AsyncNameResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncNameResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return AsyncNameResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncNameResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. 
+ + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return AsyncNameResourceWithStreamingResponse(self) + + async def retrieve( + self, + task_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Task: + """ + Get a task by its unique name. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_name: + raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}") + return await self._get( + f"/tasks/name/{task_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Task, + ) + + async def delete( + self, + task_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Task: + """ + Delete a task by its unique name. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_name: + raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}") + return await self._delete( + f"/tasks/name/{task_name}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Task, + ) + + async def stream_events( + self, + task_name: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncStream[object]: + """ + Stream events for a task by its unique name. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_name: + raise ValueError(f"Expected a non-empty value for `task_name` but received {task_name!r}") + return await self._get( + f"/tasks/name/{task_name}/stream", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=object, + stream=True, + stream_cls=AsyncStream[object], + ) + + +class NameResourceWithRawResponse: + def __init__(self, name: NameResource) -> None: + self._name = name + + self.retrieve = to_raw_response_wrapper( + name.retrieve, + ) + self.delete = to_raw_response_wrapper( + name.delete, + ) + self.stream_events = to_raw_response_wrapper( + name.stream_events, + ) + + +class AsyncNameResourceWithRawResponse: + def __init__(self, name: AsyncNameResource) -> None: + self._name = name + + self.retrieve = async_to_raw_response_wrapper( + name.retrieve, + ) + self.delete = async_to_raw_response_wrapper( + name.delete, + ) + self.stream_events = async_to_raw_response_wrapper( + name.stream_events, + ) + + +class NameResourceWithStreamingResponse: + def __init__(self, name: NameResource) -> None: + self._name = name + + self.retrieve = to_streamed_response_wrapper( + name.retrieve, + ) + self.delete = to_streamed_response_wrapper( + name.delete, + ) + self.stream_events = to_streamed_response_wrapper( + name.stream_events, + ) + + +class AsyncNameResourceWithStreamingResponse: + def __init__(self, name: AsyncNameResource) -> None: + self._name = name + + self.retrieve = async_to_streamed_response_wrapper( + name.retrieve, + ) + self.delete = async_to_streamed_response_wrapper( + name.delete, + ) + self.stream_events = async_to_streamed_response_wrapper( + name.stream_events, + ) diff --git a/src/agentex/resources/tasks/tasks.py b/src/agentex/resources/tasks/tasks.py new file mode 100644 index 00000000..ba8161ed --- /dev/null +++ b/src/agentex/resources/tasks/tasks.py @@ -0,0 +1,407 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import httpx + +from .name import ( + NameResource, + AsyncNameResource, + NameResourceWithRawResponse, + AsyncNameResourceWithRawResponse, + NameResourceWithStreamingResponse, + AsyncNameResourceWithStreamingResponse, +) +from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from ..._compat import cached_property +from ..._resource import SyncAPIResource, AsyncAPIResource +from ..._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from ..._streaming import Stream, AsyncStream +from ...types.task import Task +from ..._base_client import make_request_options +from ...types.task_list_response import TaskListResponse + +__all__ = ["TasksResource", "AsyncTasksResource"] + + +class TasksResource(SyncAPIResource): + @cached_property + def name(self) -> NameResource: + return NameResource(self._client) + + @cached_property + def with_raw_response(self) -> TasksResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. 
+ + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return TasksResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> TasksResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return TasksResourceWithStreamingResponse(self) + + def retrieve( + self, + task_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Task: + """ + Get a task by its unique ID. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_id: + raise ValueError(f"Expected a non-empty value for `task_id` but received {task_id!r}") + return self._get( + f"/tasks/{task_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Task, + ) + + def list( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskListResponse: + """List all tasks.""" + return self._get( + "/tasks", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskListResponse, + ) + + def delete( + self, + task_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Task: + """ + Delete a task by its unique ID. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_id: + raise ValueError(f"Expected a non-empty value for `task_id` but received {task_id!r}") + return self._delete( + f"/tasks/{task_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Task, + ) + + def stream_events( + self, + task_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Stream[object]: + """ + Stream events for a task by its unique ID. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_id: + raise ValueError(f"Expected a non-empty value for `task_id` but received {task_id!r}") + return self._get( + f"/tasks/{task_id}/stream", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=object, + stream=True, + stream_cls=Stream[object], + ) + + +class AsyncTasksResource(AsyncAPIResource): + @cached_property + def name(self) -> AsyncNameResource: + return AsyncNameResource(self._client) + + @cached_property + def with_raw_response(self) -> AsyncTasksResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return AsyncTasksResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncTasksResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return AsyncTasksResourceWithStreamingResponse(self) + + async def retrieve( + self, + task_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Task: + """ + Get a task by its unique ID. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_id: + raise ValueError(f"Expected a non-empty value for `task_id` but received {task_id!r}") + return await self._get( + f"/tasks/{task_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Task, + ) + + async def list( + self, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TaskListResponse: + """List all tasks.""" + return await self._get( + "/tasks", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=TaskListResponse, + ) + + async def delete( + self, + task_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> Task: + """ + Delete a task by its unique ID. + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_id: + raise ValueError(f"Expected a non-empty value for `task_id` but received {task_id!r}") + return await self._delete( + f"/tasks/{task_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=Task, + ) + + async def stream_events( + self, + task_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AsyncStream[object]: + """ + Stream events for a task by its unique ID. 
+ + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not task_id: + raise ValueError(f"Expected a non-empty value for `task_id` but received {task_id!r}") + return await self._get( + f"/tasks/{task_id}/stream", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=object, + stream=True, + stream_cls=AsyncStream[object], + ) + + +class TasksResourceWithRawResponse: + def __init__(self, tasks: TasksResource) -> None: + self._tasks = tasks + + self.retrieve = to_raw_response_wrapper( + tasks.retrieve, + ) + self.list = to_raw_response_wrapper( + tasks.list, + ) + self.delete = to_raw_response_wrapper( + tasks.delete, + ) + self.stream_events = to_raw_response_wrapper( + tasks.stream_events, + ) + + @cached_property + def name(self) -> NameResourceWithRawResponse: + return NameResourceWithRawResponse(self._tasks.name) + + +class AsyncTasksResourceWithRawResponse: + def __init__(self, tasks: AsyncTasksResource) -> None: + self._tasks = tasks + + self.retrieve = async_to_raw_response_wrapper( + tasks.retrieve, + ) + self.list = async_to_raw_response_wrapper( + tasks.list, + ) + self.delete = async_to_raw_response_wrapper( + tasks.delete, + ) + self.stream_events = async_to_raw_response_wrapper( + tasks.stream_events, + ) + + @cached_property + def name(self) -> AsyncNameResourceWithRawResponse: + return AsyncNameResourceWithRawResponse(self._tasks.name) + + +class TasksResourceWithStreamingResponse: + def __init__(self, tasks: TasksResource) -> None: + self._tasks = tasks + + self.retrieve = to_streamed_response_wrapper( + tasks.retrieve, + ) + self.list = to_streamed_response_wrapper( + tasks.list, + ) + self.delete = to_streamed_response_wrapper( + tasks.delete, + ) + self.stream_events = to_streamed_response_wrapper( + tasks.stream_events, + ) + + @cached_property + def name(self) -> NameResourceWithStreamingResponse: + return NameResourceWithStreamingResponse(self._tasks.name) + + +class AsyncTasksResourceWithStreamingResponse: + def __init__(self, tasks: AsyncTasksResource) -> None: + self._tasks = tasks + + self.retrieve = async_to_streamed_response_wrapper( + tasks.retrieve, + ) + self.list = async_to_streamed_response_wrapper( + tasks.list, + ) + self.delete = async_to_streamed_response_wrapper( + tasks.delete, + ) + self.stream_events = async_to_streamed_response_wrapper( + tasks.stream_events, + ) + + @cached_property + def name(self) -> AsyncNameResourceWithStreamingResponse: + return AsyncNameResourceWithStreamingResponse(self._tasks.name) diff --git a/src/agentex/resources/tracker.py b/src/agentex/resources/tracker.py new file mode 100644 index 00000000..1a74df21 --- /dev/null +++ b/src/agentex/resources/tracker.py @@ -0,0 +1,384 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Optional + +import httpx + +from ..types import tracker_list_params, tracker_update_params +from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven +from .._utils import maybe_transform, async_maybe_transform +from .._compat import cached_property +from .._resource import SyncAPIResource, AsyncAPIResource +from .._response import ( + to_raw_response_wrapper, + to_streamed_response_wrapper, + async_to_raw_response_wrapper, + async_to_streamed_response_wrapper, +) +from .._base_client import make_request_options +from ..types.agent_task_tracker import AgentTaskTracker +from ..types.tracker_list_response import TrackerListResponse + +__all__ = ["TrackerResource", "AsyncTrackerResource"] + + +class TrackerResource(SyncAPIResource): + @cached_property + def with_raw_response(self) -> TrackerResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return TrackerResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> TrackerResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return TrackerResourceWithStreamingResponse(self) + + def retrieve( + self, + tracker_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AgentTaskTracker: + """ + Get agent task tracker by tracker ID + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not tracker_id: + raise ValueError(f"Expected a non-empty value for `tracker_id` but received {tracker_id!r}") + return self._get( + f"/tracker/{tracker_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AgentTaskTracker, + ) + + def update( + self, + tracker_id: str, + *, + last_processed_event_id: Optional[str] | NotGiven = NOT_GIVEN, + status: Optional[str] | NotGiven = NOT_GIVEN, + status_reason: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. 
+ extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AgentTaskTracker: + """ + Update agent task tracker by tracker ID + + Args: + last_processed_event_id: The most recent processed event ID (omit to leave unchanged) + + status: Processing status + + status_reason: Optional status reason + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not tracker_id: + raise ValueError(f"Expected a non-empty value for `tracker_id` but received {tracker_id!r}") + return self._put( + f"/tracker/{tracker_id}", + body=maybe_transform( + { + "last_processed_event_id": last_processed_event_id, + "status": status, + "status_reason": status_reason, + }, + tracker_update_params.TrackerUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AgentTaskTracker, + ) + + def list( + self, + *, + agent_id: Optional[str] | NotGiven = NOT_GIVEN, + task_id: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TrackerListResponse: + """ + List all agent task trackers, optionally filtered by query parameters. + + Args: + agent_id: Agent ID + + task_id: Task ID + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return self._get( + "/tracker", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=maybe_transform( + { + "agent_id": agent_id, + "task_id": task_id, + }, + tracker_list_params.TrackerListParams, + ), + ), + cast_to=TrackerListResponse, + ) + + +class AsyncTrackerResource(AsyncAPIResource): + @cached_property + def with_raw_response(self) -> AsyncTrackerResourceWithRawResponse: + """ + This property can be used as a prefix for any HTTP method call to return + the raw response object instead of the parsed content. + + For more information, see https://www.github.com/scaleapi/agentex-python#accessing-raw-response-data-eg-headers + """ + return AsyncTrackerResourceWithRawResponse(self) + + @cached_property + def with_streaming_response(self) -> AsyncTrackerResourceWithStreamingResponse: + """ + An alternative to `.with_raw_response` that doesn't eagerly read the response body. + + For more information, see https://www.github.com/scaleapi/agentex-python#with_streaming_response + """ + return AsyncTrackerResourceWithStreamingResponse(self) + + async def retrieve( + self, + tracker_id: str, + *, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. 
+ # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AgentTaskTracker: + """ + Get agent task tracker by tracker ID + + Args: + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not tracker_id: + raise ValueError(f"Expected a non-empty value for `tracker_id` but received {tracker_id!r}") + return await self._get( + f"/tracker/{tracker_id}", + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AgentTaskTracker, + ) + + async def update( + self, + tracker_id: str, + *, + last_processed_event_id: Optional[str] | NotGiven = NOT_GIVEN, + status: Optional[str] | NotGiven = NOT_GIVEN, + status_reason: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> AgentTaskTracker: + """ + Update agent task tracker by tracker ID + + Args: + last_processed_event_id: The most recent processed event ID (omit to leave unchanged) + + status: Processing status + + status_reason: Optional status reason + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + if not tracker_id: + raise ValueError(f"Expected a non-empty value for `tracker_id` but received {tracker_id!r}") + return await self._put( + f"/tracker/{tracker_id}", + body=await async_maybe_transform( + { + "last_processed_event_id": last_processed_event_id, + "status": status, + "status_reason": status_reason, + }, + tracker_update_params.TrackerUpdateParams, + ), + options=make_request_options( + extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout + ), + cast_to=AgentTaskTracker, + ) + + async def list( + self, + *, + agent_id: Optional[str] | NotGiven = NOT_GIVEN, + task_id: Optional[str] | NotGiven = NOT_GIVEN, + # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. + # The extra values given here take precedence over values defined on the client or passed to this method. + extra_headers: Headers | None = None, + extra_query: Query | None = None, + extra_body: Body | None = None, + timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, + ) -> TrackerListResponse: + """ + List all agent task trackers, optionally filtered by query parameters. 
+ + Args: + agent_id: Agent ID + + task_id: Task ID + + extra_headers: Send extra headers + + extra_query: Add additional query parameters to the request + + extra_body: Add additional JSON properties to the request + + timeout: Override the client-level default timeout for this request, in seconds + """ + return await self._get( + "/tracker", + options=make_request_options( + extra_headers=extra_headers, + extra_query=extra_query, + extra_body=extra_body, + timeout=timeout, + query=await async_maybe_transform( + { + "agent_id": agent_id, + "task_id": task_id, + }, + tracker_list_params.TrackerListParams, + ), + ), + cast_to=TrackerListResponse, + ) + + +class TrackerResourceWithRawResponse: + def __init__(self, tracker: TrackerResource) -> None: + self._tracker = tracker + + self.retrieve = to_raw_response_wrapper( + tracker.retrieve, + ) + self.update = to_raw_response_wrapper( + tracker.update, + ) + self.list = to_raw_response_wrapper( + tracker.list, + ) + + +class AsyncTrackerResourceWithRawResponse: + def __init__(self, tracker: AsyncTrackerResource) -> None: + self._tracker = tracker + + self.retrieve = async_to_raw_response_wrapper( + tracker.retrieve, + ) + self.update = async_to_raw_response_wrapper( + tracker.update, + ) + self.list = async_to_raw_response_wrapper( + tracker.list, + ) + + +class TrackerResourceWithStreamingResponse: + def __init__(self, tracker: TrackerResource) -> None: + self._tracker = tracker + + self.retrieve = to_streamed_response_wrapper( + tracker.retrieve, + ) + self.update = to_streamed_response_wrapper( + tracker.update, + ) + self.list = to_streamed_response_wrapper( + tracker.list, + ) + + +class AsyncTrackerResourceWithStreamingResponse: + def __init__(self, tracker: AsyncTrackerResource) -> None: + self._tracker = tracker + + self.retrieve = async_to_streamed_response_wrapper( + tracker.retrieve, + ) + self.update = async_to_streamed_response_wrapper( + tracker.update, + ) + self.list = async_to_streamed_response_wrapper( + tracker.list, + ) diff --git a/src/agentex/types/__init__.py b/src/agentex/types/__init__.py new file mode 100644 index 00000000..17dc3aa2 --- /dev/null +++ b/src/agentex/types/__init__.py @@ -0,0 +1,45 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
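# --- Illustrative usage sketch (editor's example, not part of the generated diff) ---
# A minimal sketch of calling the tracker resource defined above. It assumes the
# client exposes the resource as `client.tracker` and that `Agentex()` accepts
# default construction arguments; both are assumptions, as is every ID shown below.
from agentex import Agentex

client = Agentex()  # real construction arguments (API key, base URL) may differ

tracker = client.tracker.retrieve("tracker_123")  # hypothetical tracker ID
updated = client.tracker.update(
    tracker.id,
    last_processed_event_id="event_456",  # hypothetical event ID
    status="PROCESSING",                  # free-form status string per the model
)
for t in client.tracker.list(agent_id=tracker.agent_id):  # TrackerListResponse is a List[AgentTaskTracker]
    print(t.id, t.status)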
+ +from __future__ import annotations + +from .span import Span as Span +from .task import Task as Task +from .agent import Agent as Agent +from .event import Event as Event +from .state import State as State +from .acp_type import AcpType as AcpType +from .data_content import DataContent as DataContent +from .task_message import TaskMessage as TaskMessage +from .text_content import TextContent as TextContent +from .message_style import MessageStyle as MessageStyle +from .message_author import MessageAuthor as MessageAuthor +from .agent_rpc_params import AgentRpcParams as AgentRpcParams +from .echo_send_params import EchoSendParams as EchoSendParams +from .span_list_params import SpanListParams as SpanListParams +from .streaming_status import StreamingStatus as StreamingStatus +from .agent_list_params import AgentListParams as AgentListParams +from .event_list_params import EventListParams as EventListParams +from .state_list_params import StateListParams as StateListParams +from .agent_task_tracker import AgentTaskTracker as AgentTaskTracker +from .data_content_param import DataContentParam as DataContentParam +from .span_create_params import SpanCreateParams as SpanCreateParams +from .span_list_response import SpanListResponse as SpanListResponse +from .span_update_params import SpanUpdateParams as SpanUpdateParams +from .task_list_response import TaskListResponse as TaskListResponse +from .text_content_param import TextContentParam as TextContentParam +from .agent_list_response import AgentListResponse as AgentListResponse +from .event_list_response import EventListResponse as EventListResponse +from .message_list_params import MessageListParams as MessageListParams +from .state_create_params import StateCreateParams as StateCreateParams +from .state_list_response import StateListResponse as StateListResponse +from .state_update_params import StateUpdateParams as StateUpdateParams +from .tracker_list_params import TrackerListParams as TrackerListParams +from .tool_request_content import ToolRequestContent as ToolRequestContent +from .message_create_params import MessageCreateParams as MessageCreateParams +from .message_list_response import MessageListResponse as MessageListResponse +from .message_update_params import MessageUpdateParams as MessageUpdateParams +from .tool_response_content import ToolResponseContent as ToolResponseContent +from .tracker_list_response import TrackerListResponse as TrackerListResponse +from .tracker_update_params import TrackerUpdateParams as TrackerUpdateParams +from .tool_request_content_param import ToolRequestContentParam as ToolRequestContentParam +from .tool_response_content_param import ToolResponseContentParam as ToolResponseContentParam diff --git a/src/agentex/types/acp_type.py b/src/agentex/types/acp_type.py new file mode 100644 index 00000000..ec86ddf5 --- /dev/null +++ b/src/agentex/types/acp_type.py @@ -0,0 +1,7 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal, TypeAlias + +__all__ = ["AcpType"] + +AcpType: TypeAlias = Literal["sync", "agentic"] diff --git a/src/agentex/types/agent.py b/src/agentex/types/agent.py new file mode 100644 index 00000000..d339ece1 --- /dev/null +++ b/src/agentex/types/agent.py @@ -0,0 +1,29 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Optional +from typing_extensions import Literal + +from .._models import BaseModel +from .acp_type import AcpType + +__all__ = ["Agent"] + + +class Agent(BaseModel): + id: str + """The unique identifier of the agent.""" + + acp_type: AcpType + """The type of the ACP Server (Either sync or agentic)""" + + description: str + """The description of the action.""" + + name: str + """The unique name of the agent.""" + + status: Optional[Literal["Pending", "Building", "Ready", "Failed", "Unknown"]] = None + """The status of the action, indicating if it's building, ready, failed, etc.""" + + status_reason: Optional[str] = None + """The reason for the status of the action.""" diff --git a/src/agentex/types/agent_list_params.py b/src/agentex/types/agent_list_params.py new file mode 100644 index 00000000..b432174a --- /dev/null +++ b/src/agentex/types/agent_list_params.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional +from typing_extensions import TypedDict + +__all__ = ["AgentListParams"] + + +class AgentListParams(TypedDict, total=False): + task_id: Optional[str] + """Task ID""" diff --git a/src/agentex/types/agent_list_response.py b/src/agentex/types/agent_list_response.py new file mode 100644 index 00000000..f33d2016 --- /dev/null +++ b/src/agentex/types/agent_list_response.py @@ -0,0 +1,10 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List +from typing_extensions import TypeAlias + +from .agent import Agent + +__all__ = ["AgentListResponse"] + +AgentListResponse: TypeAlias = List[Agent] diff --git a/src/agentex/types/agent_rpc_params.py b/src/agentex/types/agent_rpc_params.py new file mode 100644 index 00000000..2c79a7a9 --- /dev/null +++ b/src/agentex/types/agent_rpc_params.py @@ -0,0 +1,85 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, Union, Optional +from typing_extensions import Literal, Required, TypeAlias, TypedDict + +from .data_content_param import DataContentParam +from .text_content_param import TextContentParam +from .tool_request_content_param import ToolRequestContentParam +from .tool_response_content_param import ToolResponseContentParam + +__all__ = [ + "AgentRpcParams", + "Params", + "ParamsCreateTaskRequest", + "ParamsCancelTaskRequest", + "ParamsSendMessageRequest", + "ParamsSendMessageRequestContent", + "ParamsSendEventRequest", + "ParamsSendEventRequestContent", +] + + +class AgentRpcParams(TypedDict, total=False): + method: Required[Literal["event/send", "task/create", "message/send", "task/cancel"]] + + params: Required[Params] + + id: Union[int, str, None] + + jsonrpc: Literal["2.0"] + + +class ParamsCreateTaskRequest(TypedDict, total=False): + name: Optional[str] + """The name of the task to create""" + + params: Optional[Dict[str, object]] + """The parameters for the task""" + + +class ParamsCancelTaskRequest(TypedDict, total=False): + task_id: Optional[str] + """The ID of the task to cancel. Either this or task_name must be provided.""" + + task_name: Optional[str] + """The name of the task to cancel. 
Either this or task_id must be provided.""" + + +ParamsSendMessageRequestContent: TypeAlias = Union[ + TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam +] + + +class ParamsSendMessageRequest(TypedDict, total=False): + content: Required[ParamsSendMessageRequestContent] + """The message that was sent to the agent""" + + stream: bool + """Whether to stream the response message back to the client""" + + task_id: Optional[str] + """The ID of the task that the message was sent to""" + + +ParamsSendEventRequestContent: TypeAlias = Union[ + TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam +] + + +class ParamsSendEventRequest(TypedDict, total=False): + content: Optional[ParamsSendEventRequestContent] + """The content to send to the event""" + + task_id: Optional[str] + """The ID of the task that the event was sent to""" + + task_name: Optional[str] + """The name of the task that the event was sent to""" + + +Params: TypeAlias = Union[ + ParamsCreateTaskRequest, ParamsCancelTaskRequest, ParamsSendMessageRequest, ParamsSendEventRequest +] diff --git a/src/agentex/types/agent_task_tracker.py b/src/agentex/types/agent_task_tracker.py new file mode 100644 index 00000000..4f6ebaa7 --- /dev/null +++ b/src/agentex/types/agent_task_tracker.py @@ -0,0 +1,34 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from datetime import datetime + +from .._models import BaseModel + +__all__ = ["AgentTaskTracker"] + + +class AgentTaskTracker(BaseModel): + id: str + """The UUID of the agent task tracker""" + + agent_id: str + """The UUID of the agent""" + + created_at: datetime + """When the agent task tracker was created""" + + task_id: str + """The UUID of the task""" + + last_processed_event_id: Optional[str] = None + """The last processed event ID""" + + status: Optional[str] = None + """Processing status""" + + status_reason: Optional[str] = None + """Optional status reason""" + + updated_at: Optional[datetime] = None + """When the agent task tracker was last updated""" diff --git a/src/agentex/types/agents/__init__.py b/src/agentex/types/agents/__init__.py new file mode 100644 index 00000000..6b2986a2 --- /dev/null +++ b/src/agentex/types/agents/__init__.py @@ -0,0 +1,5 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .name_rpc_params import NameRpcParams as NameRpcParams diff --git a/src/agentex/types/agents/name_rpc_params.py b/src/agentex/types/agents/name_rpc_params.py new file mode 100644 index 00000000..6a68b0ba --- /dev/null +++ b/src/agentex/types/agents/name_rpc_params.py @@ -0,0 +1,85 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
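# --- Illustrative usage sketch (editor's example, not part of the generated diff) ---
# How the RPC param TypedDicts above compose into a `message/send` call. The call
# shape mirrors the generated tests (`client.agents.name.rpc`); the agent name and
# task ID are hypothetical placeholders and the client construction is assumed.
from agentex import Agentex

client = Agentex()  # real construction arguments may differ

result = client.agents.name.rpc(
    agent_name="my-agent",  # hypothetical agent name
    method="message/send",
    params={
        # ParamsSendMessageRequest carrying a TextContentParam payload
        "content": {"author": "user", "content": "Hello, agent!", "type": "text"},
        "task_id": "task_123",  # hypothetical task ID
        "stream": False,
    },
    jsonrpc="2.0",
)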
+ +from __future__ import annotations + +from typing import Dict, Union, Optional +from typing_extensions import Literal, Required, TypeAlias, TypedDict + +from ..data_content_param import DataContentParam +from ..text_content_param import TextContentParam +from ..tool_request_content_param import ToolRequestContentParam +from ..tool_response_content_param import ToolResponseContentParam + +__all__ = [ + "NameRpcParams", + "Params", + "ParamsCreateTaskRequest", + "ParamsCancelTaskRequest", + "ParamsSendMessageRequest", + "ParamsSendMessageRequestContent", + "ParamsSendEventRequest", + "ParamsSendEventRequestContent", +] + + +class NameRpcParams(TypedDict, total=False): + method: Required[Literal["event/send", "task/create", "message/send", "task/cancel"]] + + params: Required[Params] + + id: Union[int, str, None] + + jsonrpc: Literal["2.0"] + + +class ParamsCreateTaskRequest(TypedDict, total=False): + name: Optional[str] + """The name of the task to create""" + + params: Optional[Dict[str, object]] + """The parameters for the task""" + + +class ParamsCancelTaskRequest(TypedDict, total=False): + task_id: Optional[str] + """The ID of the task to cancel. Either this or task_name must be provided.""" + + task_name: Optional[str] + """The name of the task to cancel. Either this or task_id must be provided.""" + + +ParamsSendMessageRequestContent: TypeAlias = Union[ + TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam +] + + +class ParamsSendMessageRequest(TypedDict, total=False): + content: Required[ParamsSendMessageRequestContent] + """The message that was sent to the agent""" + + stream: bool + """Whether to stream the response message back to the client""" + + task_id: Optional[str] + """The ID of the task that the message was sent to""" + + +ParamsSendEventRequestContent: TypeAlias = Union[ + TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam +] + + +class ParamsSendEventRequest(TypedDict, total=False): + content: Optional[ParamsSendEventRequestContent] + """The content to send to the event""" + + task_id: Optional[str] + """The ID of the task that the event was sent to""" + + task_name: Optional[str] + """The name of the task that the event was sent to""" + + +Params: TypeAlias = Union[ + ParamsCreateTaskRequest, ParamsCancelTaskRequest, ParamsSendMessageRequest, ParamsSendEventRequest +] diff --git a/src/agentex/types/data_content.py b/src/agentex/types/data_content.py new file mode 100644 index 00000000..2ed34045 --- /dev/null +++ b/src/agentex/types/data_content.py @@ -0,0 +1,30 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, Optional +from typing_extensions import Literal + +from .._models import BaseModel +from .message_style import MessageStyle +from .message_author import MessageAuthor + +__all__ = ["DataContent"] + + +class DataContent(BaseModel): + author: MessageAuthor + """ + The role of the messages author, in this case `system`, `user`, `assistant`, or + `tool`. + """ + + data: Dict[str, object] + """The contents of the data message.""" + + style: Optional[MessageStyle] = None + """The style of the message. + + This is used by the client to determine how to display the message. 
+ """ + + type: Optional[Literal["data"]] = None + """The type of the message, in this case `data`.""" diff --git a/src/agentex/types/data_content_param.py b/src/agentex/types/data_content_param.py new file mode 100644 index 00000000..2232e417 --- /dev/null +++ b/src/agentex/types/data_content_param.py @@ -0,0 +1,31 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict +from typing_extensions import Literal, Required, TypedDict + +from .message_style import MessageStyle +from .message_author import MessageAuthor + +__all__ = ["DataContentParam"] + + +class DataContentParam(TypedDict, total=False): + author: Required[MessageAuthor] + """ + The role of the messages author, in this case `system`, `user`, `assistant`, or + `tool`. + """ + + data: Required[Dict[str, object]] + """The contents of the data message.""" + + style: MessageStyle + """The style of the message. + + This is used by the client to determine how to display the message. + """ + + type: Literal["data"] + """The type of the message, in this case `data`.""" diff --git a/src/agentex/types/echo_send_params.py b/src/agentex/types/echo_send_params.py new file mode 100644 index 00000000..3e237817 --- /dev/null +++ b/src/agentex/types/echo_send_params.py @@ -0,0 +1,11 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Required, TypedDict + +__all__ = ["EchoSendParams"] + + +class EchoSendParams(TypedDict, total=False): + message: Required[str] diff --git a/src/agentex/types/event.py b/src/agentex/types/event.py new file mode 100644 index 00000000..3f1e5997 --- /dev/null +++ b/src/agentex/types/event.py @@ -0,0 +1,38 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Union, Optional +from datetime import datetime +from typing_extensions import Annotated, TypeAlias + +from .._utils import PropertyInfo +from .._models import BaseModel +from .data_content import DataContent +from .text_content import TextContent +from .tool_request_content import ToolRequestContent +from .tool_response_content import ToolResponseContent + +__all__ = ["Event", "Content"] + +Content: TypeAlias = Annotated[ + Union[TextContent, DataContent, ToolRequestContent, ToolResponseContent, None], PropertyInfo(discriminator="type") +] + + +class Event(BaseModel): + id: str + """The UUID of the event""" + + agent_id: str + """The UUID of the agent that the event belongs to""" + + sequence_id: int + """The sequence ID of the event""" + + task_id: str + """The UUID of the task that the event belongs to""" + + content: Optional[Content] = None + """The content of the event""" + + created_at: Optional[datetime] = None + """The timestamp of the event""" diff --git a/src/agentex/types/event_list_params.py b/src/agentex/types/event_list_params.py new file mode 100644 index 00000000..2d628dd6 --- /dev/null +++ b/src/agentex/types/event_list_params.py @@ -0,0 +1,22 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Optional +from typing_extensions import Required, TypedDict + +__all__ = ["EventListParams"] + + +class EventListParams(TypedDict, total=False): + agent_id: Required[str] + """The agent ID to filter events by""" + + task_id: Required[str] + """The task ID to filter events by""" + + last_processed_event_id: Optional[str] + """Optional event ID to get events after this ID""" + + limit: Optional[int] + """Optional limit on number of results""" diff --git a/src/agentex/types/event_list_response.py b/src/agentex/types/event_list_response.py new file mode 100644 index 00000000..05057180 --- /dev/null +++ b/src/agentex/types/event_list_response.py @@ -0,0 +1,10 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List +from typing_extensions import TypeAlias + +from .event import Event + +__all__ = ["EventListResponse"] + +EventListResponse: TypeAlias = List[Event] diff --git a/src/agentex/types/message_author.py b/src/agentex/types/message_author.py new file mode 100644 index 00000000..c902ac5e --- /dev/null +++ b/src/agentex/types/message_author.py @@ -0,0 +1,7 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal, TypeAlias + +__all__ = ["MessageAuthor"] + +MessageAuthor: TypeAlias = Literal["user", "agent"] diff --git a/src/agentex/types/message_create_params.py b/src/agentex/types/message_create_params.py new file mode 100644 index 00000000..06ec3727 --- /dev/null +++ b/src/agentex/types/message_create_params.py @@ -0,0 +1,25 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union, Optional +from typing_extensions import Required, TypeAlias, TypedDict + +from .streaming_status import StreamingStatus +from .data_content_param import DataContentParam +from .text_content_param import TextContentParam +from .tool_request_content_param import ToolRequestContentParam +from .tool_response_content_param import ToolResponseContentParam + +__all__ = ["MessageCreateParams", "Content"] + + +class MessageCreateParams(TypedDict, total=False): + content: Required[Content] + + task_id: Required[str] + + streaming_status: Optional[StreamingStatus] + + +Content: TypeAlias = Union[TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam] diff --git a/src/agentex/types/message_list_params.py b/src/agentex/types/message_list_params.py new file mode 100644 index 00000000..1bdcedb5 --- /dev/null +++ b/src/agentex/types/message_list_params.py @@ -0,0 +1,14 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional +from typing_extensions import Required, TypedDict + +__all__ = ["MessageListParams"] + + +class MessageListParams(TypedDict, total=False): + task_id: Required[str] + + limit: Optional[int] diff --git a/src/agentex/types/message_list_response.py b/src/agentex/types/message_list_response.py new file mode 100644 index 00000000..37123d90 --- /dev/null +++ b/src/agentex/types/message_list_response.py @@ -0,0 +1,10 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
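# --- Illustrative usage sketch (editor's example, not part of the generated diff) ---
# Listing events using the EventListParams shape defined above. It assumes the client
# exposes an `events` resource with a `list` method taking these keyword arguments;
# that resource is implied by the params/response types but not shown in this part of
# the diff, and the IDs are hypothetical.
from agentex import Agentex

client = Agentex()  # real construction arguments may differ

events = client.events.list(
    agent_id="agent_123",  # required filter (hypothetical ID)
    task_id="task_456",    # required filter (hypothetical ID)
    limit=50,              # optional cap on results
)
for event in events:  # EventListResponse is a List[Event]
    print(event.sequence_id, event.content)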
+ +from typing import List +from typing_extensions import TypeAlias + +from .task_message import TaskMessage + +__all__ = ["MessageListResponse"] + +MessageListResponse: TypeAlias = List[TaskMessage] diff --git a/src/agentex/types/message_style.py b/src/agentex/types/message_style.py new file mode 100644 index 00000000..81520ffb --- /dev/null +++ b/src/agentex/types/message_style.py @@ -0,0 +1,7 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal, TypeAlias + +__all__ = ["MessageStyle"] + +MessageStyle: TypeAlias = Literal["static", "active"] diff --git a/src/agentex/types/message_update_params.py b/src/agentex/types/message_update_params.py new file mode 100644 index 00000000..bc5305b8 --- /dev/null +++ b/src/agentex/types/message_update_params.py @@ -0,0 +1,25 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union, Optional +from typing_extensions import Required, TypeAlias, TypedDict + +from .streaming_status import StreamingStatus +from .data_content_param import DataContentParam +from .text_content_param import TextContentParam +from .tool_request_content_param import ToolRequestContentParam +from .tool_response_content_param import ToolResponseContentParam + +__all__ = ["MessageUpdateParams", "Content"] + + +class MessageUpdateParams(TypedDict, total=False): + content: Required[Content] + + task_id: Required[str] + + streaming_status: Optional[StreamingStatus] + + +Content: TypeAlias = Union[TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam] diff --git a/src/agentex/types/messages/__init__.py b/src/agentex/types/messages/__init__.py new file mode 100644 index 00000000..00a2dde5 --- /dev/null +++ b/src/agentex/types/messages/__init__.py @@ -0,0 +1,8 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from .batch_create_params import BatchCreateParams as BatchCreateParams +from .batch_update_params import BatchUpdateParams as BatchUpdateParams +from .batch_create_response import BatchCreateResponse as BatchCreateResponse +from .batch_update_response import BatchUpdateResponse as BatchUpdateResponse diff --git a/src/agentex/types/messages/batch_create_params.py b/src/agentex/types/messages/batch_create_params.py new file mode 100644 index 00000000..c09c69a9 --- /dev/null +++ b/src/agentex/types/messages/batch_create_params.py @@ -0,0 +1,22 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
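# --- Illustrative usage sketch (editor's example, not part of the generated diff) ---
# Creating a task message from the MessageCreateParams shape above. It assumes the
# client exposes `client.messages.create` with these keyword arguments; the resource
# itself is implied by the params type but not shown in this part of the diff, and
# the task ID is a hypothetical placeholder.
from agentex import Agentex

client = Agentex()  # real construction arguments may differ

message = client.messages.create(
    task_id="task_123",  # hypothetical task ID
    content={
        "author": "agent",
        "content": "Working on it...",
        "format": "markdown",
        "type": "text",
    },
    streaming_status="IN_PROGRESS",  # optional; either "IN_PROGRESS" or "DONE"
)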
+ +from __future__ import annotations + +from typing import Union, Iterable +from typing_extensions import Required, TypeAlias, TypedDict + +from ..data_content_param import DataContentParam +from ..text_content_param import TextContentParam +from ..tool_request_content_param import ToolRequestContentParam +from ..tool_response_content_param import ToolResponseContentParam + +__all__ = ["BatchCreateParams", "Content"] + + +class BatchCreateParams(TypedDict, total=False): + contents: Required[Iterable[Content]] + + task_id: Required[str] + + +Content: TypeAlias = Union[TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam] diff --git a/src/agentex/types/messages/batch_create_response.py b/src/agentex/types/messages/batch_create_response.py new file mode 100644 index 00000000..d110e3e3 --- /dev/null +++ b/src/agentex/types/messages/batch_create_response.py @@ -0,0 +1,10 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List +from typing_extensions import TypeAlias + +from ..task_message import TaskMessage + +__all__ = ["BatchCreateResponse"] + +BatchCreateResponse: TypeAlias = List[TaskMessage] diff --git a/src/agentex/types/messages/batch_update_params.py b/src/agentex/types/messages/batch_update_params.py new file mode 100644 index 00000000..68f24f8d --- /dev/null +++ b/src/agentex/types/messages/batch_update_params.py @@ -0,0 +1,22 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, Union +from typing_extensions import Required, TypeAlias, TypedDict + +from ..data_content_param import DataContentParam +from ..text_content_param import TextContentParam +from ..tool_request_content_param import ToolRequestContentParam +from ..tool_response_content_param import ToolResponseContentParam + +__all__ = ["BatchUpdateParams", "Updates"] + + +class BatchUpdateParams(TypedDict, total=False): + task_id: Required[str] + + updates: Required[Dict[str, Updates]] + + +Updates: TypeAlias = Union[TextContentParam, DataContentParam, ToolRequestContentParam, ToolResponseContentParam] diff --git a/src/agentex/types/messages/batch_update_response.py b/src/agentex/types/messages/batch_update_response.py new file mode 100644 index 00000000..12de8603 --- /dev/null +++ b/src/agentex/types/messages/batch_update_response.py @@ -0,0 +1,10 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List +from typing_extensions import TypeAlias + +from ..task_message import TaskMessage + +__all__ = ["BatchUpdateResponse"] + +BatchUpdateResponse: TypeAlias = List[TaskMessage] diff --git a/src/agentex/types/span.py b/src/agentex/types/span.py new file mode 100644 index 00000000..e5719bbe --- /dev/null +++ b/src/agentex/types/span.py @@ -0,0 +1,36 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Dict, List, Union, Optional +from datetime import datetime + +from .._models import BaseModel + +__all__ = ["Span"] + + +class Span(BaseModel): + id: str + + name: str + """Name that describes what operation this span represents""" + + start_time: datetime + """The time the span started""" + + trace_id: str + """Unique identifier for the trace this span belongs to""" + + data: Union[Dict[str, object], List[Dict[str, object]], None] = None + """Any additional metadata or context for the span""" + + end_time: Optional[datetime] = None + """The time the span ended""" + + input: Union[Dict[str, object], List[Dict[str, object]], None] = None + """Input parameters or data for the operation""" + + output: Union[Dict[str, object], List[Dict[str, object]], None] = None + """Output data resulting from the operation""" + + parent_id: Optional[str] = None + """ID of the parent span if this is a child span in a trace""" diff --git a/src/agentex/types/span_create_params.py b/src/agentex/types/span_create_params.py new file mode 100644 index 00000000..c7111d6f --- /dev/null +++ b/src/agentex/types/span_create_params.py @@ -0,0 +1,40 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, Union, Iterable, Optional +from datetime import datetime +from typing_extensions import Required, Annotated, TypedDict + +from .._utils import PropertyInfo + +__all__ = ["SpanCreateParams"] + + +class SpanCreateParams(TypedDict, total=False): + name: Required[str] + """Name that describes what operation this span represents""" + + start_time: Required[Annotated[Union[str, datetime], PropertyInfo(format="iso8601")]] + """The time the span started""" + + trace_id: Required[str] + """Unique identifier for the trace this span belongs to""" + + id: Optional[str] + """Unique identifier for the span. If not provided, an ID will be generated.""" + + data: Union[Dict[str, object], Iterable[Dict[str, object]], None] + """Any additional metadata or context for the span""" + + end_time: Annotated[Union[str, datetime, None], PropertyInfo(format="iso8601")] + """The time the span ended""" + + input: Union[Dict[str, object], Iterable[Dict[str, object]], None] + """Input parameters or data for the operation""" + + output: Union[Dict[str, object], Iterable[Dict[str, object]], None] + """Output data resulting from the operation""" + + parent_id: Optional[str] + """ID of the parent span if this is a child span in a trace""" diff --git a/src/agentex/types/span_list_params.py b/src/agentex/types/span_list_params.py new file mode 100644 index 00000000..35218965 --- /dev/null +++ b/src/agentex/types/span_list_params.py @@ -0,0 +1,12 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional +from typing_extensions import TypedDict + +__all__ = ["SpanListParams"] + + +class SpanListParams(TypedDict, total=False): + trace_id: Optional[str] diff --git a/src/agentex/types/span_list_response.py b/src/agentex/types/span_list_response.py new file mode 100644 index 00000000..12348665 --- /dev/null +++ b/src/agentex/types/span_list_response.py @@ -0,0 +1,10 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
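# --- Illustrative usage sketch (editor's example, not part of the generated diff) ---
# Recording a span with the SpanCreateParams shape above. It assumes the client
# exposes `client.spans.create`; the resource is implied by the params type but not
# shown in this part of the diff, and the trace ID is a hypothetical placeholder.
from datetime import datetime, timezone

from agentex import Agentex

client = Agentex()  # real construction arguments may differ

span = client.spans.create(
    name="llm.completion",                  # what operation this span represents
    trace_id="trace_123",                   # hypothetical trace ID
    start_time=datetime.now(timezone.utc),  # serialized as ISO 8601 per PropertyInfo(format="iso8601")
    input={"prompt": "Hello"},
    output={"completion": "Hi there"},
    end_time=datetime.now(timezone.utc),
)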
+ +from typing import List +from typing_extensions import TypeAlias + +from .span import Span + +__all__ = ["SpanListResponse"] + +SpanListResponse: TypeAlias = List[Span] diff --git a/src/agentex/types/span_update_params.py b/src/agentex/types/span_update_params.py new file mode 100644 index 00000000..3ebb5465 --- /dev/null +++ b/src/agentex/types/span_update_params.py @@ -0,0 +1,37 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict, Union, Iterable, Optional +from datetime import datetime +from typing_extensions import Annotated, TypedDict + +from .._utils import PropertyInfo + +__all__ = ["SpanUpdateParams"] + + +class SpanUpdateParams(TypedDict, total=False): + data: Union[Dict[str, object], Iterable[Dict[str, object]], None] + """Any additional metadata or context for the span""" + + end_time: Annotated[Union[str, datetime, None], PropertyInfo(format="iso8601")] + """The time the span ended""" + + input: Union[Dict[str, object], Iterable[Dict[str, object]], None] + """Input parameters or data for the operation""" + + name: Optional[str] + """Name that describes what operation this span represents""" + + output: Union[Dict[str, object], Iterable[Dict[str, object]], None] + """Output data resulting from the operation""" + + parent_id: Optional[str] + """ID of the parent span if this is a child span in a trace""" + + start_time: Annotated[Union[str, datetime, None], PropertyInfo(format="iso8601")] + """The time the span started""" + + trace_id: Optional[str] + """Unique identifier for the trace this span belongs to""" diff --git a/src/agentex/types/state.py b/src/agentex/types/state.py new file mode 100644 index 00000000..2789c7c3 --- /dev/null +++ b/src/agentex/types/state.py @@ -0,0 +1,25 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, Optional +from datetime import datetime + +from .._models import BaseModel + +__all__ = ["State"] + + +class State(BaseModel): + id: str + """The task state's unique id""" + + agent_id: str + + created_at: datetime + """The timestamp when the state was created""" + + state: Dict[str, object] + + task_id: str + + updated_at: Optional[datetime] = None + """The timestamp when the state was last updated""" diff --git a/src/agentex/types/state_create_params.py b/src/agentex/types/state_create_params.py new file mode 100644 index 00000000..b90c8fbb --- /dev/null +++ b/src/agentex/types/state_create_params.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict +from typing_extensions import Required, TypedDict + +__all__ = ["StateCreateParams"] + + +class StateCreateParams(TypedDict, total=False): + agent_id: Required[str] + + state: Required[Dict[str, object]] + + task_id: Required[str] diff --git a/src/agentex/types/state_list_params.py b/src/agentex/types/state_list_params.py new file mode 100644 index 00000000..6034d345 --- /dev/null +++ b/src/agentex/types/state_list_params.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Optional +from typing_extensions import TypedDict + +__all__ = ["StateListParams"] + + +class StateListParams(TypedDict, total=False): + agent_id: Optional[str] + """Agent ID""" + + task_id: Optional[str] + """Task ID""" diff --git a/src/agentex/types/state_list_response.py b/src/agentex/types/state_list_response.py new file mode 100644 index 00000000..2feefedd --- /dev/null +++ b/src/agentex/types/state_list_response.py @@ -0,0 +1,10 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List +from typing_extensions import TypeAlias + +from .state import State + +__all__ = ["StateListResponse"] + +StateListResponse: TypeAlias = List[State] diff --git a/src/agentex/types/state_update_params.py b/src/agentex/types/state_update_params.py new file mode 100644 index 00000000..28691462 --- /dev/null +++ b/src/agentex/types/state_update_params.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict +from typing_extensions import Required, TypedDict + +__all__ = ["StateUpdateParams"] + + +class StateUpdateParams(TypedDict, total=False): + agent_id: Required[str] + + state: Required[Dict[str, object]] + + task_id: Required[str] diff --git a/src/agentex/types/streaming_status.py b/src/agentex/types/streaming_status.py new file mode 100644 index 00000000..196d7881 --- /dev/null +++ b/src/agentex/types/streaming_status.py @@ -0,0 +1,7 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal, TypeAlias + +__all__ = ["StreamingStatus"] + +StreamingStatus: TypeAlias = Literal["IN_PROGRESS", "DONE"] diff --git a/src/agentex/types/task.py b/src/agentex/types/task.py new file mode 100644 index 00000000..c3afef3d --- /dev/null +++ b/src/agentex/types/task.py @@ -0,0 +1,23 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from datetime import datetime +from typing_extensions import Literal + +from .._models import BaseModel + +__all__ = ["Task"] + + +class Task(BaseModel): + id: str + + created_at: Optional[datetime] = None + + name: Optional[str] = None + + status: Optional[Literal["CANCELED", "COMPLETED", "FAILED", "RUNNING", "TERMINATED", "TIMED_OUT"]] = None + + status_reason: Optional[str] = None + + updated_at: Optional[datetime] = None diff --git a/src/agentex/types/task_list_response.py b/src/agentex/types/task_list_response.py new file mode 100644 index 00000000..c9607e60 --- /dev/null +++ b/src/agentex/types/task_list_response.py @@ -0,0 +1,10 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List +from typing_extensions import TypeAlias + +from .task import Task + +__all__ = ["TaskListResponse"] + +TaskListResponse: TypeAlias = List[Task] diff --git a/src/agentex/types/task_message.py b/src/agentex/types/task_message.py new file mode 100644 index 00000000..686ade7d --- /dev/null +++ b/src/agentex/types/task_message.py @@ -0,0 +1,36 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
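# --- Illustrative usage sketch (editor's example, not part of the generated diff) ---
# Persisting task state with the StateCreateParams / StateListParams shapes above.
# It assumes the client exposes a `states` resource with `create` and `list` methods;
# the resource is implied by the params types but not shown in this part of the diff,
# and the IDs are hypothetical placeholders.
from agentex import Agentex

client = Agentex()  # real construction arguments may differ

state = client.states.create(
    agent_id="agent_123",  # hypothetical agent ID
    task_id="task_456",    # hypothetical task ID
    state={"step": 1, "notes": []},
)
states = client.states.list(task_id="task_456")  # StateListResponse is a List[State]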
+ +from typing import Union, Optional +from datetime import datetime +from typing_extensions import Annotated, TypeAlias + +from .._utils import PropertyInfo +from .._models import BaseModel +from .data_content import DataContent +from .text_content import TextContent +from .streaming_status import StreamingStatus +from .tool_request_content import ToolRequestContent +from .tool_response_content import ToolResponseContent + +__all__ = ["TaskMessage", "Content"] + +Content: TypeAlias = Annotated[ + Union[TextContent, DataContent, ToolRequestContent, ToolResponseContent], PropertyInfo(discriminator="type") +] + + +class TaskMessage(BaseModel): + id: str + """The task message's unique id""" + + content: Content + + created_at: datetime + """The timestamp when the message was created""" + + task_id: str + + streaming_status: Optional[StreamingStatus] = None + + updated_at: Optional[datetime] = None + """The timestamp when the message was last updated""" diff --git a/src/agentex/types/tasks/__init__.py b/src/agentex/types/tasks/__init__.py new file mode 100644 index 00000000..f8ee8b14 --- /dev/null +++ b/src/agentex/types/tasks/__init__.py @@ -0,0 +1,3 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations diff --git a/src/agentex/types/text_content.py b/src/agentex/types/text_content.py new file mode 100644 index 00000000..c96c797f --- /dev/null +++ b/src/agentex/types/text_content.py @@ -0,0 +1,53 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List, Optional +from typing_extensions import Literal + +from .._models import BaseModel +from .message_style import MessageStyle +from .message_author import MessageAuthor + +__all__ = ["TextContent", "Attachment"] + + +class Attachment(BaseModel): + file_id: str + """The unique ID of the attached file""" + + name: str + """The name of the file""" + + size: int + """The size of the file in bytes""" + + type: str + """The MIME type or content type of the file""" + + +class TextContent(BaseModel): + author: MessageAuthor + """ + The role of the messages author, in this case `system`, `user`, `assistant`, or + `tool`. + """ + + content: str + """The contents of the text message.""" + + attachments: Optional[List[Attachment]] = None + """Optional list of file attachments with structured metadata.""" + + format: Optional[Literal["markdown", "plain", "code"]] = None + """The format of the message. + + This is used by the client to determine how to display the message. + """ + + style: Optional[MessageStyle] = None + """The style of the message. + + This is used by the client to determine how to display the message. + """ + + type: Optional[Literal["text"]] = None + """The type of the message, in this case `text`.""" diff --git a/src/agentex/types/text_content_param.py b/src/agentex/types/text_content_param.py new file mode 100644 index 00000000..87f357e0 --- /dev/null +++ b/src/agentex/types/text_content_param.py @@ -0,0 +1,54 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing import Iterable, Optional +from typing_extensions import Literal, Required, TypedDict + +from .message_style import MessageStyle +from .message_author import MessageAuthor + +__all__ = ["TextContentParam", "Attachment"] + + +class Attachment(TypedDict, total=False): + file_id: Required[str] + """The unique ID of the attached file""" + + name: Required[str] + """The name of the file""" + + size: Required[int] + """The size of the file in bytes""" + + type: Required[str] + """The MIME type or content type of the file""" + + +class TextContentParam(TypedDict, total=False): + author: Required[MessageAuthor] + """ + The role of the messages author, in this case `system`, `user`, `assistant`, or + `tool`. + """ + + content: Required[str] + """The contents of the text message.""" + + attachments: Optional[Iterable[Attachment]] + """Optional list of file attachments with structured metadata.""" + + format: Literal["markdown", "plain", "code"] + """The format of the message. + + This is used by the client to determine how to display the message. + """ + + style: MessageStyle + """The style of the message. + + This is used by the client to determine how to display the message. + """ + + type: Literal["text"] + """The type of the message, in this case `text`.""" diff --git a/src/agentex/types/tool_request_content.py b/src/agentex/types/tool_request_content.py new file mode 100644 index 00000000..66128630 --- /dev/null +++ b/src/agentex/types/tool_request_content.py @@ -0,0 +1,36 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Dict, Optional +from typing_extensions import Literal + +from .._models import BaseModel +from .message_style import MessageStyle +from .message_author import MessageAuthor + +__all__ = ["ToolRequestContent"] + + +class ToolRequestContent(BaseModel): + arguments: Dict[str, object] + """The arguments to the tool.""" + + author: MessageAuthor + """ + The role of the messages author, in this case `system`, `user`, `assistant`, or + `tool`. + """ + + name: str + """The name of the tool that is being requested.""" + + tool_call_id: str + """The ID of the tool call that is being requested.""" + + style: Optional[MessageStyle] = None + """The style of the message. + + This is used by the client to determine how to display the message. + """ + + type: Optional[Literal["tool_request"]] = None + """The type of the message, in this case `tool_request`.""" diff --git a/src/agentex/types/tool_request_content_param.py b/src/agentex/types/tool_request_content_param.py new file mode 100644 index 00000000..dc2fcb48 --- /dev/null +++ b/src/agentex/types/tool_request_content_param.py @@ -0,0 +1,37 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Dict +from typing_extensions import Literal, Required, TypedDict + +from .message_style import MessageStyle +from .message_author import MessageAuthor + +__all__ = ["ToolRequestContentParam"] + + +class ToolRequestContentParam(TypedDict, total=False): + arguments: Required[Dict[str, object]] + """The arguments to the tool.""" + + author: Required[MessageAuthor] + """ + The role of the messages author, in this case `system`, `user`, `assistant`, or + `tool`. 
+ """ + + name: Required[str] + """The name of the tool that is being requested.""" + + tool_call_id: Required[str] + """The ID of the tool call that is being requested.""" + + style: MessageStyle + """The style of the message. + + This is used by the client to determine how to display the message. + """ + + type: Literal["tool_request"] + """The type of the message, in this case `tool_request`.""" diff --git a/src/agentex/types/tool_response_content.py b/src/agentex/types/tool_response_content.py new file mode 100644 index 00000000..f6ba15b7 --- /dev/null +++ b/src/agentex/types/tool_response_content.py @@ -0,0 +1,36 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import Optional +from typing_extensions import Literal + +from .._models import BaseModel +from .message_style import MessageStyle +from .message_author import MessageAuthor + +__all__ = ["ToolResponseContent"] + + +class ToolResponseContent(BaseModel): + author: MessageAuthor + """ + The role of the messages author, in this case `system`, `user`, `assistant`, or + `tool`. + """ + + content: object + """The result of the tool.""" + + name: str + """The name of the tool that is being responded to.""" + + tool_call_id: str + """The ID of the tool call that is being responded to.""" + + style: Optional[MessageStyle] = None + """The style of the message. + + This is used by the client to determine how to display the message. + """ + + type: Optional[Literal["tool_response"]] = None + """The type of the message, in this case `tool_response`.""" diff --git a/src/agentex/types/tool_response_content_param.py b/src/agentex/types/tool_response_content_param.py new file mode 100644 index 00000000..e0ea7962 --- /dev/null +++ b/src/agentex/types/tool_response_content_param.py @@ -0,0 +1,36 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +from .message_style import MessageStyle +from .message_author import MessageAuthor + +__all__ = ["ToolResponseContentParam"] + + +class ToolResponseContentParam(TypedDict, total=False): + author: Required[MessageAuthor] + """ + The role of the messages author, in this case `system`, `user`, `assistant`, or + `tool`. + """ + + content: Required[object] + """The result of the tool.""" + + name: Required[str] + """The name of the tool that is being responded to.""" + + tool_call_id: Required[str] + """The ID of the tool call that is being responded to.""" + + style: MessageStyle + """The style of the message. + + This is used by the client to determine how to display the message. + """ + + type: Literal["tool_response"] + """The type of the message, in this case `tool_response`.""" diff --git a/src/agentex/types/tracker_list_params.py b/src/agentex/types/tracker_list_params.py new file mode 100644 index 00000000..47b4da7b --- /dev/null +++ b/src/agentex/types/tracker_list_params.py @@ -0,0 +1,16 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
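# --- Illustrative usage sketch (editor's example, not part of the generated diff) ---
# Building the tool request / tool response content params defined above. These are
# plain TypedDict payloads, shown here as they might be passed to a message-creating
# endpoint; the tool name and call ID are hypothetical placeholders.
from agentex.types import ToolRequestContentParam, ToolResponseContentParam

tool_request: ToolRequestContentParam = {
    "author": "agent",
    "name": "get_weather",                  # hypothetical tool name
    "tool_call_id": "call_123",             # hypothetical call ID
    "arguments": {"city": "San Francisco"},
    "type": "tool_request",
}

tool_response: ToolResponseContentParam = {
    "author": "agent",
    "name": "get_weather",
    "tool_call_id": "call_123",
    "content": {"temperature_c": 18},       # arbitrary result object
    "type": "tool_response",
}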
+ +from __future__ import annotations + +from typing import Optional +from typing_extensions import TypedDict + +__all__ = ["TrackerListParams"] + + +class TrackerListParams(TypedDict, total=False): + agent_id: Optional[str] + """Agent ID""" + + task_id: Optional[str] + """Task ID""" diff --git a/src/agentex/types/tracker_list_response.py b/src/agentex/types/tracker_list_response.py new file mode 100644 index 00000000..c5205138 --- /dev/null +++ b/src/agentex/types/tracker_list_response.py @@ -0,0 +1,10 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing import List +from typing_extensions import TypeAlias + +from .agent_task_tracker import AgentTaskTracker + +__all__ = ["TrackerListResponse"] + +TrackerListResponse: TypeAlias = List[AgentTaskTracker] diff --git a/src/agentex/types/tracker_update_params.py b/src/agentex/types/tracker_update_params.py new file mode 100644 index 00000000..127dff4d --- /dev/null +++ b/src/agentex/types/tracker_update_params.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional +from typing_extensions import TypedDict + +__all__ = ["TrackerUpdateParams"] + + +class TrackerUpdateParams(TypedDict, total=False): + last_processed_event_id: Optional[str] + """The most recent processed event ID (omit to leave unchanged)""" + + status: Optional[str] + """Processing status""" + + status_reason: Optional[str] + """Optional status reason""" diff --git a/src/agentex_sdk/lib/.keep b/src/agentex_sdk/lib/.keep new file mode 100644 index 00000000..5e2c99fd --- /dev/null +++ b/src/agentex_sdk/lib/.keep @@ -0,0 +1,4 @@ +File generated from our OpenAPI spec by Stainless. + +This directory can be used to store custom files to expand the SDK. +It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. \ No newline at end of file diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..fd8019a9 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/__init__.py b/tests/api_resources/__init__.py new file mode 100644 index 00000000..fd8019a9 --- /dev/null +++ b/tests/api_resources/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/agents/__init__.py b/tests/api_resources/agents/__init__.py new file mode 100644 index 00000000..fd8019a9 --- /dev/null +++ b/tests/api_resources/agents/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/agents/test_name.py b/tests/api_resources/agents/test_name.py new file mode 100644 index 00000000..a16fe447 --- /dev/null +++ b/tests/api_resources/agents/test_name.py @@ -0,0 +1,322 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from agentex import Agentex, AsyncAgentex +from tests.utils import assert_matches_type +from agentex.types import Agent + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestName: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_retrieve(self, client: Agentex) -> None: + name = client.agents.name.retrieve( + "agent_name", + ) + assert_matches_type(Agent, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_retrieve(self, client: Agentex) -> None: + response = client.agents.name.with_raw_response.retrieve( + "agent_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + name = response.parse() + assert_matches_type(Agent, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_retrieve(self, client: Agentex) -> None: + with client.agents.name.with_streaming_response.retrieve( + "agent_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + name = response.parse() + assert_matches_type(Agent, name, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_retrieve(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"): + client.agents.name.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_delete(self, client: Agentex) -> None: + name = client.agents.name.delete( + "agent_name", + ) + assert_matches_type(Agent, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_delete(self, client: Agentex) -> None: + response = client.agents.name.with_raw_response.delete( + "agent_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + name = response.parse() + assert_matches_type(Agent, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_delete(self, client: Agentex) -> None: + with client.agents.name.with_streaming_response.delete( + "agent_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + name = response.parse() + assert_matches_type(Agent, name, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_delete(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"): + client.agents.name.with_raw_response.delete( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_rpc(self, client: Agentex) -> None: + name = client.agents.name.rpc( + agent_name="agent_name", + method="event/send", + params={}, + ) + assert_matches_type(object, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_rpc_with_all_params(self, client: Agentex) -> None: + name = client.agents.name.rpc( + agent_name="agent_name", + method="event/send", + params={ + "name": "name", + "params": {"foo": "bar"}, + }, + id=0, + jsonrpc="2.0", + ) + assert_matches_type(object, 
name, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_rpc(self, client: Agentex) -> None: + response = client.agents.name.with_raw_response.rpc( + agent_name="agent_name", + method="event/send", + params={}, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + name = response.parse() + assert_matches_type(object, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_rpc(self, client: Agentex) -> None: + with client.agents.name.with_streaming_response.rpc( + agent_name="agent_name", + method="event/send", + params={}, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + name = response.parse() + assert_matches_type(object, name, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_rpc(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"): + client.agents.name.with_raw_response.rpc( + agent_name="", + method="event/send", + params={}, + ) + + +class TestAsyncName: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_retrieve(self, async_client: AsyncAgentex) -> None: + name = await async_client.agents.name.retrieve( + "agent_name", + ) + assert_matches_type(Agent, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncAgentex) -> None: + response = await async_client.agents.name.with_raw_response.retrieve( + "agent_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + name = await response.parse() + assert_matches_type(Agent, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncAgentex) -> None: + async with async_client.agents.name.with_streaming_response.retrieve( + "agent_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + name = await response.parse() + assert_matches_type(Agent, name, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"): + await async_client.agents.name.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_delete(self, async_client: AsyncAgentex) -> None: + name = await async_client.agents.name.delete( + "agent_name", + ) + assert_matches_type(Agent, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_delete(self, async_client: AsyncAgentex) -> None: + response = await async_client.agents.name.with_raw_response.delete( + "agent_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + name = await response.parse() + assert_matches_type(Agent, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def 
test_streaming_response_delete(self, async_client: AsyncAgentex) -> None: + async with async_client.agents.name.with_streaming_response.delete( + "agent_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + name = await response.parse() + assert_matches_type(Agent, name, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_delete(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"): + await async_client.agents.name.with_raw_response.delete( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_rpc(self, async_client: AsyncAgentex) -> None: + name = await async_client.agents.name.rpc( + agent_name="agent_name", + method="event/send", + params={}, + ) + assert_matches_type(object, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_rpc_with_all_params(self, async_client: AsyncAgentex) -> None: + name = await async_client.agents.name.rpc( + agent_name="agent_name", + method="event/send", + params={ + "name": "name", + "params": {"foo": "bar"}, + }, + id=0, + jsonrpc="2.0", + ) + assert_matches_type(object, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_rpc(self, async_client: AsyncAgentex) -> None: + response = await async_client.agents.name.with_raw_response.rpc( + agent_name="agent_name", + method="event/send", + params={}, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + name = await response.parse() + assert_matches_type(object, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_rpc(self, async_client: AsyncAgentex) -> None: + async with async_client.agents.name.with_streaming_response.rpc( + agent_name="agent_name", + method="event/send", + params={}, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + name = await response.parse() + assert_matches_type(object, name, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_rpc(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_name` but received ''"): + await async_client.agents.name.with_raw_response.rpc( + agent_name="", + method="event/send", + params={}, + ) diff --git a/tests/api_resources/messages/__init__.py b/tests/api_resources/messages/__init__.py new file mode 100644 index 00000000..fd8019a9 --- /dev/null +++ b/tests/api_resources/messages/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/messages/test_batch.py b/tests/api_resources/messages/test_batch.py new file mode 100644 index 00000000..9ad5e59c --- /dev/null +++ b/tests/api_resources/messages/test_batch.py @@ -0,0 +1,232 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from agentex import Agentex, AsyncAgentex +from tests.utils import assert_matches_type +from agentex.types.messages import BatchCreateResponse, BatchUpdateResponse + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestBatch: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_create(self, client: Agentex) -> None: + batch = client.messages.batch.create( + contents=[ + { + "author": "user", + "content": "content", + } + ], + task_id="task_id", + ) + assert_matches_type(BatchCreateResponse, batch, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_create(self, client: Agentex) -> None: + response = client.messages.batch.with_raw_response.create( + contents=[ + { + "author": "user", + "content": "content", + } + ], + task_id="task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + batch = response.parse() + assert_matches_type(BatchCreateResponse, batch, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_create(self, client: Agentex) -> None: + with client.messages.batch.with_streaming_response.create( + contents=[ + { + "author": "user", + "content": "content", + } + ], + task_id="task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + batch = response.parse() + assert_matches_type(BatchCreateResponse, batch, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_method_update(self, client: Agentex) -> None: + batch = client.messages.batch.update( + task_id="task_id", + updates={ + "foo": { + "author": "user", + "content": "content", + } + }, + ) + assert_matches_type(BatchUpdateResponse, batch, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_update(self, client: Agentex) -> None: + response = client.messages.batch.with_raw_response.update( + task_id="task_id", + updates={ + "foo": { + "author": "user", + "content": "content", + } + }, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + batch = response.parse() + assert_matches_type(BatchUpdateResponse, batch, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_update(self, client: Agentex) -> None: + with client.messages.batch.with_streaming_response.update( + task_id="task_id", + updates={ + "foo": { + "author": "user", + "content": "content", + } + }, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + batch = response.parse() + assert_matches_type(BatchUpdateResponse, batch, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncBatch: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_create(self, async_client: AsyncAgentex) -> None: + batch = await async_client.messages.batch.create( + contents=[ + { + "author": "user", + "content": "content", + } + ], + task_id="task_id", + ) + 
assert_matches_type(BatchCreateResponse, batch, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_create(self, async_client: AsyncAgentex) -> None: + response = await async_client.messages.batch.with_raw_response.create( + contents=[ + { + "author": "user", + "content": "content", + } + ], + task_id="task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + batch = await response.parse() + assert_matches_type(BatchCreateResponse, batch, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_create(self, async_client: AsyncAgentex) -> None: + async with async_client.messages.batch.with_streaming_response.create( + contents=[ + { + "author": "user", + "content": "content", + } + ], + task_id="task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + batch = await response.parse() + assert_matches_type(BatchCreateResponse, batch, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_method_update(self, async_client: AsyncAgentex) -> None: + batch = await async_client.messages.batch.update( + task_id="task_id", + updates={ + "foo": { + "author": "user", + "content": "content", + } + }, + ) + assert_matches_type(BatchUpdateResponse, batch, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_update(self, async_client: AsyncAgentex) -> None: + response = await async_client.messages.batch.with_raw_response.update( + task_id="task_id", + updates={ + "foo": { + "author": "user", + "content": "content", + } + }, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + batch = await response.parse() + assert_matches_type(BatchUpdateResponse, batch, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_update(self, async_client: AsyncAgentex) -> None: + async with async_client.messages.batch.with_streaming_response.update( + task_id="task_id", + updates={ + "foo": { + "author": "user", + "content": "content", + } + }, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + batch = await response.parse() + assert_matches_type(BatchUpdateResponse, batch, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/tasks/__init__.py b/tests/api_resources/tasks/__init__.py new file mode 100644 index 00000000..fd8019a9 --- /dev/null +++ b/tests/api_resources/tasks/__init__.py @@ -0,0 +1 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. diff --git a/tests/api_resources/tasks/test_name.py b/tests/api_resources/tasks/test_name.py new file mode 100644 index 00000000..cb000659 --- /dev/null +++ b/tests/api_resources/tasks/test_name.py @@ -0,0 +1,274 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from agentex import Agentex, AsyncAgentex +from tests.utils import assert_matches_type +from agentex.types import Task + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestName: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_retrieve(self, client: Agentex) -> None: + name = client.tasks.name.retrieve( + "task_name", + ) + assert_matches_type(Task, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_retrieve(self, client: Agentex) -> None: + response = client.tasks.name.with_raw_response.retrieve( + "task_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + name = response.parse() + assert_matches_type(Task, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_retrieve(self, client: Agentex) -> None: + with client.tasks.name.with_streaming_response.retrieve( + "task_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + name = response.parse() + assert_matches_type(Task, name, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_retrieve(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"): + client.tasks.name.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_delete(self, client: Agentex) -> None: + name = client.tasks.name.delete( + "task_name", + ) + assert_matches_type(Task, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_delete(self, client: Agentex) -> None: + response = client.tasks.name.with_raw_response.delete( + "task_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + name = response.parse() + assert_matches_type(Task, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_delete(self, client: Agentex) -> None: + with client.tasks.name.with_streaming_response.delete( + "task_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + name = response.parse() + assert_matches_type(Task, name, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_delete(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"): + client.tasks.name.with_raw_response.delete( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_stream_events(self, client: Agentex) -> None: + name_stream = client.tasks.name.stream_events( + "task_name", + ) + name_stream.response.close() + + @pytest.mark.skip() + @parametrize + def test_raw_response_stream_events(self, client: Agentex) -> None: + response = client.tasks.name.with_raw_response.stream_events( + "task_name", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = response.parse() + stream.close() + + @pytest.mark.skip() + @parametrize + def 
test_streaming_response_stream_events(self, client: Agentex) -> None: + with client.tasks.name.with_streaming_response.stream_events( + "task_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = response.parse() + stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_stream_events(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"): + client.tasks.name.with_raw_response.stream_events( + "", + ) + + +class TestAsyncName: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_retrieve(self, async_client: AsyncAgentex) -> None: + name = await async_client.tasks.name.retrieve( + "task_name", + ) + assert_matches_type(Task, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncAgentex) -> None: + response = await async_client.tasks.name.with_raw_response.retrieve( + "task_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + name = await response.parse() + assert_matches_type(Task, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncAgentex) -> None: + async with async_client.tasks.name.with_streaming_response.retrieve( + "task_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + name = await response.parse() + assert_matches_type(Task, name, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"): + await async_client.tasks.name.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_delete(self, async_client: AsyncAgentex) -> None: + name = await async_client.tasks.name.delete( + "task_name", + ) + assert_matches_type(Task, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_delete(self, async_client: AsyncAgentex) -> None: + response = await async_client.tasks.name.with_raw_response.delete( + "task_name", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + name = await response.parse() + assert_matches_type(Task, name, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_delete(self, async_client: AsyncAgentex) -> None: + async with async_client.tasks.name.with_streaming_response.delete( + "task_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + name = await response.parse() + assert_matches_type(Task, name, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_delete(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but 
received ''"): + await async_client.tasks.name.with_raw_response.delete( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_stream_events(self, async_client: AsyncAgentex) -> None: + name_stream = await async_client.tasks.name.stream_events( + "task_name", + ) + await name_stream.response.aclose() + + @pytest.mark.skip() + @parametrize + async def test_raw_response_stream_events(self, async_client: AsyncAgentex) -> None: + response = await async_client.tasks.name.with_raw_response.stream_events( + "task_name", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = await response.parse() + await stream.close() + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_stream_events(self, async_client: AsyncAgentex) -> None: + async with async_client.tasks.name.with_streaming_response.stream_events( + "task_name", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = await response.parse() + await stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_stream_events(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_name` but received ''"): + await async_client.tasks.name.with_raw_response.stream_events( + "", + ) diff --git a/tests/api_resources/test_agents.py b/tests/api_resources/test_agents.py new file mode 100644 index 00000000..de1055f7 --- /dev/null +++ b/tests/api_resources/test_agents.py @@ -0,0 +1,394 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from agentex import Agentex, AsyncAgentex +from tests.utils import assert_matches_type +from agentex.types import Agent, AgentListResponse + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestAgents: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_retrieve(self, client: Agentex) -> None: + agent = client.agents.retrieve( + "agent_id", + ) + assert_matches_type(Agent, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_retrieve(self, client: Agentex) -> None: + response = client.agents.with_raw_response.retrieve( + "agent_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + agent = response.parse() + assert_matches_type(Agent, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_retrieve(self, client: Agentex) -> None: + with client.agents.with_streaming_response.retrieve( + "agent_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + agent = response.parse() + assert_matches_type(Agent, agent, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_retrieve(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_id` but received ''"): + client.agents.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_list(self, client: Agentex) -> None: + agent = 
client.agents.list() + assert_matches_type(AgentListResponse, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_list_with_all_params(self, client: Agentex) -> None: + agent = client.agents.list( + task_id="task_id", + ) + assert_matches_type(AgentListResponse, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_list(self, client: Agentex) -> None: + response = client.agents.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + agent = response.parse() + assert_matches_type(AgentListResponse, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_list(self, client: Agentex) -> None: + with client.agents.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + agent = response.parse() + assert_matches_type(AgentListResponse, agent, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_method_delete(self, client: Agentex) -> None: + agent = client.agents.delete( + "agent_id", + ) + assert_matches_type(Agent, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_delete(self, client: Agentex) -> None: + response = client.agents.with_raw_response.delete( + "agent_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + agent = response.parse() + assert_matches_type(Agent, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_delete(self, client: Agentex) -> None: + with client.agents.with_streaming_response.delete( + "agent_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + agent = response.parse() + assert_matches_type(Agent, agent, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_delete(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_id` but received ''"): + client.agents.with_raw_response.delete( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_rpc(self, client: Agentex) -> None: + agent = client.agents.rpc( + agent_id="agent_id", + method="event/send", + params={}, + ) + assert_matches_type(object, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_rpc_with_all_params(self, client: Agentex) -> None: + agent = client.agents.rpc( + agent_id="agent_id", + method="event/send", + params={ + "name": "name", + "params": {"foo": "bar"}, + }, + id=0, + jsonrpc="2.0", + ) + assert_matches_type(object, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_rpc(self, client: Agentex) -> None: + response = client.agents.with_raw_response.rpc( + agent_id="agent_id", + method="event/send", + params={}, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + agent = response.parse() + assert_matches_type(object, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_rpc(self, client: Agentex) -> None: + with client.agents.with_streaming_response.rpc( + agent_id="agent_id", + method="event/send", + params={}, + 
) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + agent = response.parse() + assert_matches_type(object, agent, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_rpc(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_id` but received ''"): + client.agents.with_raw_response.rpc( + agent_id="", + method="event/send", + params={}, + ) + + +class TestAsyncAgents: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_retrieve(self, async_client: AsyncAgentex) -> None: + agent = await async_client.agents.retrieve( + "agent_id", + ) + assert_matches_type(Agent, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncAgentex) -> None: + response = await async_client.agents.with_raw_response.retrieve( + "agent_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + agent = await response.parse() + assert_matches_type(Agent, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncAgentex) -> None: + async with async_client.agents.with_streaming_response.retrieve( + "agent_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + agent = await response.parse() + assert_matches_type(Agent, agent, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_id` but received ''"): + await async_client.agents.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_list(self, async_client: AsyncAgentex) -> None: + agent = await async_client.agents.list() + assert_matches_type(AgentListResponse, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncAgentex) -> None: + agent = await async_client.agents.list( + task_id="task_id", + ) + assert_matches_type(AgentListResponse, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_list(self, async_client: AsyncAgentex) -> None: + response = await async_client.agents.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + agent = await response.parse() + assert_matches_type(AgentListResponse, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_list(self, async_client: AsyncAgentex) -> None: + async with async_client.agents.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + agent = await response.parse() + assert_matches_type(AgentListResponse, agent, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_method_delete(self, 
async_client: AsyncAgentex) -> None: + agent = await async_client.agents.delete( + "agent_id", + ) + assert_matches_type(Agent, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_delete(self, async_client: AsyncAgentex) -> None: + response = await async_client.agents.with_raw_response.delete( + "agent_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + agent = await response.parse() + assert_matches_type(Agent, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_delete(self, async_client: AsyncAgentex) -> None: + async with async_client.agents.with_streaming_response.delete( + "agent_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + agent = await response.parse() + assert_matches_type(Agent, agent, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_delete(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_id` but received ''"): + await async_client.agents.with_raw_response.delete( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_rpc(self, async_client: AsyncAgentex) -> None: + agent = await async_client.agents.rpc( + agent_id="agent_id", + method="event/send", + params={}, + ) + assert_matches_type(object, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_rpc_with_all_params(self, async_client: AsyncAgentex) -> None: + agent = await async_client.agents.rpc( + agent_id="agent_id", + method="event/send", + params={ + "name": "name", + "params": {"foo": "bar"}, + }, + id=0, + jsonrpc="2.0", + ) + assert_matches_type(object, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_rpc(self, async_client: AsyncAgentex) -> None: + response = await async_client.agents.with_raw_response.rpc( + agent_id="agent_id", + method="event/send", + params={}, + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + agent = await response.parse() + assert_matches_type(object, agent, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_rpc(self, async_client: AsyncAgentex) -> None: + async with async_client.agents.with_streaming_response.rpc( + agent_id="agent_id", + method="event/send", + params={}, + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + agent = await response.parse() + assert_matches_type(object, agent, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_rpc(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `agent_id` but received ''"): + await async_client.agents.with_raw_response.rpc( + agent_id="", + method="event/send", + params={}, + ) diff --git a/tests/api_resources/test_client.py b/tests/api_resources/test_client.py new file mode 100644 index 00000000..82802d71 --- /dev/null +++ b/tests/api_resources/test_client.py @@ -0,0 +1,79 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from agentex import Agentex, AsyncAgentex +from tests.utils import assert_matches_type + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestClient: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_get_root(self, client: Agentex) -> None: + client_ = client.get_root() + assert_matches_type(object, client_, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_get_root(self, client: Agentex) -> None: + response = client.with_raw_response.get_root() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + client_ = response.parse() + assert_matches_type(object, client_, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_get_root(self, client: Agentex) -> None: + with client.with_streaming_response.get_root() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + client_ = response.parse() + assert_matches_type(object, client_, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncClient: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_get_root(self, async_client: AsyncAgentex) -> None: + client = await async_client.get_root() + assert_matches_type(object, client, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_get_root(self, async_client: AsyncAgentex) -> None: + response = await async_client.with_raw_response.get_root() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + client = await response.parse() + assert_matches_type(object, client, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_get_root(self, async_client: AsyncAgentex) -> None: + async with async_client.with_streaming_response.get_root() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + client = await response.parse() + assert_matches_type(object, client, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/test_echo.py b/tests/api_resources/test_echo.py new file mode 100644 index 00000000..1a40d73f --- /dev/null +++ b/tests/api_resources/test_echo.py @@ -0,0 +1,91 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from agentex import Agentex, AsyncAgentex +from tests.utils import assert_matches_type + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestEcho: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_send(self, client: Agentex) -> None: + echo = client.echo.send( + message="message", + ) + assert_matches_type(object, echo, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_send(self, client: Agentex) -> None: + response = client.echo.with_raw_response.send( + message="message", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + echo = response.parse() + assert_matches_type(object, echo, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_send(self, client: Agentex) -> None: + with client.echo.with_streaming_response.send( + message="message", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + echo = response.parse() + assert_matches_type(object, echo, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncEcho: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_send(self, async_client: AsyncAgentex) -> None: + echo = await async_client.echo.send( + message="message", + ) + assert_matches_type(object, echo, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_send(self, async_client: AsyncAgentex) -> None: + response = await async_client.echo.with_raw_response.send( + message="message", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + echo = await response.parse() + assert_matches_type(object, echo, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_send(self, async_client: AsyncAgentex) -> None: + async with async_client.echo.with_streaming_response.send( + message="message", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + echo = await response.parse() + assert_matches_type(object, echo, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/test_events.py b/tests/api_resources/test_events.py new file mode 100644 index 00000000..c11c6c94 --- /dev/null +++ b/tests/api_resources/test_events.py @@ -0,0 +1,204 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from agentex import Agentex, AsyncAgentex +from tests.utils import assert_matches_type +from agentex.types import Event, EventListResponse + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestEvents: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_retrieve(self, client: Agentex) -> None: + event = client.events.retrieve( + "event_id", + ) + assert_matches_type(Event, event, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_retrieve(self, client: Agentex) -> None: + response = client.events.with_raw_response.retrieve( + "event_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + event = response.parse() + assert_matches_type(Event, event, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_retrieve(self, client: Agentex) -> None: + with client.events.with_streaming_response.retrieve( + "event_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + event = response.parse() + assert_matches_type(Event, event, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_retrieve(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `event_id` but received ''"): + client.events.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_list(self, client: Agentex) -> None: + event = client.events.list( + agent_id="agent_id", + task_id="task_id", + ) + assert_matches_type(EventListResponse, event, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_list_with_all_params(self, client: Agentex) -> None: + event = client.events.list( + agent_id="agent_id", + task_id="task_id", + last_processed_event_id="last_processed_event_id", + limit=1, + ) + assert_matches_type(EventListResponse, event, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_list(self, client: Agentex) -> None: + response = client.events.with_raw_response.list( + agent_id="agent_id", + task_id="task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + event = response.parse() + assert_matches_type(EventListResponse, event, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_list(self, client: Agentex) -> None: + with client.events.with_streaming_response.list( + agent_id="agent_id", + task_id="task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + event = response.parse() + assert_matches_type(EventListResponse, event, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncEvents: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_retrieve(self, async_client: AsyncAgentex) -> None: + event = await async_client.events.retrieve( + "event_id", + ) + assert_matches_type(Event, event, 
path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncAgentex) -> None: + response = await async_client.events.with_raw_response.retrieve( + "event_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + event = await response.parse() + assert_matches_type(Event, event, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncAgentex) -> None: + async with async_client.events.with_streaming_response.retrieve( + "event_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + event = await response.parse() + assert_matches_type(Event, event, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `event_id` but received ''"): + await async_client.events.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_list(self, async_client: AsyncAgentex) -> None: + event = await async_client.events.list( + agent_id="agent_id", + task_id="task_id", + ) + assert_matches_type(EventListResponse, event, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncAgentex) -> None: + event = await async_client.events.list( + agent_id="agent_id", + task_id="task_id", + last_processed_event_id="last_processed_event_id", + limit=1, + ) + assert_matches_type(EventListResponse, event, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_list(self, async_client: AsyncAgentex) -> None: + response = await async_client.events.with_raw_response.list( + agent_id="agent_id", + task_id="task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + event = await response.parse() + assert_matches_type(EventListResponse, event, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_list(self, async_client: AsyncAgentex) -> None: + async with async_client.events.with_streaming_response.list( + agent_id="agent_id", + task_id="task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + event = await response.parse() + assert_matches_type(EventListResponse, event, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/test_messages.py b/tests/api_resources/test_messages.py new file mode 100644 index 00000000..eb0af8cd --- /dev/null +++ b/tests/api_resources/test_messages.py @@ -0,0 +1,525 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from agentex import Agentex, AsyncAgentex +from tests.utils import assert_matches_type +from agentex.types import ( + TaskMessage, + MessageListResponse, +) + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestMessages: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_create(self, client: Agentex) -> None: + message = client.messages.create( + content={ + "author": "user", + "content": "content", + "type": "text", + }, + task_id="task_id", + ) + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_create_with_all_params(self, client: Agentex) -> None: + message = client.messages.create( + content={ + "author": "user", + "content": "content", + "attachments": [ + { + "file_id": "file_id", + "name": "name", + "size": 0, + "type": "type", + } + ], + "format": "markdown", + "style": "static", + "type": "text", + }, + task_id="task_id", + streaming_status="IN_PROGRESS", + ) + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_create(self, client: Agentex) -> None: + response = client.messages.with_raw_response.create( + content={ + "author": "user", + "content": "content", + "type": "text", + }, + task_id="task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + message = response.parse() + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_create(self, client: Agentex) -> None: + with client.messages.with_streaming_response.create( + content={ + "author": "user", + "content": "content", + "type": "text", + }, + task_id="task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + message = response.parse() + assert_matches_type(TaskMessage, message, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_method_retrieve(self, client: Agentex) -> None: + message = client.messages.retrieve( + "message_id", + ) + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_retrieve(self, client: Agentex) -> None: + response = client.messages.with_raw_response.retrieve( + "message_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + message = response.parse() + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_retrieve(self, client: Agentex) -> None: + with client.messages.with_streaming_response.retrieve( + "message_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + message = response.parse() + assert_matches_type(TaskMessage, message, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_retrieve(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `message_id` but received ''"): + 
client.messages.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_update(self, client: Agentex) -> None: + message = client.messages.update( + message_id="message_id", + content={ + "author": "user", + "content": "content", + "type": "text", + }, + task_id="task_id", + ) + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_update_with_all_params(self, client: Agentex) -> None: + message = client.messages.update( + message_id="message_id", + content={ + "author": "user", + "content": "content", + "attachments": [ + { + "file_id": "file_id", + "name": "name", + "size": 0, + "type": "type", + } + ], + "format": "markdown", + "style": "static", + "type": "text", + }, + task_id="task_id", + streaming_status="IN_PROGRESS", + ) + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_update(self, client: Agentex) -> None: + response = client.messages.with_raw_response.update( + message_id="message_id", + content={ + "author": "user", + "content": "content", + "type": "text", + }, + task_id="task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + message = response.parse() + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_update(self, client: Agentex) -> None: + with client.messages.with_streaming_response.update( + message_id="message_id", + content={ + "author": "user", + "content": "content", + "type": "text", + }, + task_id="task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + message = response.parse() + assert_matches_type(TaskMessage, message, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_update(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `message_id` but received ''"): + client.messages.with_raw_response.update( + message_id="", + content={ + "author": "user", + "content": "content", + "type": "text", + }, + task_id="task_id", + ) + + @pytest.mark.skip() + @parametrize + def test_method_list(self, client: Agentex) -> None: + message = client.messages.list( + task_id="task_id", + ) + assert_matches_type(MessageListResponse, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_list_with_all_params(self, client: Agentex) -> None: + message = client.messages.list( + task_id="task_id", + limit=0, + ) + assert_matches_type(MessageListResponse, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_list(self, client: Agentex) -> None: + response = client.messages.with_raw_response.list( + task_id="task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + message = response.parse() + assert_matches_type(MessageListResponse, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_list(self, client: Agentex) -> None: + with client.messages.with_streaming_response.list( + task_id="task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + message = response.parse() + 
assert_matches_type(MessageListResponse, message, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncMessages: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_create(self, async_client: AsyncAgentex) -> None: + message = await async_client.messages.create( + content={ + "author": "user", + "content": "content", + "type": "text", + }, + task_id="task_id", + ) + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncAgentex) -> None: + message = await async_client.messages.create( + content={ + "author": "user", + "content": "content", + "attachments": [ + { + "file_id": "file_id", + "name": "name", + "size": 0, + "type": "type", + } + ], + "format": "markdown", + "style": "static", + "type": "text", + }, + task_id="task_id", + streaming_status="IN_PROGRESS", + ) + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_create(self, async_client: AsyncAgentex) -> None: + response = await async_client.messages.with_raw_response.create( + content={ + "author": "user", + "content": "content", + "type": "text", + }, + task_id="task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + message = await response.parse() + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_create(self, async_client: AsyncAgentex) -> None: + async with async_client.messages.with_streaming_response.create( + content={ + "author": "user", + "content": "content", + "type": "text", + }, + task_id="task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + message = await response.parse() + assert_matches_type(TaskMessage, message, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_method_retrieve(self, async_client: AsyncAgentex) -> None: + message = await async_client.messages.retrieve( + "message_id", + ) + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncAgentex) -> None: + response = await async_client.messages.with_raw_response.retrieve( + "message_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + message = await response.parse() + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncAgentex) -> None: + async with async_client.messages.with_streaming_response.retrieve( + "message_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + message = await response.parse() + assert_matches_type(TaskMessage, message, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a 
non-empty value for `message_id` but received ''"): + await async_client.messages.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_update(self, async_client: AsyncAgentex) -> None: + message = await async_client.messages.update( + message_id="message_id", + content={ + "author": "user", + "content": "content", + "type": "text", + }, + task_id="task_id", + ) + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_update_with_all_params(self, async_client: AsyncAgentex) -> None: + message = await async_client.messages.update( + message_id="message_id", + content={ + "author": "user", + "content": "content", + "attachments": [ + { + "file_id": "file_id", + "name": "name", + "size": 0, + "type": "type", + } + ], + "format": "markdown", + "style": "static", + "type": "text", + }, + task_id="task_id", + streaming_status="IN_PROGRESS", + ) + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_update(self, async_client: AsyncAgentex) -> None: + response = await async_client.messages.with_raw_response.update( + message_id="message_id", + content={ + "author": "user", + "content": "content", + "type": "text", + }, + task_id="task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + message = await response.parse() + assert_matches_type(TaskMessage, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_update(self, async_client: AsyncAgentex) -> None: + async with async_client.messages.with_streaming_response.update( + message_id="message_id", + content={ + "author": "user", + "content": "content", + "type": "text", + }, + task_id="task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + message = await response.parse() + assert_matches_type(TaskMessage, message, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_update(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `message_id` but received ''"): + await async_client.messages.with_raw_response.update( + message_id="", + content={ + "author": "user", + "content": "content", + "type": "text", + }, + task_id="task_id", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_list(self, async_client: AsyncAgentex) -> None: + message = await async_client.messages.list( + task_id="task_id", + ) + assert_matches_type(MessageListResponse, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncAgentex) -> None: + message = await async_client.messages.list( + task_id="task_id", + limit=0, + ) + assert_matches_type(MessageListResponse, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_list(self, async_client: AsyncAgentex) -> None: + response = await async_client.messages.with_raw_response.list( + task_id="task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + message = await response.parse() + assert_matches_type(MessageListResponse, message, path=["response"]) + + @pytest.mark.skip() + @parametrize + async 
def test_streaming_response_list(self, async_client: AsyncAgentex) -> None: + async with async_client.messages.with_streaming_response.list( + task_id="task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + message = await response.parse() + assert_matches_type(MessageListResponse, message, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/test_spans.py b/tests/api_resources/test_spans.py new file mode 100644 index 00000000..344b7779 --- /dev/null +++ b/tests/api_resources/test_spans.py @@ -0,0 +1,409 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from agentex import Agentex, AsyncAgentex +from tests.utils import assert_matches_type +from agentex.types import Span, SpanListResponse +from agentex._utils import parse_datetime + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestSpans: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_create(self, client: Agentex) -> None: + span = client.spans.create( + name="name", + start_time=parse_datetime("2019-12-27T18:11:19.117Z"), + trace_id="trace_id", + ) + assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_create_with_all_params(self, client: Agentex) -> None: + span = client.spans.create( + name="name", + start_time=parse_datetime("2019-12-27T18:11:19.117Z"), + trace_id="trace_id", + id="id", + data={"foo": "bar"}, + end_time=parse_datetime("2019-12-27T18:11:19.117Z"), + input={"foo": "bar"}, + output={"foo": "bar"}, + parent_id="parent_id", + ) + assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_create(self, client: Agentex) -> None: + response = client.spans.with_raw_response.create( + name="name", + start_time=parse_datetime("2019-12-27T18:11:19.117Z"), + trace_id="trace_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + span = response.parse() + assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_create(self, client: Agentex) -> None: + with client.spans.with_streaming_response.create( + name="name", + start_time=parse_datetime("2019-12-27T18:11:19.117Z"), + trace_id="trace_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + span = response.parse() + assert_matches_type(Span, span, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_method_retrieve(self, client: Agentex) -> None: + span = client.spans.retrieve( + "span_id", + ) + assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_retrieve(self, client: Agentex) -> None: + response = client.spans.with_raw_response.retrieve( + "span_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + span = response.parse() + assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_retrieve(self, 
client: Agentex) -> None: + with client.spans.with_streaming_response.retrieve( + "span_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + span = response.parse() + assert_matches_type(Span, span, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_retrieve(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `span_id` but received ''"): + client.spans.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_update(self, client: Agentex) -> None: + span = client.spans.update( + span_id="span_id", + ) + assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_update_with_all_params(self, client: Agentex) -> None: + span = client.spans.update( + span_id="span_id", + data={"foo": "bar"}, + end_time=parse_datetime("2019-12-27T18:11:19.117Z"), + input={"foo": "bar"}, + name="name", + output={"foo": "bar"}, + parent_id="parent_id", + start_time=parse_datetime("2019-12-27T18:11:19.117Z"), + trace_id="trace_id", + ) + assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_update(self, client: Agentex) -> None: + response = client.spans.with_raw_response.update( + span_id="span_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + span = response.parse() + assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_update(self, client: Agentex) -> None: + with client.spans.with_streaming_response.update( + span_id="span_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + span = response.parse() + assert_matches_type(Span, span, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_update(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `span_id` but received ''"): + client.spans.with_raw_response.update( + span_id="", + ) + + @pytest.mark.skip() + @parametrize + def test_method_list(self, client: Agentex) -> None: + span = client.spans.list() + assert_matches_type(SpanListResponse, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_list_with_all_params(self, client: Agentex) -> None: + span = client.spans.list( + trace_id="trace_id", + ) + assert_matches_type(SpanListResponse, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_list(self, client: Agentex) -> None: + response = client.spans.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + span = response.parse() + assert_matches_type(SpanListResponse, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_list(self, client: Agentex) -> None: + with client.spans.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + span = response.parse() + assert_matches_type(SpanListResponse, span, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class 
TestAsyncSpans: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_create(self, async_client: AsyncAgentex) -> None: + span = await async_client.spans.create( + name="name", + start_time=parse_datetime("2019-12-27T18:11:19.117Z"), + trace_id="trace_id", + ) + assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_create_with_all_params(self, async_client: AsyncAgentex) -> None: + span = await async_client.spans.create( + name="name", + start_time=parse_datetime("2019-12-27T18:11:19.117Z"), + trace_id="trace_id", + id="id", + data={"foo": "bar"}, + end_time=parse_datetime("2019-12-27T18:11:19.117Z"), + input={"foo": "bar"}, + output={"foo": "bar"}, + parent_id="parent_id", + ) + assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_create(self, async_client: AsyncAgentex) -> None: + response = await async_client.spans.with_raw_response.create( + name="name", + start_time=parse_datetime("2019-12-27T18:11:19.117Z"), + trace_id="trace_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + span = await response.parse() + assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_create(self, async_client: AsyncAgentex) -> None: + async with async_client.spans.with_streaming_response.create( + name="name", + start_time=parse_datetime("2019-12-27T18:11:19.117Z"), + trace_id="trace_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + span = await response.parse() + assert_matches_type(Span, span, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_method_retrieve(self, async_client: AsyncAgentex) -> None: + span = await async_client.spans.retrieve( + "span_id", + ) + assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncAgentex) -> None: + response = await async_client.spans.with_raw_response.retrieve( + "span_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + span = await response.parse() + assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncAgentex) -> None: + async with async_client.spans.with_streaming_response.retrieve( + "span_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + span = await response.parse() + assert_matches_type(Span, span, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `span_id` but received ''"): + await async_client.spans.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_update(self, async_client: AsyncAgentex) -> None: + span = await async_client.spans.update( + span_id="span_id", + ) + 
assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_update_with_all_params(self, async_client: AsyncAgentex) -> None: + span = await async_client.spans.update( + span_id="span_id", + data={"foo": "bar"}, + end_time=parse_datetime("2019-12-27T18:11:19.117Z"), + input={"foo": "bar"}, + name="name", + output={"foo": "bar"}, + parent_id="parent_id", + start_time=parse_datetime("2019-12-27T18:11:19.117Z"), + trace_id="trace_id", + ) + assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_update(self, async_client: AsyncAgentex) -> None: + response = await async_client.spans.with_raw_response.update( + span_id="span_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + span = await response.parse() + assert_matches_type(Span, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_update(self, async_client: AsyncAgentex) -> None: + async with async_client.spans.with_streaming_response.update( + span_id="span_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + span = await response.parse() + assert_matches_type(Span, span, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_update(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `span_id` but received ''"): + await async_client.spans.with_raw_response.update( + span_id="", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_list(self, async_client: AsyncAgentex) -> None: + span = await async_client.spans.list() + assert_matches_type(SpanListResponse, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncAgentex) -> None: + span = await async_client.spans.list( + trace_id="trace_id", + ) + assert_matches_type(SpanListResponse, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_list(self, async_client: AsyncAgentex) -> None: + response = await async_client.spans.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + span = await response.parse() + assert_matches_type(SpanListResponse, span, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_list(self, async_client: AsyncAgentex) -> None: + async with async_client.spans.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + span = await response.parse() + assert_matches_type(SpanListResponse, span, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/api_resources/test_states.py b/tests/api_resources/test_states.py new file mode 100644 index 00000000..8e4fc62c --- /dev/null +++ b/tests/api_resources/test_states.py @@ -0,0 +1,454 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from agentex import Agentex, AsyncAgentex +from tests.utils import assert_matches_type +from agentex.types import State, StateListResponse + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestStates: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_create(self, client: Agentex) -> None: + state = client.states.create( + agent_id="agent_id", + state={"foo": "bar"}, + task_id="task_id", + ) + assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_create(self, client: Agentex) -> None: + response = client.states.with_raw_response.create( + agent_id="agent_id", + state={"foo": "bar"}, + task_id="task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + state = response.parse() + assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_create(self, client: Agentex) -> None: + with client.states.with_streaming_response.create( + agent_id="agent_id", + state={"foo": "bar"}, + task_id="task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + state = response.parse() + assert_matches_type(State, state, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_method_retrieve(self, client: Agentex) -> None: + state = client.states.retrieve( + "state_id", + ) + assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_retrieve(self, client: Agentex) -> None: + response = client.states.with_raw_response.retrieve( + "state_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + state = response.parse() + assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_retrieve(self, client: Agentex) -> None: + with client.states.with_streaming_response.retrieve( + "state_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + state = response.parse() + assert_matches_type(State, state, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_retrieve(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `state_id` but received ''"): + client.states.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_update(self, client: Agentex) -> None: + state = client.states.update( + state_id="state_id", + agent_id="agent_id", + state={"foo": "bar"}, + task_id="task_id", + ) + assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_update(self, client: Agentex) -> None: + response = client.states.with_raw_response.update( + state_id="state_id", + agent_id="agent_id", + state={"foo": "bar"}, + task_id="task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + state = response.parse() + 
assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_update(self, client: Agentex) -> None: + with client.states.with_streaming_response.update( + state_id="state_id", + agent_id="agent_id", + state={"foo": "bar"}, + task_id="task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + state = response.parse() + assert_matches_type(State, state, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_update(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `state_id` but received ''"): + client.states.with_raw_response.update( + state_id="", + agent_id="agent_id", + state={"foo": "bar"}, + task_id="task_id", + ) + + @pytest.mark.skip() + @parametrize + def test_method_list(self, client: Agentex) -> None: + state = client.states.list() + assert_matches_type(StateListResponse, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_list_with_all_params(self, client: Agentex) -> None: + state = client.states.list( + agent_id="agent_id", + task_id="task_id", + ) + assert_matches_type(StateListResponse, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_list(self, client: Agentex) -> None: + response = client.states.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + state = response.parse() + assert_matches_type(StateListResponse, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_list(self, client: Agentex) -> None: + with client.states.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + state = response.parse() + assert_matches_type(StateListResponse, state, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_method_delete(self, client: Agentex) -> None: + state = client.states.delete( + "state_id", + ) + assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_delete(self, client: Agentex) -> None: + response = client.states.with_raw_response.delete( + "state_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + state = response.parse() + assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_delete(self, client: Agentex) -> None: + with client.states.with_streaming_response.delete( + "state_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + state = response.parse() + assert_matches_type(State, state, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_delete(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `state_id` but received ''"): + client.states.with_raw_response.delete( + "", + ) + + +class TestAsyncStates: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", 
"aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_create(self, async_client: AsyncAgentex) -> None: + state = await async_client.states.create( + agent_id="agent_id", + state={"foo": "bar"}, + task_id="task_id", + ) + assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_create(self, async_client: AsyncAgentex) -> None: + response = await async_client.states.with_raw_response.create( + agent_id="agent_id", + state={"foo": "bar"}, + task_id="task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + state = await response.parse() + assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_create(self, async_client: AsyncAgentex) -> None: + async with async_client.states.with_streaming_response.create( + agent_id="agent_id", + state={"foo": "bar"}, + task_id="task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + state = await response.parse() + assert_matches_type(State, state, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_method_retrieve(self, async_client: AsyncAgentex) -> None: + state = await async_client.states.retrieve( + "state_id", + ) + assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncAgentex) -> None: + response = await async_client.states.with_raw_response.retrieve( + "state_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + state = await response.parse() + assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncAgentex) -> None: + async with async_client.states.with_streaming_response.retrieve( + "state_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + state = await response.parse() + assert_matches_type(State, state, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `state_id` but received ''"): + await async_client.states.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_update(self, async_client: AsyncAgentex) -> None: + state = await async_client.states.update( + state_id="state_id", + agent_id="agent_id", + state={"foo": "bar"}, + task_id="task_id", + ) + assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_update(self, async_client: AsyncAgentex) -> None: + response = await async_client.states.with_raw_response.update( + state_id="state_id", + agent_id="agent_id", + state={"foo": "bar"}, + task_id="task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + state = await response.parse() + assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_update(self, 
async_client: AsyncAgentex) -> None: + async with async_client.states.with_streaming_response.update( + state_id="state_id", + agent_id="agent_id", + state={"foo": "bar"}, + task_id="task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + state = await response.parse() + assert_matches_type(State, state, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_update(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `state_id` but received ''"): + await async_client.states.with_raw_response.update( + state_id="", + agent_id="agent_id", + state={"foo": "bar"}, + task_id="task_id", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_list(self, async_client: AsyncAgentex) -> None: + state = await async_client.states.list() + assert_matches_type(StateListResponse, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncAgentex) -> None: + state = await async_client.states.list( + agent_id="agent_id", + task_id="task_id", + ) + assert_matches_type(StateListResponse, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_list(self, async_client: AsyncAgentex) -> None: + response = await async_client.states.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + state = await response.parse() + assert_matches_type(StateListResponse, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_list(self, async_client: AsyncAgentex) -> None: + async with async_client.states.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + state = await response.parse() + assert_matches_type(StateListResponse, state, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_method_delete(self, async_client: AsyncAgentex) -> None: + state = await async_client.states.delete( + "state_id", + ) + assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_delete(self, async_client: AsyncAgentex) -> None: + response = await async_client.states.with_raw_response.delete( + "state_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + state = await response.parse() + assert_matches_type(State, state, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_delete(self, async_client: AsyncAgentex) -> None: + async with async_client.states.with_streaming_response.delete( + "state_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + state = await response.parse() + assert_matches_type(State, state, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_delete(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `state_id` but received ''"): + await async_client.states.with_raw_response.delete( + "", 
+ ) diff --git a/tests/api_resources/test_tasks.py b/tests/api_resources/test_tasks.py new file mode 100644 index 00000000..060f051c --- /dev/null +++ b/tests/api_resources/test_tasks.py @@ -0,0 +1,330 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from agentex import Agentex, AsyncAgentex +from tests.utils import assert_matches_type +from agentex.types import Task, TaskListResponse + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestTasks: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_retrieve(self, client: Agentex) -> None: + task = client.tasks.retrieve( + "task_id", + ) + assert_matches_type(Task, task, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_retrieve(self, client: Agentex) -> None: + response = client.tasks.with_raw_response.retrieve( + "task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task = response.parse() + assert_matches_type(Task, task, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_retrieve(self, client: Agentex) -> None: + with client.tasks.with_streaming_response.retrieve( + "task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task = response.parse() + assert_matches_type(Task, task, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_retrieve(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_id` but received ''"): + client.tasks.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_list(self, client: Agentex) -> None: + task = client.tasks.list() + assert_matches_type(TaskListResponse, task, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_list(self, client: Agentex) -> None: + response = client.tasks.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task = response.parse() + assert_matches_type(TaskListResponse, task, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_list(self, client: Agentex) -> None: + with client.tasks.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task = response.parse() + assert_matches_type(TaskListResponse, task, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_method_delete(self, client: Agentex) -> None: + task = client.tasks.delete( + "task_id", + ) + assert_matches_type(Task, task, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_delete(self, client: Agentex) -> None: + response = client.tasks.with_raw_response.delete( + "task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task = response.parse() + assert_matches_type(Task, task, path=["response"]) + + @pytest.mark.skip() + @parametrize + def 
test_streaming_response_delete(self, client: Agentex) -> None: + with client.tasks.with_streaming_response.delete( + "task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task = response.parse() + assert_matches_type(Task, task, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_delete(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_id` but received ''"): + client.tasks.with_raw_response.delete( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_stream_events(self, client: Agentex) -> None: + task_stream = client.tasks.stream_events( + "task_id", + ) + task_stream.response.close() + + @pytest.mark.skip() + @parametrize + def test_raw_response_stream_events(self, client: Agentex) -> None: + response = client.tasks.with_raw_response.stream_events( + "task_id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = response.parse() + stream.close() + + @pytest.mark.skip() + @parametrize + def test_streaming_response_stream_events(self, client: Agentex) -> None: + with client.tasks.with_streaming_response.stream_events( + "task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = response.parse() + stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_stream_events(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_id` but received ''"): + client.tasks.with_raw_response.stream_events( + "", + ) + + +class TestAsyncTasks: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_retrieve(self, async_client: AsyncAgentex) -> None: + task = await async_client.tasks.retrieve( + "task_id", + ) + assert_matches_type(Task, task, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncAgentex) -> None: + response = await async_client.tasks.with_raw_response.retrieve( + "task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task = await response.parse() + assert_matches_type(Task, task, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncAgentex) -> None: + async with async_client.tasks.with_streaming_response.retrieve( + "task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task = await response.parse() + assert_matches_type(Task, task, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_id` but received ''"): + await async_client.tasks.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_list(self, async_client: AsyncAgentex) -> None: + task = await async_client.tasks.list() + 
assert_matches_type(TaskListResponse, task, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_list(self, async_client: AsyncAgentex) -> None: + response = await async_client.tasks.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task = await response.parse() + assert_matches_type(TaskListResponse, task, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_list(self, async_client: AsyncAgentex) -> None: + async with async_client.tasks.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task = await response.parse() + assert_matches_type(TaskListResponse, task, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_method_delete(self, async_client: AsyncAgentex) -> None: + task = await async_client.tasks.delete( + "task_id", + ) + assert_matches_type(Task, task, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_delete(self, async_client: AsyncAgentex) -> None: + response = await async_client.tasks.with_raw_response.delete( + "task_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + task = await response.parse() + assert_matches_type(Task, task, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_delete(self, async_client: AsyncAgentex) -> None: + async with async_client.tasks.with_streaming_response.delete( + "task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + task = await response.parse() + assert_matches_type(Task, task, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_delete(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_id` but received ''"): + await async_client.tasks.with_raw_response.delete( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_stream_events(self, async_client: AsyncAgentex) -> None: + task_stream = await async_client.tasks.stream_events( + "task_id", + ) + await task_stream.response.aclose() + + @pytest.mark.skip() + @parametrize + async def test_raw_response_stream_events(self, async_client: AsyncAgentex) -> None: + response = await async_client.tasks.with_raw_response.stream_events( + "task_id", + ) + + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + stream = await response.parse() + await stream.close() + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_stream_events(self, async_client: AsyncAgentex) -> None: + async with async_client.tasks.with_streaming_response.stream_events( + "task_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + stream = await response.parse() + await stream.close() + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_stream_events(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `task_id` but received ''"): + await 
async_client.tasks.with_raw_response.stream_events( + "", + ) diff --git a/tests/api_resources/test_tracker.py b/tests/api_resources/test_tracker.py new file mode 100644 index 00000000..11366810 --- /dev/null +++ b/tests/api_resources/test_tracker.py @@ -0,0 +1,288 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +import os +from typing import Any, cast + +import pytest + +from agentex import Agentex, AsyncAgentex +from tests.utils import assert_matches_type +from agentex.types import AgentTaskTracker, TrackerListResponse + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + + +class TestTracker: + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) + + @pytest.mark.skip() + @parametrize + def test_method_retrieve(self, client: Agentex) -> None: + tracker = client.tracker.retrieve( + "tracker_id", + ) + assert_matches_type(AgentTaskTracker, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_retrieve(self, client: Agentex) -> None: + response = client.tracker.with_raw_response.retrieve( + "tracker_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + tracker = response.parse() + assert_matches_type(AgentTaskTracker, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_retrieve(self, client: Agentex) -> None: + with client.tracker.with_streaming_response.retrieve( + "tracker_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + tracker = response.parse() + assert_matches_type(AgentTaskTracker, tracker, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_retrieve(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `tracker_id` but received ''"): + client.tracker.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + def test_method_update(self, client: Agentex) -> None: + tracker = client.tracker.update( + tracker_id="tracker_id", + ) + assert_matches_type(AgentTaskTracker, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_update_with_all_params(self, client: Agentex) -> None: + tracker = client.tracker.update( + tracker_id="tracker_id", + last_processed_event_id="last_processed_event_id", + status="status", + status_reason="status_reason", + ) + assert_matches_type(AgentTaskTracker, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_update(self, client: Agentex) -> None: + response = client.tracker.with_raw_response.update( + tracker_id="tracker_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + tracker = response.parse() + assert_matches_type(AgentTaskTracker, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_update(self, client: Agentex) -> None: + with client.tracker.with_streaming_response.update( + tracker_id="tracker_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + tracker = response.parse() + assert_matches_type(AgentTaskTracker, tracker, path=["response"]) + + assert cast(Any, 
response.is_closed) is True + + @pytest.mark.skip() + @parametrize + def test_path_params_update(self, client: Agentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `tracker_id` but received ''"): + client.tracker.with_raw_response.update( + tracker_id="", + ) + + @pytest.mark.skip() + @parametrize + def test_method_list(self, client: Agentex) -> None: + tracker = client.tracker.list() + assert_matches_type(TrackerListResponse, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_method_list_with_all_params(self, client: Agentex) -> None: + tracker = client.tracker.list( + agent_id="agent_id", + task_id="task_id", + ) + assert_matches_type(TrackerListResponse, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_raw_response_list(self, client: Agentex) -> None: + response = client.tracker.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + tracker = response.parse() + assert_matches_type(TrackerListResponse, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + def test_streaming_response_list(self, client: Agentex) -> None: + with client.tracker.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + tracker = response.parse() + assert_matches_type(TrackerListResponse, tracker, path=["response"]) + + assert cast(Any, response.is_closed) is True + + +class TestAsyncTracker: + parametrize = pytest.mark.parametrize( + "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"] + ) + + @pytest.mark.skip() + @parametrize + async def test_method_retrieve(self, async_client: AsyncAgentex) -> None: + tracker = await async_client.tracker.retrieve( + "tracker_id", + ) + assert_matches_type(AgentTaskTracker, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_retrieve(self, async_client: AsyncAgentex) -> None: + response = await async_client.tracker.with_raw_response.retrieve( + "tracker_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + tracker = await response.parse() + assert_matches_type(AgentTaskTracker, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_retrieve(self, async_client: AsyncAgentex) -> None: + async with async_client.tracker.with_streaming_response.retrieve( + "tracker_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + tracker = await response.parse() + assert_matches_type(AgentTaskTracker, tracker, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_retrieve(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `tracker_id` but received ''"): + await async_client.tracker.with_raw_response.retrieve( + "", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_update(self, async_client: AsyncAgentex) -> None: + tracker = await async_client.tracker.update( + tracker_id="tracker_id", + ) + assert_matches_type(AgentTaskTracker, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_update_with_all_params(self, 
async_client: AsyncAgentex) -> None: + tracker = await async_client.tracker.update( + tracker_id="tracker_id", + last_processed_event_id="last_processed_event_id", + status="status", + status_reason="status_reason", + ) + assert_matches_type(AgentTaskTracker, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_update(self, async_client: AsyncAgentex) -> None: + response = await async_client.tracker.with_raw_response.update( + tracker_id="tracker_id", + ) + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + tracker = await response.parse() + assert_matches_type(AgentTaskTracker, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_update(self, async_client: AsyncAgentex) -> None: + async with async_client.tracker.with_streaming_response.update( + tracker_id="tracker_id", + ) as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + tracker = await response.parse() + assert_matches_type(AgentTaskTracker, tracker, path=["response"]) + + assert cast(Any, response.is_closed) is True + + @pytest.mark.skip() + @parametrize + async def test_path_params_update(self, async_client: AsyncAgentex) -> None: + with pytest.raises(ValueError, match=r"Expected a non-empty value for `tracker_id` but received ''"): + await async_client.tracker.with_raw_response.update( + tracker_id="", + ) + + @pytest.mark.skip() + @parametrize + async def test_method_list(self, async_client: AsyncAgentex) -> None: + tracker = await async_client.tracker.list() + assert_matches_type(TrackerListResponse, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_method_list_with_all_params(self, async_client: AsyncAgentex) -> None: + tracker = await async_client.tracker.list( + agent_id="agent_id", + task_id="task_id", + ) + assert_matches_type(TrackerListResponse, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_raw_response_list(self, async_client: AsyncAgentex) -> None: + response = await async_client.tracker.with_raw_response.list() + + assert response.is_closed is True + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + tracker = await response.parse() + assert_matches_type(TrackerListResponse, tracker, path=["response"]) + + @pytest.mark.skip() + @parametrize + async def test_streaming_response_list(self, async_client: AsyncAgentex) -> None: + async with async_client.tracker.with_streaming_response.list() as response: + assert not response.is_closed + assert response.http_request.headers.get("X-Stainless-Lang") == "python" + + tracker = await response.parse() + assert_matches_type(TrackerListResponse, tracker, path=["response"]) + + assert cast(Any, response.is_closed) is True diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..d08e65cf --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,84 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import os +import logging +from typing import TYPE_CHECKING, Iterator, AsyncIterator + +import httpx +import pytest +from pytest_asyncio import is_async_test + +from agentex import Agentex, AsyncAgentex, DefaultAioHttpClient +from agentex._utils import is_dict + +if TYPE_CHECKING: + from _pytest.fixtures import FixtureRequest # pyright: ignore[reportPrivateImportUsage] + +pytest.register_assert_rewrite("tests.utils") + +logging.getLogger("agentex").setLevel(logging.DEBUG) + + +# automatically add `pytest.mark.asyncio()` to all of our async tests +# so we don't have to add that boilerplate everywhere +def pytest_collection_modifyitems(items: list[pytest.Function]) -> None: + pytest_asyncio_tests = (item for item in items if is_async_test(item)) + session_scope_marker = pytest.mark.asyncio(loop_scope="session") + for async_test in pytest_asyncio_tests: + async_test.add_marker(session_scope_marker, append=False) + + # We skip tests that use both the aiohttp client and respx_mock as respx_mock + # doesn't support custom transports. + for item in items: + if "async_client" not in item.fixturenames or "respx_mock" not in item.fixturenames: + continue + + if not hasattr(item, "callspec"): + continue + + async_client_param = item.callspec.params.get("async_client") + if is_dict(async_client_param) and async_client_param.get("http_client") == "aiohttp": + item.add_marker(pytest.mark.skip(reason="aiohttp client is not compatible with respx_mock")) + + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") + +api_key = "My API Key" + + +@pytest.fixture(scope="session") +def client(request: FixtureRequest) -> Iterator[Agentex]: + strict = getattr(request, "param", True) + if not isinstance(strict, bool): + raise TypeError(f"Unexpected fixture parameter type {type(strict)}, expected {bool}") + + with Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client: + yield client + + +@pytest.fixture(scope="session") +async def async_client(request: FixtureRequest) -> AsyncIterator[AsyncAgentex]: + param = getattr(request, "param", True) + + # defaults + strict = True + http_client: None | httpx.AsyncClient = None + + if isinstance(param, bool): + strict = param + elif is_dict(param): + strict = param.get("strict", True) + assert isinstance(strict, bool) + + http_client_type = param.get("http_client", "httpx") + if http_client_type == "aiohttp": + http_client = DefaultAioHttpClient() + else: + raise TypeError(f"Unexpected fixture parameter type {type(param)}, expected bool or dict") + + async with AsyncAgentex( + base_url=base_url, api_key=api_key, _strict_response_validation=strict, http_client=http_client + ) as client: + yield client diff --git a/tests/sample_file.txt b/tests/sample_file.txt new file mode 100644 index 00000000..af5626b4 --- /dev/null +++ b/tests/sample_file.txt @@ -0,0 +1 @@ +Hello, world! diff --git a/tests/test_client.py b/tests/test_client.py new file mode 100644 index 00000000..bb8ce7ed --- /dev/null +++ b/tests/test_client.py @@ -0,0 +1,1746 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +import gc +import os +import sys +import json +import time +import asyncio +import inspect +import subprocess +import tracemalloc +from typing import Any, Union, cast +from textwrap import dedent +from unittest import mock +from typing_extensions import Literal + +import httpx +import pytest +from respx import MockRouter +from pydantic import ValidationError + +from agentex import Agentex, AsyncAgentex, APIResponseValidationError +from agentex._types import Omit +from agentex._models import BaseModel, FinalRequestOptions +from agentex._exceptions import APIStatusError, APITimeoutError, APIResponseValidationError +from agentex._base_client import ( + DEFAULT_TIMEOUT, + HTTPX_DEFAULT_TIMEOUT, + BaseClient, + DefaultHttpxClient, + DefaultAsyncHttpxClient, + make_request_options, +) + +from .utils import update_env + +base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") +api_key = "My API Key" + + +def _get_params(client: BaseClient[Any, Any]) -> dict[str, str]: + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + url = httpx.URL(request.url) + return dict(url.params) + + +def _low_retry_timeout(*_args: Any, **_kwargs: Any) -> float: + return 0.1 + + +def _get_open_connections(client: Agentex | AsyncAgentex) -> int: + transport = client._client._transport + assert isinstance(transport, httpx.HTTPTransport) or isinstance(transport, httpx.AsyncHTTPTransport) + + pool = transport._pool + return len(pool._requests) + + +class TestAgentex: + client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True) + + @pytest.mark.respx(base_url=base_url) + def test_raw_response(self, respx_mock: MockRouter) -> None: + respx_mock.post("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = self.client.post("/foo", cast_to=httpx.Response) + assert response.status_code == 200 + assert isinstance(response, httpx.Response) + assert response.json() == {"foo": "bar"} + + @pytest.mark.respx(base_url=base_url) + def test_raw_response_for_binary(self, respx_mock: MockRouter) -> None: + respx_mock.post("/foo").mock( + return_value=httpx.Response(200, headers={"Content-Type": "application/binary"}, content='{"foo": "bar"}') + ) + + response = self.client.post("/foo", cast_to=httpx.Response) + assert response.status_code == 200 + assert isinstance(response, httpx.Response) + assert response.json() == {"foo": "bar"} + + def test_copy(self) -> None: + copied = self.client.copy() + assert id(copied) != id(self.client) + + copied = self.client.copy(api_key="another My API Key") + assert copied.api_key == "another My API Key" + assert self.client.api_key == "My API Key" + + def test_copy_default_options(self) -> None: + # options that have a default are overridden correctly + copied = self.client.copy(max_retries=7) + assert copied.max_retries == 7 + assert self.client.max_retries == 2 + + copied2 = copied.copy(max_retries=6) + assert copied2.max_retries == 6 + assert copied.max_retries == 7 + + # timeout + assert isinstance(self.client.timeout, httpx.Timeout) + copied = self.client.copy(timeout=None) + assert copied.timeout is None + assert isinstance(self.client.timeout, httpx.Timeout) + + def test_copy_default_headers(self) -> None: + client = Agentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} + ) + assert client.default_headers["X-Foo"] == "bar" + + # does not override the already given value when not specified + copied = 
client.copy() + assert copied.default_headers["X-Foo"] == "bar" + + # merges already given headers + copied = client.copy(default_headers={"X-Bar": "stainless"}) + assert copied.default_headers["X-Foo"] == "bar" + assert copied.default_headers["X-Bar"] == "stainless" + + # uses new values for any already given headers + copied = client.copy(default_headers={"X-Foo": "stainless"}) + assert copied.default_headers["X-Foo"] == "stainless" + + # set_default_headers + + # completely overrides already set values + copied = client.copy(set_default_headers={}) + assert copied.default_headers.get("X-Foo") is None + + copied = client.copy(set_default_headers={"X-Bar": "Robert"}) + assert copied.default_headers["X-Bar"] == "Robert" + + with pytest.raises( + ValueError, + match="`default_headers` and `set_default_headers` arguments are mutually exclusive", + ): + client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"}) + + def test_copy_default_query(self) -> None: + client = Agentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"foo": "bar"} + ) + assert _get_params(client)["foo"] == "bar" + + # does not override the already given value when not specified + copied = client.copy() + assert _get_params(copied)["foo"] == "bar" + + # merges already given params + copied = client.copy(default_query={"bar": "stainless"}) + params = _get_params(copied) + assert params["foo"] == "bar" + assert params["bar"] == "stainless" + + # uses new values for any already given headers + copied = client.copy(default_query={"foo": "stainless"}) + assert _get_params(copied)["foo"] == "stainless" + + # set_default_query + + # completely overrides already set values + copied = client.copy(set_default_query={}) + assert _get_params(copied) == {} + + copied = client.copy(set_default_query={"bar": "Robert"}) + assert _get_params(copied)["bar"] == "Robert" + + with pytest.raises( + ValueError, + # TODO: update + match="`default_query` and `set_default_query` arguments are mutually exclusive", + ): + client.copy(set_default_query={}, default_query={"foo": "Bar"}) + + def test_copy_signature(self) -> None: + # ensure the same parameters that can be passed to the client are defined in the `.copy()` method + init_signature = inspect.signature( + # mypy doesn't like that we access the `__init__` property. + self.client.__init__, # type: ignore[misc] + ) + copy_signature = inspect.signature(self.client.copy) + exclude_params = {"transport", "proxies", "_strict_response_validation"} + + for name in init_signature.parameters.keys(): + if name in exclude_params: + continue + + copy_param = copy_signature.parameters.get(name) + assert copy_param is not None, f"copy() signature is missing the {name} param" + + @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12") + def test_copy_build_request(self) -> None: + options = FinalRequestOptions(method="get", url="/foo") + + def build_request(options: FinalRequestOptions) -> None: + client = self.client.copy() + client._build_request(options) + + # ensure that the machinery is warmed up before tracing starts. 
+ build_request(options) + gc.collect() + + tracemalloc.start(1000) + + snapshot_before = tracemalloc.take_snapshot() + + ITERATIONS = 10 + for _ in range(ITERATIONS): + build_request(options) + + gc.collect() + snapshot_after = tracemalloc.take_snapshot() + + tracemalloc.stop() + + def add_leak(leaks: list[tracemalloc.StatisticDiff], diff: tracemalloc.StatisticDiff) -> None: + if diff.count == 0: + # Avoid false positives by considering only leaks (i.e. allocations that persist). + return + + if diff.count % ITERATIONS != 0: + # Avoid false positives by considering only leaks that appear per iteration. + return + + for frame in diff.traceback: + if any( + frame.filename.endswith(fragment) + for fragment in [ + # to_raw_response_wrapper leaks through the @functools.wraps() decorator. + # + # removing the decorator fixes the leak for reasons we don't understand. + "agentex/_legacy_response.py", + "agentex/_response.py", + # pydantic.BaseModel.model_dump || pydantic.BaseModel.dict leak memory for some reason. + "agentex/_compat.py", + # Standard library leaks we don't care about. + "/logging/__init__.py", + ] + ): + return + + leaks.append(diff) + + leaks: list[tracemalloc.StatisticDiff] = [] + for diff in snapshot_after.compare_to(snapshot_before, "traceback"): + add_leak(leaks, diff) + if leaks: + for leak in leaks: + print("MEMORY LEAK:", leak) + for frame in leak.traceback: + print(frame) + raise AssertionError() + + def test_request_timeout(self) -> None: + request = self.client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT + + request = self.client._build_request( + FinalRequestOptions(method="get", url="/foo", timeout=httpx.Timeout(100.0)) + ) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(100.0) + + def test_client_timeout_option(self) -> None: + client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True, timeout=httpx.Timeout(0)) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(0) + + def test_http_client_timeout_option(self) -> None: + # custom timeout given to the httpx client should be used + with httpx.Client(timeout=None) as http_client: + client = Agentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(None) + + # no timeout given to the httpx client should not use the httpx default + with httpx.Client() as http_client: + client = Agentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT + + # explicitly passing the default timeout currently results in it being ignored + with httpx.Client(timeout=HTTPX_DEFAULT_TIMEOUT) as http_client: + client = Agentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = 
httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT # our default + + async def test_invalid_http_client(self) -> None: + with pytest.raises(TypeError, match="Invalid `http_client` arg"): + async with httpx.AsyncClient() as http_client: + Agentex( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + http_client=cast(Any, http_client), + ) + + def test_default_headers_option(self) -> None: + client = Agentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} + ) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("x-foo") == "bar" + assert request.headers.get("x-stainless-lang") == "python" + + client2 = Agentex( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + default_headers={ + "X-Foo": "stainless", + "X-Stainless-Lang": "my-overriding-header", + }, + ) + request = client2._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("x-foo") == "stainless" + assert request.headers.get("x-stainless-lang") == "my-overriding-header" + + def test_validate_headers(self) -> None: + client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("Authorization") == f"Bearer {api_key}" + + with update_env(**{"AGENTEX_SDK_API_KEY": Omit()}): + client2 = Agentex(base_url=base_url, api_key=None, _strict_response_validation=True) + + with pytest.raises( + TypeError, + match="Could not resolve authentication method. Expected the api_key to be set. Or for the `Authorization` headers to be explicitly omitted", + ): + client2._build_request(FinalRequestOptions(method="get", url="/foo")) + + request2 = client2._build_request( + FinalRequestOptions(method="get", url="/foo", headers={"Authorization": Omit()}) + ) + assert request2.headers.get("Authorization") is None + + def test_default_query_option(self) -> None: + client = Agentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"query_param": "bar"} + ) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + url = httpx.URL(request.url) + assert dict(url.params) == {"query_param": "bar"} + + request = client._build_request( + FinalRequestOptions( + method="get", + url="/foo", + params={"foo": "baz", "query_param": "overridden"}, + ) + ) + url = httpx.URL(request.url) + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} + + def test_request_extra_json(self) -> None: + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + extra_json={"baz": False}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"foo": "bar", "baz": False} + + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + extra_json={"baz": False}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"baz": False} + + # `extra_json` takes priority over `json_data` when keys clash + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar", "baz": True}, + extra_json={"baz": None}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"foo": "bar", "baz": None} + + def 
test_request_extra_headers(self) -> None: + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options(extra_headers={"X-Foo": "Foo"}), + ), + ) + assert request.headers.get("X-Foo") == "Foo" + + # `extra_headers` takes priority over `default_headers` when keys clash + request = self.client.with_options(default_headers={"X-Bar": "true"})._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + extra_headers={"X-Bar": "false"}, + ), + ), + ) + assert request.headers.get("X-Bar") == "false" + + def test_request_extra_query(self) -> None: + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + extra_query={"my_query_param": "Foo"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"my_query_param": "Foo"} + + # if both `query` and `extra_query` are given, they are merged + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + query={"bar": "1"}, + extra_query={"foo": "2"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"bar": "1", "foo": "2"} + + # `extra_query` takes priority over `query` when keys clash + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + query={"foo": "1"}, + extra_query={"foo": "2"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"foo": "2"} + + def test_multipart_repeating_array(self, client: Agentex) -> None: + request = client._build_request( + FinalRequestOptions.construct( + method="post", + url="/foo", + headers={"Content-Type": "multipart/form-data; boundary=6b7ba517decee4a450543ea6ae821c82"}, + json_data={"array": ["foo", "bar"]}, + files=[("foo.txt", b"hello world")], + ) + ) + + assert request.read().split(b"\r\n") == [ + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="array[]"', + b"", + b"foo", + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="array[]"', + b"", + b"bar", + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="foo.txt"; filename="upload"', + b"Content-Type: application/octet-stream", + b"", + b"hello world", + b"--6b7ba517decee4a450543ea6ae821c82--", + b"", + ] + + @pytest.mark.respx(base_url=base_url) + def test_basic_union_response(self, respx_mock: MockRouter) -> None: + class Model1(BaseModel): + name: str + + class Model2(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model2) + assert response.foo == "bar" + + @pytest.mark.respx(base_url=base_url) + def test_union_response_different_types(self, respx_mock: MockRouter) -> None: + """Union of objects with the same field name using a different type""" + + class Model1(BaseModel): + foo: int + + class Model2(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model2) + assert response.foo == "bar" + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": 1})) + + response = self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model1) + assert 
response.foo == 1 + + @pytest.mark.respx(base_url=base_url) + def test_non_application_json_content_type_for_json_data(self, respx_mock: MockRouter) -> None: + """ + Response that sets Content-Type to something other than application/json but returns json data + """ + + class Model(BaseModel): + foo: int + + respx_mock.get("/foo").mock( + return_value=httpx.Response( + 200, + content=json.dumps({"foo": 2}), + headers={"Content-Type": "application/text"}, + ) + ) + + response = self.client.get("/foo", cast_to=Model) + assert isinstance(response, Model) + assert response.foo == 2 + + def test_base_url_setter(self) -> None: + client = Agentex(base_url="https://example.com/from_init", api_key=api_key, _strict_response_validation=True) + assert client.base_url == "https://example.com/from_init/" + + client.base_url = "https://example.com/from_setter" # type: ignore[assignment] + + assert client.base_url == "https://example.com/from_setter/" + + def test_base_url_env(self) -> None: + with update_env(AGENTEX_BASE_URL="http://localhost:5000/from/env"): + client = Agentex(api_key=api_key, _strict_response_validation=True) + assert client.base_url == "http://localhost:5000/from/env/" + + @pytest.mark.parametrize( + "client", + [ + Agentex(base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True), + Agentex( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.Client(), + ), + ], + ids=["standard", "custom http client"], + ) + def test_base_url_trailing_slash(self, client: Agentex) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "http://localhost:5000/custom/path/foo" + + @pytest.mark.parametrize( + "client", + [ + Agentex(base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True), + Agentex( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.Client(), + ), + ], + ids=["standard", "custom http client"], + ) + def test_base_url_no_trailing_slash(self, client: Agentex) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "http://localhost:5000/custom/path/foo" + + @pytest.mark.parametrize( + "client", + [ + Agentex(base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True), + Agentex( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.Client(), + ), + ], + ids=["standard", "custom http client"], + ) + def test_absolute_request_url(self, client: Agentex) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="https://myapi.com/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "https://myapi.com/foo" + + def test_copied_client_does_not_close_http(self) -> None: + client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True) + assert not client.is_closed() + + copied = client.copy() + assert copied is not client + + del copied + + assert not client.is_closed() + + def test_client_context_manager(self) -> None: + client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True) + with client as c2: + assert c2 is client + assert not c2.is_closed() + assert not 
client.is_closed() + assert client.is_closed() + + @pytest.mark.respx(base_url=base_url) + def test_client_response_validation_error(self, respx_mock: MockRouter) -> None: + class Model(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": {"invalid": True}})) + + with pytest.raises(APIResponseValidationError) as exc: + self.client.get("/foo", cast_to=Model) + + assert isinstance(exc.value.__cause__, ValidationError) + + def test_client_max_retries_validation(self) -> None: + with pytest.raises(TypeError, match=r"max_retries cannot be None"): + Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True, max_retries=cast(Any, None)) + + @pytest.mark.respx(base_url=base_url) + def test_received_text_for_expected_json(self, respx_mock: MockRouter) -> None: + class Model(BaseModel): + name: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, text="my-custom-format")) + + strict_client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True) + + with pytest.raises(APIResponseValidationError): + strict_client.get("/foo", cast_to=Model) + + client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=False) + + response = client.get("/foo", cast_to=Model) + assert isinstance(response, str) # type: ignore[unreachable] + + @pytest.mark.parametrize( + "remaining_retries,retry_after,timeout", + [ + [3, "20", 20], + [3, "0", 0.5], + [3, "-10", 0.5], + [3, "60", 60], + [3, "61", 0.5], + [3, "Fri, 29 Sep 2023 16:26:57 GMT", 20], + [3, "Fri, 29 Sep 2023 16:26:37 GMT", 0.5], + [3, "Fri, 29 Sep 2023 16:26:27 GMT", 0.5], + [3, "Fri, 29 Sep 2023 16:27:37 GMT", 60], + [3, "Fri, 29 Sep 2023 16:27:38 GMT", 0.5], + [3, "99999999999999999999999999999999999", 0.5], + [3, "Zun, 29 Sep 2023 16:26:27 GMT", 0.5], + [3, "", 0.5], + [2, "", 0.5 * 2.0], + [1, "", 0.5 * 4.0], + [-1100, "", 8], # test large number potentially overflowing + ], + ) + @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) + def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str, timeout: float) -> None: + client = Agentex(base_url=base_url, api_key=api_key, _strict_response_validation=True) + + headers = httpx.Headers({"retry-after": retry_after}) + options = FinalRequestOptions(method="get", url="/foo", max_retries=3) + calculated = client._calculate_retry_timeout(remaining_retries, options, headers) + assert calculated == pytest.approx(timeout, 0.5 * 0.875) # pyright: ignore[reportUnknownMemberType] + + @mock.patch("agentex._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + def test_retrying_timeout_errors_doesnt_leak(self, respx_mock: MockRouter, client: Agentex) -> None: + respx_mock.post("/echo").mock(side_effect=httpx.TimeoutException("Test timeout error")) + + with pytest.raises(APITimeoutError): + client.echo.with_streaming_response.send(message="message").__enter__() + + assert _get_open_connections(self.client) == 0 + + @mock.patch("agentex._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, client: Agentex) -> None: + respx_mock.post("/echo").mock(return_value=httpx.Response(500)) + + with pytest.raises(APIStatusError): + client.echo.with_streaming_response.send(message="message").__enter__() + assert _get_open_connections(self.client) == 0 + + 
@pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("agentex._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + @pytest.mark.parametrize("failure_mode", ["status", "exception"]) + def test_retries_taken( + self, + client: Agentex, + failures_before_success: int, + failure_mode: Literal["status", "exception"], + respx_mock: MockRouter, + ) -> None: + client = client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + if failure_mode == "exception": + raise RuntimeError("oops") + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/echo").mock(side_effect=retry_handler) + + response = client.echo.with_raw_response.send(message="message") + + assert response.retries_taken == failures_before_success + assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("agentex._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + def test_omit_retry_count_header( + self, client: Agentex, failures_before_success: int, respx_mock: MockRouter + ) -> None: + client = client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/echo").mock(side_effect=retry_handler) + + response = client.echo.with_raw_response.send( + message="message", extra_headers={"x-stainless-retry-count": Omit()} + ) + + assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("agentex._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + def test_overwrite_retry_count_header( + self, client: Agentex, failures_before_success: int, respx_mock: MockRouter + ) -> None: + client = client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/echo").mock(side_effect=retry_handler) + + response = client.echo.with_raw_response.send( + message="message", extra_headers={"x-stainless-retry-count": "42"} + ) + + assert response.http_request.headers.get("x-stainless-retry-count") == "42" + + def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None: + # Test that the proxy environment variables are set correctly + monkeypatch.setenv("HTTPS_PROXY", "https://example.org") + + client = DefaultHttpxClient() + + mounts = tuple(client._mounts.items()) + assert len(mounts) == 1 + assert mounts[0][0].pattern == "https://" + + @pytest.mark.filterwarnings("ignore:.*deprecated.*:DeprecationWarning") + def test_default_client_creation(self) -> None: + # Ensure that the client can be initialized without any exceptions + DefaultHttpxClient( + verify=True, + cert=None, + trust_env=True, + http1=True, + http2=False, + limits=httpx.Limits(max_connections=100, max_keepalive_connections=20), + ) + + 
@pytest.mark.respx(base_url=base_url) + def test_follow_redirects(self, respx_mock: MockRouter) -> None: + # Test that the default follow_redirects=True allows following redirects + respx_mock.post("/redirect").mock( + return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) + ) + respx_mock.get("/redirected").mock(return_value=httpx.Response(200, json={"status": "ok"})) + + response = self.client.post("/redirect", body={"key": "value"}, cast_to=httpx.Response) + assert response.status_code == 200 + assert response.json() == {"status": "ok"} + + @pytest.mark.respx(base_url=base_url) + def test_follow_redirects_disabled(self, respx_mock: MockRouter) -> None: + # Test that follow_redirects=False prevents following redirects + respx_mock.post("/redirect").mock( + return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) + ) + + with pytest.raises(APIStatusError) as exc_info: + self.client.post( + "/redirect", body={"key": "value"}, options={"follow_redirects": False}, cast_to=httpx.Response + ) + + assert exc_info.value.response.status_code == 302 + assert exc_info.value.response.headers["Location"] == f"{base_url}/redirected" + + +class TestAsyncAgentex: + client = AsyncAgentex(base_url=base_url, api_key=api_key, _strict_response_validation=True) + + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + async def test_raw_response(self, respx_mock: MockRouter) -> None: + respx_mock.post("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = await self.client.post("/foo", cast_to=httpx.Response) + assert response.status_code == 200 + assert isinstance(response, httpx.Response) + assert response.json() == {"foo": "bar"} + + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + async def test_raw_response_for_binary(self, respx_mock: MockRouter) -> None: + respx_mock.post("/foo").mock( + return_value=httpx.Response(200, headers={"Content-Type": "application/binary"}, content='{"foo": "bar"}') + ) + + response = await self.client.post("/foo", cast_to=httpx.Response) + assert response.status_code == 200 + assert isinstance(response, httpx.Response) + assert response.json() == {"foo": "bar"} + + def test_copy(self) -> None: + copied = self.client.copy() + assert id(copied) != id(self.client) + + copied = self.client.copy(api_key="another My API Key") + assert copied.api_key == "another My API Key" + assert self.client.api_key == "My API Key" + + def test_copy_default_options(self) -> None: + # options that have a default are overridden correctly + copied = self.client.copy(max_retries=7) + assert copied.max_retries == 7 + assert self.client.max_retries == 2 + + copied2 = copied.copy(max_retries=6) + assert copied2.max_retries == 6 + assert copied.max_retries == 7 + + # timeout + assert isinstance(self.client.timeout, httpx.Timeout) + copied = self.client.copy(timeout=None) + assert copied.timeout is None + assert isinstance(self.client.timeout, httpx.Timeout) + + def test_copy_default_headers(self) -> None: + client = AsyncAgentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} + ) + assert client.default_headers["X-Foo"] == "bar" + + # does not override the already given value when not specified + copied = client.copy() + assert copied.default_headers["X-Foo"] == "bar" + + # merges already given headers + copied = client.copy(default_headers={"X-Bar": "stainless"}) + assert copied.default_headers["X-Foo"] == "bar" + assert 
copied.default_headers["X-Bar"] == "stainless" + + # uses new values for any already given headers + copied = client.copy(default_headers={"X-Foo": "stainless"}) + assert copied.default_headers["X-Foo"] == "stainless" + + # set_default_headers + + # completely overrides already set values + copied = client.copy(set_default_headers={}) + assert copied.default_headers.get("X-Foo") is None + + copied = client.copy(set_default_headers={"X-Bar": "Robert"}) + assert copied.default_headers["X-Bar"] == "Robert" + + with pytest.raises( + ValueError, + match="`default_headers` and `set_default_headers` arguments are mutually exclusive", + ): + client.copy(set_default_headers={}, default_headers={"X-Foo": "Bar"}) + + def test_copy_default_query(self) -> None: + client = AsyncAgentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"foo": "bar"} + ) + assert _get_params(client)["foo"] == "bar" + + # does not override the already given value when not specified + copied = client.copy() + assert _get_params(copied)["foo"] == "bar" + + # merges already given params + copied = client.copy(default_query={"bar": "stainless"}) + params = _get_params(copied) + assert params["foo"] == "bar" + assert params["bar"] == "stainless" + + # uses new values for any already given headers + copied = client.copy(default_query={"foo": "stainless"}) + assert _get_params(copied)["foo"] == "stainless" + + # set_default_query + + # completely overrides already set values + copied = client.copy(set_default_query={}) + assert _get_params(copied) == {} + + copied = client.copy(set_default_query={"bar": "Robert"}) + assert _get_params(copied)["bar"] == "Robert" + + with pytest.raises( + ValueError, + # TODO: update + match="`default_query` and `set_default_query` arguments are mutually exclusive", + ): + client.copy(set_default_query={}, default_query={"foo": "Bar"}) + + def test_copy_signature(self) -> None: + # ensure the same parameters that can be passed to the client are defined in the `.copy()` method + init_signature = inspect.signature( + # mypy doesn't like that we access the `__init__` property. + self.client.__init__, # type: ignore[misc] + ) + copy_signature = inspect.signature(self.client.copy) + exclude_params = {"transport", "proxies", "_strict_response_validation"} + + for name in init_signature.parameters.keys(): + if name in exclude_params: + continue + + copy_param = copy_signature.parameters.get(name) + assert copy_param is not None, f"copy() signature is missing the {name} param" + + @pytest.mark.skipif(sys.version_info >= (3, 10), reason="fails because of a memory leak that started from 3.12") + def test_copy_build_request(self) -> None: + options = FinalRequestOptions(method="get", url="/foo") + + def build_request(options: FinalRequestOptions) -> None: + client = self.client.copy() + client._build_request(options) + + # ensure that the machinery is warmed up before tracing starts. + build_request(options) + gc.collect() + + tracemalloc.start(1000) + + snapshot_before = tracemalloc.take_snapshot() + + ITERATIONS = 10 + for _ in range(ITERATIONS): + build_request(options) + + gc.collect() + snapshot_after = tracemalloc.take_snapshot() + + tracemalloc.stop() + + def add_leak(leaks: list[tracemalloc.StatisticDiff], diff: tracemalloc.StatisticDiff) -> None: + if diff.count == 0: + # Avoid false positives by considering only leaks (i.e. allocations that persist). 
+ return + + if diff.count % ITERATIONS != 0: + # Avoid false positives by considering only leaks that appear per iteration. + return + + for frame in diff.traceback: + if any( + frame.filename.endswith(fragment) + for fragment in [ + # to_raw_response_wrapper leaks through the @functools.wraps() decorator. + # + # removing the decorator fixes the leak for reasons we don't understand. + "agentex/_legacy_response.py", + "agentex/_response.py", + # pydantic.BaseModel.model_dump || pydantic.BaseModel.dict leak memory for some reason. + "agentex/_compat.py", + # Standard library leaks we don't care about. + "/logging/__init__.py", + ] + ): + return + + leaks.append(diff) + + leaks: list[tracemalloc.StatisticDiff] = [] + for diff in snapshot_after.compare_to(snapshot_before, "traceback"): + add_leak(leaks, diff) + if leaks: + for leak in leaks: + print("MEMORY LEAK:", leak) + for frame in leak.traceback: + print(frame) + raise AssertionError() + + async def test_request_timeout(self) -> None: + request = self.client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT + + request = self.client._build_request( + FinalRequestOptions(method="get", url="/foo", timeout=httpx.Timeout(100.0)) + ) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(100.0) + + async def test_client_timeout_option(self) -> None: + client = AsyncAgentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, timeout=httpx.Timeout(0) + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(0) + + async def test_http_client_timeout_option(self) -> None: + # custom timeout given to the httpx client should be used + async with httpx.AsyncClient(timeout=None) as http_client: + client = AsyncAgentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == httpx.Timeout(None) + + # no timeout given to the httpx client should not use the httpx default + async with httpx.AsyncClient() as http_client: + client = AsyncAgentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT + + # explicitly passing the default timeout currently results in it being ignored + async with httpx.AsyncClient(timeout=HTTPX_DEFAULT_TIMEOUT) as http_client: + client = AsyncAgentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, http_client=http_client + ) + + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + timeout = httpx.Timeout(**request.extensions["timeout"]) # type: ignore + assert timeout == DEFAULT_TIMEOUT # our default + + def test_invalid_http_client(self) -> None: + with pytest.raises(TypeError, match="Invalid `http_client` arg"): + with httpx.Client() as http_client: + AsyncAgentex( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + http_client=cast(Any, http_client), + ) + + def 
test_default_headers_option(self) -> None: + client = AsyncAgentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_headers={"X-Foo": "bar"} + ) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("x-foo") == "bar" + assert request.headers.get("x-stainless-lang") == "python" + + client2 = AsyncAgentex( + base_url=base_url, + api_key=api_key, + _strict_response_validation=True, + default_headers={ + "X-Foo": "stainless", + "X-Stainless-Lang": "my-overriding-header", + }, + ) + request = client2._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("x-foo") == "stainless" + assert request.headers.get("x-stainless-lang") == "my-overriding-header" + + def test_validate_headers(self) -> None: + client = AsyncAgentex(base_url=base_url, api_key=api_key, _strict_response_validation=True) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + assert request.headers.get("Authorization") == f"Bearer {api_key}" + + with update_env(**{"AGENTEX_SDK_API_KEY": Omit()}): + client2 = AsyncAgentex(base_url=base_url, api_key=None, _strict_response_validation=True) + + with pytest.raises( + TypeError, + match="Could not resolve authentication method. Expected the api_key to be set. Or for the `Authorization` headers to be explicitly omitted", + ): + client2._build_request(FinalRequestOptions(method="get", url="/foo")) + + request2 = client2._build_request( + FinalRequestOptions(method="get", url="/foo", headers={"Authorization": Omit()}) + ) + assert request2.headers.get("Authorization") is None + + def test_default_query_option(self) -> None: + client = AsyncAgentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, default_query={"query_param": "bar"} + ) + request = client._build_request(FinalRequestOptions(method="get", url="/foo")) + url = httpx.URL(request.url) + assert dict(url.params) == {"query_param": "bar"} + + request = client._build_request( + FinalRequestOptions( + method="get", + url="/foo", + params={"foo": "baz", "query_param": "overridden"}, + ) + ) + url = httpx.URL(request.url) + assert dict(url.params) == {"foo": "baz", "query_param": "overridden"} + + def test_request_extra_json(self) -> None: + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + extra_json={"baz": False}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"foo": "bar", "baz": False} + + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + extra_json={"baz": False}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"baz": False} + + # `extra_json` takes priority over `json_data` when keys clash + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar", "baz": True}, + extra_json={"baz": None}, + ), + ) + data = json.loads(request.content.decode("utf-8")) + assert data == {"foo": "bar", "baz": None} + + def test_request_extra_headers(self) -> None: + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options(extra_headers={"X-Foo": "Foo"}), + ), + ) + assert request.headers.get("X-Foo") == "Foo" + + # `extra_headers` takes priority over `default_headers` when keys clash + request = self.client.with_options(default_headers={"X-Bar": "true"})._build_request( + 
FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + extra_headers={"X-Bar": "false"}, + ), + ), + ) + assert request.headers.get("X-Bar") == "false" + + def test_request_extra_query(self) -> None: + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + extra_query={"my_query_param": "Foo"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"my_query_param": "Foo"} + + # if both `query` and `extra_query` are given, they are merged + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + query={"bar": "1"}, + extra_query={"foo": "2"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"bar": "1", "foo": "2"} + + # `extra_query` takes priority over `query` when keys clash + request = self.client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + **make_request_options( + query={"foo": "1"}, + extra_query={"foo": "2"}, + ), + ), + ) + params = dict(request.url.params) + assert params == {"foo": "2"} + + def test_multipart_repeating_array(self, async_client: AsyncAgentex) -> None: + request = async_client._build_request( + FinalRequestOptions.construct( + method="post", + url="/foo", + headers={"Content-Type": "multipart/form-data; boundary=6b7ba517decee4a450543ea6ae821c82"}, + json_data={"array": ["foo", "bar"]}, + files=[("foo.txt", b"hello world")], + ) + ) + + assert request.read().split(b"\r\n") == [ + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="array[]"', + b"", + b"foo", + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="array[]"', + b"", + b"bar", + b"--6b7ba517decee4a450543ea6ae821c82", + b'Content-Disposition: form-data; name="foo.txt"; filename="upload"', + b"Content-Type: application/octet-stream", + b"", + b"hello world", + b"--6b7ba517decee4a450543ea6ae821c82--", + b"", + ] + + @pytest.mark.respx(base_url=base_url) + async def test_basic_union_response(self, respx_mock: MockRouter) -> None: + class Model1(BaseModel): + name: str + + class Model2(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = await self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model2) + assert response.foo == "bar" + + @pytest.mark.respx(base_url=base_url) + async def test_union_response_different_types(self, respx_mock: MockRouter) -> None: + """Union of objects with the same field name using a different type""" + + class Model1(BaseModel): + foo: int + + class Model2(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": "bar"})) + + response = await self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model2) + assert response.foo == "bar" + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": 1})) + + response = await self.client.get("/foo", cast_to=cast(Any, Union[Model1, Model2])) + assert isinstance(response, Model1) + assert response.foo == 1 + + @pytest.mark.respx(base_url=base_url) + async def test_non_application_json_content_type_for_json_data(self, respx_mock: MockRouter) -> None: + """ + Response that sets Content-Type to something other than application/json but returns json data + """ + + class Model(BaseModel): + foo: int + + respx_mock.get("/foo").mock( + 
return_value=httpx.Response( + 200, + content=json.dumps({"foo": 2}), + headers={"Content-Type": "application/text"}, + ) + ) + + response = await self.client.get("/foo", cast_to=Model) + assert isinstance(response, Model) + assert response.foo == 2 + + def test_base_url_setter(self) -> None: + client = AsyncAgentex( + base_url="https://example.com/from_init", api_key=api_key, _strict_response_validation=True + ) + assert client.base_url == "https://example.com/from_init/" + + client.base_url = "https://example.com/from_setter" # type: ignore[assignment] + + assert client.base_url == "https://example.com/from_setter/" + + def test_base_url_env(self) -> None: + with update_env(AGENTEX_BASE_URL="http://localhost:5000/from/env"): + client = AsyncAgentex(api_key=api_key, _strict_response_validation=True) + assert client.base_url == "http://localhost:5000/from/env/" + + @pytest.mark.parametrize( + "client", + [ + AsyncAgentex( + base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True + ), + AsyncAgentex( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.AsyncClient(), + ), + ], + ids=["standard", "custom http client"], + ) + def test_base_url_trailing_slash(self, client: AsyncAgentex) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "http://localhost:5000/custom/path/foo" + + @pytest.mark.parametrize( + "client", + [ + AsyncAgentex( + base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True + ), + AsyncAgentex( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.AsyncClient(), + ), + ], + ids=["standard", "custom http client"], + ) + def test_base_url_no_trailing_slash(self, client: AsyncAgentex) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "http://localhost:5000/custom/path/foo" + + @pytest.mark.parametrize( + "client", + [ + AsyncAgentex( + base_url="http://localhost:5000/custom/path/", api_key=api_key, _strict_response_validation=True + ), + AsyncAgentex( + base_url="http://localhost:5000/custom/path/", + api_key=api_key, + _strict_response_validation=True, + http_client=httpx.AsyncClient(), + ), + ], + ids=["standard", "custom http client"], + ) + def test_absolute_request_url(self, client: AsyncAgentex) -> None: + request = client._build_request( + FinalRequestOptions( + method="post", + url="https://myapi.com/foo", + json_data={"foo": "bar"}, + ), + ) + assert request.url == "https://myapi.com/foo" + + async def test_copied_client_does_not_close_http(self) -> None: + client = AsyncAgentex(base_url=base_url, api_key=api_key, _strict_response_validation=True) + assert not client.is_closed() + + copied = client.copy() + assert copied is not client + + del copied + + await asyncio.sleep(0.2) + assert not client.is_closed() + + async def test_client_context_manager(self) -> None: + client = AsyncAgentex(base_url=base_url, api_key=api_key, _strict_response_validation=True) + async with client as c2: + assert c2 is client + assert not c2.is_closed() + assert not client.is_closed() + assert client.is_closed() + + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + async def test_client_response_validation_error(self, respx_mock: MockRouter) -> 
None: + class Model(BaseModel): + foo: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, json={"foo": {"invalid": True}})) + + with pytest.raises(APIResponseValidationError) as exc: + await self.client.get("/foo", cast_to=Model) + + assert isinstance(exc.value.__cause__, ValidationError) + + async def test_client_max_retries_validation(self) -> None: + with pytest.raises(TypeError, match=r"max_retries cannot be None"): + AsyncAgentex( + base_url=base_url, api_key=api_key, _strict_response_validation=True, max_retries=cast(Any, None) + ) + + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + async def test_received_text_for_expected_json(self, respx_mock: MockRouter) -> None: + class Model(BaseModel): + name: str + + respx_mock.get("/foo").mock(return_value=httpx.Response(200, text="my-custom-format")) + + strict_client = AsyncAgentex(base_url=base_url, api_key=api_key, _strict_response_validation=True) + + with pytest.raises(APIResponseValidationError): + await strict_client.get("/foo", cast_to=Model) + + client = AsyncAgentex(base_url=base_url, api_key=api_key, _strict_response_validation=False) + + response = await client.get("/foo", cast_to=Model) + assert isinstance(response, str) # type: ignore[unreachable] + + @pytest.mark.parametrize( + "remaining_retries,retry_after,timeout", + [ + [3, "20", 20], + [3, "0", 0.5], + [3, "-10", 0.5], + [3, "60", 60], + [3, "61", 0.5], + [3, "Fri, 29 Sep 2023 16:26:57 GMT", 20], + [3, "Fri, 29 Sep 2023 16:26:37 GMT", 0.5], + [3, "Fri, 29 Sep 2023 16:26:27 GMT", 0.5], + [3, "Fri, 29 Sep 2023 16:27:37 GMT", 60], + [3, "Fri, 29 Sep 2023 16:27:38 GMT", 0.5], + [3, "99999999999999999999999999999999999", 0.5], + [3, "Zun, 29 Sep 2023 16:26:27 GMT", 0.5], + [3, "", 0.5], + [2, "", 0.5 * 2.0], + [1, "", 0.5 * 4.0], + [-1100, "", 8], # test large number potentially overflowing + ], + ) + @mock.patch("time.time", mock.MagicMock(return_value=1696004797)) + @pytest.mark.asyncio + async def test_parse_retry_after_header(self, remaining_retries: int, retry_after: str, timeout: float) -> None: + client = AsyncAgentex(base_url=base_url, api_key=api_key, _strict_response_validation=True) + + headers = httpx.Headers({"retry-after": retry_after}) + options = FinalRequestOptions(method="get", url="/foo", max_retries=3) + calculated = client._calculate_retry_timeout(remaining_retries, options, headers) + assert calculated == pytest.approx(timeout, 0.5 * 0.875) # pyright: ignore[reportUnknownMemberType] + + @mock.patch("agentex._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + async def test_retrying_timeout_errors_doesnt_leak( + self, respx_mock: MockRouter, async_client: AsyncAgentex + ) -> None: + respx_mock.post("/echo").mock(side_effect=httpx.TimeoutException("Test timeout error")) + + with pytest.raises(APITimeoutError): + await async_client.echo.with_streaming_response.send(message="message").__aenter__() + + assert _get_open_connections(self.client) == 0 + + @mock.patch("agentex._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + async def test_retrying_status_errors_doesnt_leak(self, respx_mock: MockRouter, async_client: AsyncAgentex) -> None: + respx_mock.post("/echo").mock(return_value=httpx.Response(500)) + + with pytest.raises(APIStatusError): + await async_client.echo.with_streaming_response.send(message="message").__aenter__() + assert _get_open_connections(self.client) == 0 + + 
@pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("agentex._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + @pytest.mark.parametrize("failure_mode", ["status", "exception"]) + async def test_retries_taken( + self, + async_client: AsyncAgentex, + failures_before_success: int, + failure_mode: Literal["status", "exception"], + respx_mock: MockRouter, + ) -> None: + client = async_client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + if failure_mode == "exception": + raise RuntimeError("oops") + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/echo").mock(side_effect=retry_handler) + + response = await client.echo.with_raw_response.send(message="message") + + assert response.retries_taken == failures_before_success + assert int(response.http_request.headers.get("x-stainless-retry-count")) == failures_before_success + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("agentex._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + async def test_omit_retry_count_header( + self, async_client: AsyncAgentex, failures_before_success: int, respx_mock: MockRouter + ) -> None: + client = async_client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/echo").mock(side_effect=retry_handler) + + response = await client.echo.with_raw_response.send( + message="message", extra_headers={"x-stainless-retry-count": Omit()} + ) + + assert len(response.http_request.headers.get_list("x-stainless-retry-count")) == 0 + + @pytest.mark.parametrize("failures_before_success", [0, 2, 4]) + @mock.patch("agentex._base_client.BaseClient._calculate_retry_timeout", _low_retry_timeout) + @pytest.mark.respx(base_url=base_url) + @pytest.mark.asyncio + async def test_overwrite_retry_count_header( + self, async_client: AsyncAgentex, failures_before_success: int, respx_mock: MockRouter + ) -> None: + client = async_client.with_options(max_retries=4) + + nb_retries = 0 + + def retry_handler(_request: httpx.Request) -> httpx.Response: + nonlocal nb_retries + if nb_retries < failures_before_success: + nb_retries += 1 + return httpx.Response(500) + return httpx.Response(200) + + respx_mock.post("/echo").mock(side_effect=retry_handler) + + response = await client.echo.with_raw_response.send( + message="message", extra_headers={"x-stainless-retry-count": "42"} + ) + + assert response.http_request.headers.get("x-stainless-retry-count") == "42" + + def test_get_platform(self) -> None: + # A previous implementation of asyncify could leave threads unterminated when + # used with nest_asyncio. + # + # Since nest_asyncio.apply() is global and cannot be un-applied, this + # test is run in a separate process to avoid affecting other tests. 
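+        # Run the snippet below in a fresh interpreter; a non-zero exit code or a hung process fails the test.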
+ test_code = dedent(""" + import asyncio + import nest_asyncio + import threading + + from agentex._utils import asyncify + from agentex._base_client import get_platform + + async def test_main() -> None: + result = await asyncify(get_platform)() + print(result) + for thread in threading.enumerate(): + print(thread.name) + + nest_asyncio.apply() + asyncio.run(test_main()) + """) + with subprocess.Popen( + [sys.executable, "-c", test_code], + text=True, + ) as process: + timeout = 10 # seconds + + start_time = time.monotonic() + while True: + return_code = process.poll() + if return_code is not None: + if return_code != 0: + raise AssertionError("calling get_platform using asyncify resulted in a non-zero exit code") + + # success + break + + if time.monotonic() - start_time > timeout: + process.kill() + raise AssertionError("calling get_platform using asyncify resulted in a hung process") + + time.sleep(0.1) + + async def test_proxy_environment_variables(self, monkeypatch: pytest.MonkeyPatch) -> None: + # Test that the proxy environment variables are set correctly + monkeypatch.setenv("HTTPS_PROXY", "https://example.org") + + client = DefaultAsyncHttpxClient() + + mounts = tuple(client._mounts.items()) + assert len(mounts) == 1 + assert mounts[0][0].pattern == "https://" + + @pytest.mark.filterwarnings("ignore:.*deprecated.*:DeprecationWarning") + async def test_default_client_creation(self) -> None: + # Ensure that the client can be initialized without any exceptions + DefaultAsyncHttpxClient( + verify=True, + cert=None, + trust_env=True, + http1=True, + http2=False, + limits=httpx.Limits(max_connections=100, max_keepalive_connections=20), + ) + + @pytest.mark.respx(base_url=base_url) + async def test_follow_redirects(self, respx_mock: MockRouter) -> None: + # Test that the default follow_redirects=True allows following redirects + respx_mock.post("/redirect").mock( + return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) + ) + respx_mock.get("/redirected").mock(return_value=httpx.Response(200, json={"status": "ok"})) + + response = await self.client.post("/redirect", body={"key": "value"}, cast_to=httpx.Response) + assert response.status_code == 200 + assert response.json() == {"status": "ok"} + + @pytest.mark.respx(base_url=base_url) + async def test_follow_redirects_disabled(self, respx_mock: MockRouter) -> None: + # Test that follow_redirects=False prevents following redirects + respx_mock.post("/redirect").mock( + return_value=httpx.Response(302, headers={"Location": f"{base_url}/redirected"}) + ) + + with pytest.raises(APIStatusError) as exc_info: + await self.client.post( + "/redirect", body={"key": "value"}, options={"follow_redirects": False}, cast_to=httpx.Response + ) + + assert exc_info.value.response.status_code == 302 + assert exc_info.value.response.headers["Location"] == f"{base_url}/redirected" diff --git a/tests/test_deepcopy.py b/tests/test_deepcopy.py new file mode 100644 index 00000000..3fc74ccb --- /dev/null +++ b/tests/test_deepcopy.py @@ -0,0 +1,58 @@ +from agentex._utils import deepcopy_minimal + + +def assert_different_identities(obj1: object, obj2: object) -> None: + assert obj1 == obj2 + assert id(obj1) != id(obj2) + + +def test_simple_dict() -> None: + obj1 = {"foo": "bar"} + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + + +def test_nested_dict() -> None: + obj1 = {"foo": {"bar": True}} + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + assert_different_identities(obj1["foo"], 
obj2["foo"]) + + +def test_complex_nested_dict() -> None: + obj1 = {"foo": {"bar": [{"hello": "world"}]}} + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + assert_different_identities(obj1["foo"], obj2["foo"]) + assert_different_identities(obj1["foo"]["bar"], obj2["foo"]["bar"]) + assert_different_identities(obj1["foo"]["bar"][0], obj2["foo"]["bar"][0]) + + +def test_simple_list() -> None: + obj1 = ["a", "b", "c"] + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + + +def test_nested_list() -> None: + obj1 = ["a", [1, 2, 3]] + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + assert_different_identities(obj1[1], obj2[1]) + + +class MyObject: ... + + +def test_ignores_other_types() -> None: + # custom classes + my_obj = MyObject() + obj1 = {"foo": my_obj} + obj2 = deepcopy_minimal(obj1) + assert_different_identities(obj1, obj2) + assert obj1["foo"] is my_obj + + # tuples + obj3 = ("a", "b") + obj4 = deepcopy_minimal(obj3) + assert obj3 is obj4 diff --git a/tests/test_extract_files.py b/tests/test_extract_files.py new file mode 100644 index 00000000..43b12be2 --- /dev/null +++ b/tests/test_extract_files.py @@ -0,0 +1,64 @@ +from __future__ import annotations + +from typing import Sequence + +import pytest + +from agentex._types import FileTypes +from agentex._utils import extract_files + + +def test_removes_files_from_input() -> None: + query = {"foo": "bar"} + assert extract_files(query, paths=[]) == [] + assert query == {"foo": "bar"} + + query2 = {"foo": b"Bar", "hello": "world"} + assert extract_files(query2, paths=[["foo"]]) == [("foo", b"Bar")] + assert query2 == {"hello": "world"} + + query3 = {"foo": {"foo": {"bar": b"Bar"}}, "hello": "world"} + assert extract_files(query3, paths=[["foo", "foo", "bar"]]) == [("foo[foo][bar]", b"Bar")] + assert query3 == {"foo": {"foo": {}}, "hello": "world"} + + query4 = {"foo": {"bar": b"Bar", "baz": "foo"}, "hello": "world"} + assert extract_files(query4, paths=[["foo", "bar"]]) == [("foo[bar]", b"Bar")] + assert query4 == {"hello": "world", "foo": {"baz": "foo"}} + + +def test_multiple_files() -> None: + query = {"documents": [{"file": b"My first file"}, {"file": b"My second file"}]} + assert extract_files(query, paths=[["documents", "", "file"]]) == [ + ("documents[][file]", b"My first file"), + ("documents[][file]", b"My second file"), + ] + assert query == {"documents": [{}, {}]} + + +@pytest.mark.parametrize( + "query,paths,expected", + [ + [ + {"foo": {"bar": "baz"}}, + [["foo", "", "bar"]], + [], + ], + [ + {"foo": ["bar", "baz"]}, + [["foo", "bar"]], + [], + ], + [ + {"foo": {"bar": "baz"}}, + [["foo", "foo"]], + [], + ], + ], + ids=["dict expecting array", "array expecting dict", "unknown keys"], +) +def test_ignores_incorrect_paths( + query: dict[str, object], + paths: Sequence[Sequence[str]], + expected: list[tuple[str, FileTypes]], +) -> None: + assert extract_files(query, paths=paths) == expected diff --git a/tests/test_files.py b/tests/test_files.py new file mode 100644 index 00000000..e88f71f4 --- /dev/null +++ b/tests/test_files.py @@ -0,0 +1,51 @@ +from pathlib import Path + +import anyio +import pytest +from dirty_equals import IsDict, IsList, IsBytes, IsTuple + +from agentex._files import to_httpx_files, async_to_httpx_files + +readme_path = Path(__file__).parent.parent.joinpath("README.md") + + +def test_pathlib_includes_file_name() -> None: + result = to_httpx_files({"file": readme_path}) + print(result) + assert result == IsDict({"file": 
IsTuple("README.md", IsBytes())}) + + +def test_tuple_input() -> None: + result = to_httpx_files([("file", readme_path)]) + print(result) + assert result == IsList(IsTuple("file", IsTuple("README.md", IsBytes()))) + + +@pytest.mark.asyncio +async def test_async_pathlib_includes_file_name() -> None: + result = await async_to_httpx_files({"file": readme_path}) + print(result) + assert result == IsDict({"file": IsTuple("README.md", IsBytes())}) + + +@pytest.mark.asyncio +async def test_async_supports_anyio_path() -> None: + result = await async_to_httpx_files({"file": anyio.Path(readme_path)}) + print(result) + assert result == IsDict({"file": IsTuple("README.md", IsBytes())}) + + +@pytest.mark.asyncio +async def test_async_tuple_input() -> None: + result = await async_to_httpx_files([("file", readme_path)]) + print(result) + assert result == IsList(IsTuple("file", IsTuple("README.md", IsBytes()))) + + +def test_string_not_allowed() -> None: + with pytest.raises(TypeError, match="Expected file types input to be a FileContent type or to be a tuple"): + to_httpx_files( + { + "file": "foo", # type: ignore + } + ) diff --git a/tests/test_models.py b/tests/test_models.py new file mode 100644 index 00000000..a8867d60 --- /dev/null +++ b/tests/test_models.py @@ -0,0 +1,936 @@ +import json +from typing import Any, Dict, List, Union, Optional, cast +from datetime import datetime, timezone +from typing_extensions import Literal, Annotated, TypeAliasType + +import pytest +import pydantic +from pydantic import Field + +from agentex._utils import PropertyInfo +from agentex._compat import PYDANTIC_V2, parse_obj, model_dump, model_json +from agentex._models import BaseModel, construct_type + + +class BasicModel(BaseModel): + foo: str + + +@pytest.mark.parametrize("value", ["hello", 1], ids=["correct type", "mismatched"]) +def test_basic(value: object) -> None: + m = BasicModel.construct(foo=value) + assert m.foo == value + + +def test_directly_nested_model() -> None: + class NestedModel(BaseModel): + nested: BasicModel + + m = NestedModel.construct(nested={"foo": "Foo!"}) + assert m.nested.foo == "Foo!" + + # mismatched types + m = NestedModel.construct(nested="hello!") + assert cast(Any, m.nested) == "hello!" 
+ + +def test_optional_nested_model() -> None: + class NestedModel(BaseModel): + nested: Optional[BasicModel] + + m1 = NestedModel.construct(nested=None) + assert m1.nested is None + + m2 = NestedModel.construct(nested={"foo": "bar"}) + assert m2.nested is not None + assert m2.nested.foo == "bar" + + # mismatched types + m3 = NestedModel.construct(nested={"foo"}) + assert isinstance(cast(Any, m3.nested), set) + assert cast(Any, m3.nested) == {"foo"} + + +def test_list_nested_model() -> None: + class NestedModel(BaseModel): + nested: List[BasicModel] + + m = NestedModel.construct(nested=[{"foo": "bar"}, {"foo": "2"}]) + assert m.nested is not None + assert isinstance(m.nested, list) + assert len(m.nested) == 2 + assert m.nested[0].foo == "bar" + assert m.nested[1].foo == "2" + + # mismatched types + m = NestedModel.construct(nested=True) + assert cast(Any, m.nested) is True + + m = NestedModel.construct(nested=[False]) + assert cast(Any, m.nested) == [False] + + +def test_optional_list_nested_model() -> None: + class NestedModel(BaseModel): + nested: Optional[List[BasicModel]] + + m1 = NestedModel.construct(nested=[{"foo": "bar"}, {"foo": "2"}]) + assert m1.nested is not None + assert isinstance(m1.nested, list) + assert len(m1.nested) == 2 + assert m1.nested[0].foo == "bar" + assert m1.nested[1].foo == "2" + + m2 = NestedModel.construct(nested=None) + assert m2.nested is None + + # mismatched types + m3 = NestedModel.construct(nested={1}) + assert cast(Any, m3.nested) == {1} + + m4 = NestedModel.construct(nested=[False]) + assert cast(Any, m4.nested) == [False] + + +def test_list_optional_items_nested_model() -> None: + class NestedModel(BaseModel): + nested: List[Optional[BasicModel]] + + m = NestedModel.construct(nested=[None, {"foo": "bar"}]) + assert m.nested is not None + assert isinstance(m.nested, list) + assert len(m.nested) == 2 + assert m.nested[0] is None + assert m.nested[1] is not None + assert m.nested[1].foo == "bar" + + # mismatched types + m3 = NestedModel.construct(nested="foo") + assert cast(Any, m3.nested) == "foo" + + m4 = NestedModel.construct(nested=[False]) + assert cast(Any, m4.nested) == [False] + + +def test_list_mismatched_type() -> None: + class NestedModel(BaseModel): + nested: List[str] + + m = NestedModel.construct(nested=False) + assert cast(Any, m.nested) is False + + +def test_raw_dictionary() -> None: + class NestedModel(BaseModel): + nested: Dict[str, str] + + m = NestedModel.construct(nested={"hello": "world"}) + assert m.nested == {"hello": "world"} + + # mismatched types + m = NestedModel.construct(nested=False) + assert cast(Any, m.nested) is False + + +def test_nested_dictionary_model() -> None: + class NestedModel(BaseModel): + nested: Dict[str, BasicModel] + + m = NestedModel.construct(nested={"hello": {"foo": "bar"}}) + assert isinstance(m.nested, dict) + assert m.nested["hello"].foo == "bar" + + # mismatched types + m = NestedModel.construct(nested={"hello": False}) + assert cast(Any, m.nested["hello"]) is False + + +def test_unknown_fields() -> None: + m1 = BasicModel.construct(foo="foo", unknown=1) + assert m1.foo == "foo" + assert cast(Any, m1).unknown == 1 + + m2 = BasicModel.construct(foo="foo", unknown={"foo_bar": True}) + assert m2.foo == "foo" + assert cast(Any, m2).unknown == {"foo_bar": True} + + assert model_dump(m2) == {"foo": "foo", "unknown": {"foo_bar": True}} + + +def test_strict_validation_unknown_fields() -> None: + class Model(BaseModel): + foo: str + + model = parse_obj(Model, dict(foo="hello!", user="Robert")) + assert 
model.foo == "hello!" + assert cast(Any, model).user == "Robert" + + assert model_dump(model) == {"foo": "hello!", "user": "Robert"} + + +def test_aliases() -> None: + class Model(BaseModel): + my_field: int = Field(alias="myField") + + m = Model.construct(myField=1) + assert m.my_field == 1 + + # mismatched types + m = Model.construct(myField={"hello": False}) + assert cast(Any, m.my_field) == {"hello": False} + + +def test_repr() -> None: + model = BasicModel(foo="bar") + assert str(model) == "BasicModel(foo='bar')" + assert repr(model) == "BasicModel(foo='bar')" + + +def test_repr_nested_model() -> None: + class Child(BaseModel): + name: str + age: int + + class Parent(BaseModel): + name: str + child: Child + + model = Parent(name="Robert", child=Child(name="Foo", age=5)) + assert str(model) == "Parent(name='Robert', child=Child(name='Foo', age=5))" + assert repr(model) == "Parent(name='Robert', child=Child(name='Foo', age=5))" + + +def test_optional_list() -> None: + class Submodel(BaseModel): + name: str + + class Model(BaseModel): + items: Optional[List[Submodel]] + + m = Model.construct(items=None) + assert m.items is None + + m = Model.construct(items=[]) + assert m.items == [] + + m = Model.construct(items=[{"name": "Robert"}]) + assert m.items is not None + assert len(m.items) == 1 + assert m.items[0].name == "Robert" + + +def test_nested_union_of_models() -> None: + class Submodel1(BaseModel): + bar: bool + + class Submodel2(BaseModel): + thing: str + + class Model(BaseModel): + foo: Union[Submodel1, Submodel2] + + m = Model.construct(foo={"thing": "hello"}) + assert isinstance(m.foo, Submodel2) + assert m.foo.thing == "hello" + + +def test_nested_union_of_mixed_types() -> None: + class Submodel1(BaseModel): + bar: bool + + class Model(BaseModel): + foo: Union[Submodel1, Literal[True], Literal["CARD_HOLDER"]] + + m = Model.construct(foo=True) + assert m.foo is True + + m = Model.construct(foo="CARD_HOLDER") + assert m.foo == "CARD_HOLDER" + + m = Model.construct(foo={"bar": False}) + assert isinstance(m.foo, Submodel1) + assert m.foo.bar is False + + +def test_nested_union_multiple_variants() -> None: + class Submodel1(BaseModel): + bar: bool + + class Submodel2(BaseModel): + thing: str + + class Submodel3(BaseModel): + foo: int + + class Model(BaseModel): + foo: Union[Submodel1, Submodel2, None, Submodel3] + + m = Model.construct(foo={"thing": "hello"}) + assert isinstance(m.foo, Submodel2) + assert m.foo.thing == "hello" + + m = Model.construct(foo=None) + assert m.foo is None + + m = Model.construct() + assert m.foo is None + + m = Model.construct(foo={"foo": "1"}) + assert isinstance(m.foo, Submodel3) + assert m.foo.foo == 1 + + +def test_nested_union_invalid_data() -> None: + class Submodel1(BaseModel): + level: int + + class Submodel2(BaseModel): + name: str + + class Model(BaseModel): + foo: Union[Submodel1, Submodel2] + + m = Model.construct(foo=True) + assert cast(bool, m.foo) is True + + m = Model.construct(foo={"name": 3}) + if PYDANTIC_V2: + assert isinstance(m.foo, Submodel1) + assert m.foo.name == 3 # type: ignore + else: + assert isinstance(m.foo, Submodel2) + assert m.foo.name == "3" + + +def test_list_of_unions() -> None: + class Submodel1(BaseModel): + level: int + + class Submodel2(BaseModel): + name: str + + class Model(BaseModel): + items: List[Union[Submodel1, Submodel2]] + + m = Model.construct(items=[{"level": 1}, {"name": "Robert"}]) + assert len(m.items) == 2 + assert isinstance(m.items[0], Submodel1) + assert m.items[0].level == 1 + assert 
isinstance(m.items[1], Submodel2) + assert m.items[1].name == "Robert" + + m = Model.construct(items=[{"level": -1}, 156]) + assert len(m.items) == 2 + assert isinstance(m.items[0], Submodel1) + assert m.items[0].level == -1 + assert cast(Any, m.items[1]) == 156 + + +def test_union_of_lists() -> None: + class SubModel1(BaseModel): + level: int + + class SubModel2(BaseModel): + name: str + + class Model(BaseModel): + items: Union[List[SubModel1], List[SubModel2]] + + # with one valid entry + m = Model.construct(items=[{"name": "Robert"}]) + assert len(m.items) == 1 + assert isinstance(m.items[0], SubModel2) + assert m.items[0].name == "Robert" + + # with two entries pointing to different types + m = Model.construct(items=[{"level": 1}, {"name": "Robert"}]) + assert len(m.items) == 2 + assert isinstance(m.items[0], SubModel1) + assert m.items[0].level == 1 + assert isinstance(m.items[1], SubModel1) + assert cast(Any, m.items[1]).name == "Robert" + + # with two entries pointing to *completely* different types + m = Model.construct(items=[{"level": -1}, 156]) + assert len(m.items) == 2 + assert isinstance(m.items[0], SubModel1) + assert m.items[0].level == -1 + assert cast(Any, m.items[1]) == 156 + + +def test_dict_of_union() -> None: + class SubModel1(BaseModel): + name: str + + class SubModel2(BaseModel): + foo: str + + class Model(BaseModel): + data: Dict[str, Union[SubModel1, SubModel2]] + + m = Model.construct(data={"hello": {"name": "there"}, "foo": {"foo": "bar"}}) + assert len(list(m.data.keys())) == 2 + assert isinstance(m.data["hello"], SubModel1) + assert m.data["hello"].name == "there" + assert isinstance(m.data["foo"], SubModel2) + assert m.data["foo"].foo == "bar" + + # TODO: test mismatched type + + +def test_double_nested_union() -> None: + class SubModel1(BaseModel): + name: str + + class SubModel2(BaseModel): + bar: str + + class Model(BaseModel): + data: Dict[str, List[Union[SubModel1, SubModel2]]] + + m = Model.construct(data={"foo": [{"bar": "baz"}, {"name": "Robert"}]}) + assert len(m.data["foo"]) == 2 + + entry1 = m.data["foo"][0] + assert isinstance(entry1, SubModel2) + assert entry1.bar == "baz" + + entry2 = m.data["foo"][1] + assert isinstance(entry2, SubModel1) + assert entry2.name == "Robert" + + # TODO: test mismatched type + + +def test_union_of_dict() -> None: + class SubModel1(BaseModel): + name: str + + class SubModel2(BaseModel): + foo: str + + class Model(BaseModel): + data: Union[Dict[str, SubModel1], Dict[str, SubModel2]] + + m = Model.construct(data={"hello": {"name": "there"}, "foo": {"foo": "bar"}}) + assert len(list(m.data.keys())) == 2 + assert isinstance(m.data["hello"], SubModel1) + assert m.data["hello"].name == "there" + assert isinstance(m.data["foo"], SubModel1) + assert cast(Any, m.data["foo"]).foo == "bar" + + +def test_iso8601_datetime() -> None: + class Model(BaseModel): + created_at: datetime + + expected = datetime(2019, 12, 27, 18, 11, 19, 117000, tzinfo=timezone.utc) + + if PYDANTIC_V2: + expected_json = '{"created_at":"2019-12-27T18:11:19.117000Z"}' + else: + expected_json = '{"created_at": "2019-12-27T18:11:19.117000+00:00"}' + + model = Model.construct(created_at="2019-12-27T18:11:19.117Z") + assert model.created_at == expected + assert model_json(model) == expected_json + + model = parse_obj(Model, dict(created_at="2019-12-27T18:11:19.117Z")) + assert model.created_at == expected + assert model_json(model) == expected_json + + +def test_does_not_coerce_int() -> None: + class Model(BaseModel): + bar: int + + assert 
Model.construct(bar=1).bar == 1 + assert Model.construct(bar=10.9).bar == 10.9 + assert Model.construct(bar="19").bar == "19" # type: ignore[comparison-overlap] + assert Model.construct(bar=False).bar is False + + +def test_int_to_float_safe_conversion() -> None: + class Model(BaseModel): + float_field: float + + m = Model.construct(float_field=10) + assert m.float_field == 10.0 + assert isinstance(m.float_field, float) + + m = Model.construct(float_field=10.12) + assert m.float_field == 10.12 + assert isinstance(m.float_field, float) + + # number too big + m = Model.construct(float_field=2**53 + 1) + assert m.float_field == 2**53 + 1 + assert isinstance(m.float_field, int) + + +def test_deprecated_alias() -> None: + class Model(BaseModel): + resource_id: str = Field(alias="model_id") + + @property + def model_id(self) -> str: + return self.resource_id + + m = Model.construct(model_id="id") + assert m.model_id == "id" + assert m.resource_id == "id" + assert m.resource_id is m.model_id + + m = parse_obj(Model, {"model_id": "id"}) + assert m.model_id == "id" + assert m.resource_id == "id" + assert m.resource_id is m.model_id + + +def test_omitted_fields() -> None: + class Model(BaseModel): + resource_id: Optional[str] = None + + m = Model.construct() + assert m.resource_id is None + assert "resource_id" not in m.model_fields_set + + m = Model.construct(resource_id=None) + assert m.resource_id is None + assert "resource_id" in m.model_fields_set + + m = Model.construct(resource_id="foo") + assert m.resource_id == "foo" + assert "resource_id" in m.model_fields_set + + +def test_to_dict() -> None: + class Model(BaseModel): + foo: Optional[str] = Field(alias="FOO", default=None) + + m = Model(FOO="hello") + assert m.to_dict() == {"FOO": "hello"} + assert m.to_dict(use_api_names=False) == {"foo": "hello"} + + m2 = Model() + assert m2.to_dict() == {} + assert m2.to_dict(exclude_unset=False) == {"FOO": None} + assert m2.to_dict(exclude_unset=False, exclude_none=True) == {} + assert m2.to_dict(exclude_unset=False, exclude_defaults=True) == {} + + m3 = Model(FOO=None) + assert m3.to_dict() == {"FOO": None} + assert m3.to_dict(exclude_none=True) == {} + assert m3.to_dict(exclude_defaults=True) == {} + + class Model2(BaseModel): + created_at: datetime + + time_str = "2024-03-21T11:39:01.275859" + m4 = Model2.construct(created_at=time_str) + assert m4.to_dict(mode="python") == {"created_at": datetime.fromisoformat(time_str)} + assert m4.to_dict(mode="json") == {"created_at": time_str} + + if not PYDANTIC_V2: + with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): + m.to_dict(warnings=False) + + +def test_forwards_compat_model_dump_method() -> None: + class Model(BaseModel): + foo: Optional[str] = Field(alias="FOO", default=None) + + m = Model(FOO="hello") + assert m.model_dump() == {"foo": "hello"} + assert m.model_dump(include={"bar"}) == {} + assert m.model_dump(exclude={"foo"}) == {} + assert m.model_dump(by_alias=True) == {"FOO": "hello"} + + m2 = Model() + assert m2.model_dump() == {"foo": None} + assert m2.model_dump(exclude_unset=True) == {} + assert m2.model_dump(exclude_none=True) == {} + assert m2.model_dump(exclude_defaults=True) == {} + + m3 = Model(FOO=None) + assert m3.model_dump() == {"foo": None} + assert m3.model_dump(exclude_none=True) == {} + + if not PYDANTIC_V2: + with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"): + m.model_dump(round_trip=True) + + with pytest.raises(ValueError, match="warnings is only supported in 
Pydantic v2"): + m.model_dump(warnings=False) + + +def test_compat_method_no_error_for_warnings() -> None: + class Model(BaseModel): + foo: Optional[str] + + m = Model(foo="hello") + assert isinstance(model_dump(m, warnings=False), dict) + + +def test_to_json() -> None: + class Model(BaseModel): + foo: Optional[str] = Field(alias="FOO", default=None) + + m = Model(FOO="hello") + assert json.loads(m.to_json()) == {"FOO": "hello"} + assert json.loads(m.to_json(use_api_names=False)) == {"foo": "hello"} + + if PYDANTIC_V2: + assert m.to_json(indent=None) == '{"FOO":"hello"}' + else: + assert m.to_json(indent=None) == '{"FOO": "hello"}' + + m2 = Model() + assert json.loads(m2.to_json()) == {} + assert json.loads(m2.to_json(exclude_unset=False)) == {"FOO": None} + assert json.loads(m2.to_json(exclude_unset=False, exclude_none=True)) == {} + assert json.loads(m2.to_json(exclude_unset=False, exclude_defaults=True)) == {} + + m3 = Model(FOO=None) + assert json.loads(m3.to_json()) == {"FOO": None} + assert json.loads(m3.to_json(exclude_none=True)) == {} + + if not PYDANTIC_V2: + with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): + m.to_json(warnings=False) + + +def test_forwards_compat_model_dump_json_method() -> None: + class Model(BaseModel): + foo: Optional[str] = Field(alias="FOO", default=None) + + m = Model(FOO="hello") + assert json.loads(m.model_dump_json()) == {"foo": "hello"} + assert json.loads(m.model_dump_json(include={"bar"})) == {} + assert json.loads(m.model_dump_json(include={"foo"})) == {"foo": "hello"} + assert json.loads(m.model_dump_json(by_alias=True)) == {"FOO": "hello"} + + assert m.model_dump_json(indent=2) == '{\n "foo": "hello"\n}' + + m2 = Model() + assert json.loads(m2.model_dump_json()) == {"foo": None} + assert json.loads(m2.model_dump_json(exclude_unset=True)) == {} + assert json.loads(m2.model_dump_json(exclude_none=True)) == {} + assert json.loads(m2.model_dump_json(exclude_defaults=True)) == {} + + m3 = Model(FOO=None) + assert json.loads(m3.model_dump_json()) == {"foo": None} + assert json.loads(m3.model_dump_json(exclude_none=True)) == {} + + if not PYDANTIC_V2: + with pytest.raises(ValueError, match="round_trip is only supported in Pydantic v2"): + m.model_dump_json(round_trip=True) + + with pytest.raises(ValueError, match="warnings is only supported in Pydantic v2"): + m.model_dump_json(warnings=False) + + +def test_type_compat() -> None: + # our model type can be assigned to Pydantic's model type + + def takes_pydantic(model: pydantic.BaseModel) -> None: # noqa: ARG001 + ... 
+ + class OurModel(BaseModel): + foo: Optional[str] = None + + takes_pydantic(OurModel()) + + +def test_annotated_types() -> None: + class Model(BaseModel): + value: str + + m = construct_type( + value={"value": "foo"}, + type_=cast(Any, Annotated[Model, "random metadata"]), + ) + assert isinstance(m, Model) + assert m.value == "foo" + + +def test_discriminated_unions_invalid_data() -> None: + class A(BaseModel): + type: Literal["a"] + + data: str + + class B(BaseModel): + type: Literal["b"] + + data: int + + m = construct_type( + value={"type": "b", "data": "foo"}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="type")]), + ) + assert isinstance(m, B) + assert m.type == "b" + assert m.data == "foo" # type: ignore[comparison-overlap] + + m = construct_type( + value={"type": "a", "data": 100}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="type")]), + ) + assert isinstance(m, A) + assert m.type == "a" + if PYDANTIC_V2: + assert m.data == 100 # type: ignore[comparison-overlap] + else: + # pydantic v1 automatically converts inputs to strings + # if the expected type is a str + assert m.data == "100" + + +def test_discriminated_unions_unknown_variant() -> None: + class A(BaseModel): + type: Literal["a"] + + data: str + + class B(BaseModel): + type: Literal["b"] + + data: int + + m = construct_type( + value={"type": "c", "data": None, "new_thing": "bar"}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="type")]), + ) + + # just chooses the first variant + assert isinstance(m, A) + assert m.type == "c" # type: ignore[comparison-overlap] + assert m.data == None # type: ignore[unreachable] + assert m.new_thing == "bar" + + +def test_discriminated_unions_invalid_data_nested_unions() -> None: + class A(BaseModel): + type: Literal["a"] + + data: str + + class B(BaseModel): + type: Literal["b"] + + data: int + + class C(BaseModel): + type: Literal["c"] + + data: bool + + m = construct_type( + value={"type": "b", "data": "foo"}, + type_=cast(Any, Annotated[Union[Union[A, B], C], PropertyInfo(discriminator="type")]), + ) + assert isinstance(m, B) + assert m.type == "b" + assert m.data == "foo" # type: ignore[comparison-overlap] + + m = construct_type( + value={"type": "c", "data": "foo"}, + type_=cast(Any, Annotated[Union[Union[A, B], C], PropertyInfo(discriminator="type")]), + ) + assert isinstance(m, C) + assert m.type == "c" + assert m.data == "foo" # type: ignore[comparison-overlap] + + +def test_discriminated_unions_with_aliases_invalid_data() -> None: + class A(BaseModel): + foo_type: Literal["a"] = Field(alias="type") + + data: str + + class B(BaseModel): + foo_type: Literal["b"] = Field(alias="type") + + data: int + + m = construct_type( + value={"type": "b", "data": "foo"}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="foo_type")]), + ) + assert isinstance(m, B) + assert m.foo_type == "b" + assert m.data == "foo" # type: ignore[comparison-overlap] + + m = construct_type( + value={"type": "a", "data": 100}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="foo_type")]), + ) + assert isinstance(m, A) + assert m.foo_type == "a" + if PYDANTIC_V2: + assert m.data == 100 # type: ignore[comparison-overlap] + else: + # pydantic v1 automatically converts inputs to strings + # if the expected type is a str + assert m.data == "100" + + +def test_discriminated_unions_overlapping_discriminators_invalid_data() -> None: + class A(BaseModel): + type: Literal["a"] + + data: bool + + class B(BaseModel): + 
type: Literal["a"] + + data: int + + m = construct_type( + value={"type": "a", "data": "foo"}, + type_=cast(Any, Annotated[Union[A, B], PropertyInfo(discriminator="type")]), + ) + assert isinstance(m, B) + assert m.type == "a" + assert m.data == "foo" # type: ignore[comparison-overlap] + + +def test_discriminated_unions_invalid_data_uses_cache() -> None: + class A(BaseModel): + type: Literal["a"] + + data: str + + class B(BaseModel): + type: Literal["b"] + + data: int + + UnionType = cast(Any, Union[A, B]) + + assert not hasattr(UnionType, "__discriminator__") + + m = construct_type( + value={"type": "b", "data": "foo"}, type_=cast(Any, Annotated[UnionType, PropertyInfo(discriminator="type")]) + ) + assert isinstance(m, B) + assert m.type == "b" + assert m.data == "foo" # type: ignore[comparison-overlap] + + discriminator = UnionType.__discriminator__ + assert discriminator is not None + + m = construct_type( + value={"type": "b", "data": "foo"}, type_=cast(Any, Annotated[UnionType, PropertyInfo(discriminator="type")]) + ) + assert isinstance(m, B) + assert m.type == "b" + assert m.data == "foo" # type: ignore[comparison-overlap] + + # if the discriminator details object stays the same between invocations then + # we hit the cache + assert UnionType.__discriminator__ is discriminator + + +@pytest.mark.skipif(not PYDANTIC_V2, reason="TypeAliasType is not supported in Pydantic v1") +def test_type_alias_type() -> None: + Alias = TypeAliasType("Alias", str) # pyright: ignore + + class Model(BaseModel): + alias: Alias + union: Union[int, Alias] + + m = construct_type(value={"alias": "foo", "union": "bar"}, type_=Model) + assert isinstance(m, Model) + assert isinstance(m.alias, str) + assert m.alias == "foo" + assert isinstance(m.union, str) + assert m.union == "bar" + + +@pytest.mark.skipif(not PYDANTIC_V2, reason="TypeAliasType is not supported in Pydantic v1") +def test_field_named_cls() -> None: + class Model(BaseModel): + cls: str + + m = construct_type(value={"cls": "foo"}, type_=Model) + assert isinstance(m, Model) + assert isinstance(m.cls, str) + + +def test_discriminated_union_case() -> None: + class A(BaseModel): + type: Literal["a"] + + data: bool + + class B(BaseModel): + type: Literal["b"] + + data: List[Union[A, object]] + + class ModelA(BaseModel): + type: Literal["modelA"] + + data: int + + class ModelB(BaseModel): + type: Literal["modelB"] + + required: str + + data: Union[A, B] + + # when constructing ModelA | ModelB, value data doesn't match ModelB exactly - missing `required` + m = construct_type( + value={"type": "modelB", "data": {"type": "a", "data": True}}, + type_=cast(Any, Annotated[Union[ModelA, ModelB], PropertyInfo(discriminator="type")]), + ) + + assert isinstance(m, ModelB) + + +def test_nested_discriminated_union() -> None: + class InnerType1(BaseModel): + type: Literal["type_1"] + + class InnerModel(BaseModel): + inner_value: str + + class InnerType2(BaseModel): + type: Literal["type_2"] + some_inner_model: InnerModel + + class Type1(BaseModel): + base_type: Literal["base_type_1"] + value: Annotated[ + Union[ + InnerType1, + InnerType2, + ], + PropertyInfo(discriminator="type"), + ] + + class Type2(BaseModel): + base_type: Literal["base_type_2"] + + T = Annotated[ + Union[ + Type1, + Type2, + ], + PropertyInfo(discriminator="base_type"), + ] + + model = construct_type( + type_=T, + value={ + "base_type": "base_type_1", + "value": { + "type": "type_2", + }, + }, + ) + assert isinstance(model, Type1) + assert isinstance(model.value, InnerType2) diff --git 
a/tests/test_qs.py b/tests/test_qs.py new file mode 100644 index 00000000..a938eb26 --- /dev/null +++ b/tests/test_qs.py @@ -0,0 +1,78 @@ +from typing import Any, cast +from functools import partial +from urllib.parse import unquote + +import pytest + +from agentex._qs import Querystring, stringify + + +def test_empty() -> None: + assert stringify({}) == "" + assert stringify({"a": {}}) == "" + assert stringify({"a": {"b": {"c": {}}}}) == "" + + +def test_basic() -> None: + assert stringify({"a": 1}) == "a=1" + assert stringify({"a": "b"}) == "a=b" + assert stringify({"a": True}) == "a=true" + assert stringify({"a": False}) == "a=false" + assert stringify({"a": 1.23456}) == "a=1.23456" + assert stringify({"a": None}) == "" + + +@pytest.mark.parametrize("method", ["class", "function"]) +def test_nested_dotted(method: str) -> None: + if method == "class": + serialise = Querystring(nested_format="dots").stringify + else: + serialise = partial(stringify, nested_format="dots") + + assert unquote(serialise({"a": {"b": "c"}})) == "a.b=c" + assert unquote(serialise({"a": {"b": "c", "d": "e", "f": "g"}})) == "a.b=c&a.d=e&a.f=g" + assert unquote(serialise({"a": {"b": {"c": {"d": "e"}}}})) == "a.b.c.d=e" + assert unquote(serialise({"a": {"b": True}})) == "a.b=true" + + +def test_nested_brackets() -> None: + assert unquote(stringify({"a": {"b": "c"}})) == "a[b]=c" + assert unquote(stringify({"a": {"b": "c", "d": "e", "f": "g"}})) == "a[b]=c&a[d]=e&a[f]=g" + assert unquote(stringify({"a": {"b": {"c": {"d": "e"}}}})) == "a[b][c][d]=e" + assert unquote(stringify({"a": {"b": True}})) == "a[b]=true" + + +@pytest.mark.parametrize("method", ["class", "function"]) +def test_array_comma(method: str) -> None: + if method == "class": + serialise = Querystring(array_format="comma").stringify + else: + serialise = partial(stringify, array_format="comma") + + assert unquote(serialise({"in": ["foo", "bar"]})) == "in=foo,bar" + assert unquote(serialise({"a": {"b": [True, False]}})) == "a[b]=true,false" + assert unquote(serialise({"a": {"b": [True, False, None, True]}})) == "a[b]=true,false,true" + + +def test_array_repeat() -> None: + assert unquote(stringify({"in": ["foo", "bar"]})) == "in=foo&in=bar" + assert unquote(stringify({"a": {"b": [True, False]}})) == "a[b]=true&a[b]=false" + assert unquote(stringify({"a": {"b": [True, False, None, True]}})) == "a[b]=true&a[b]=false&a[b]=true" + assert unquote(stringify({"in": ["foo", {"b": {"c": ["d", "e"]}}]})) == "in=foo&in[b][c]=d&in[b][c]=e" + + +@pytest.mark.parametrize("method", ["class", "function"]) +def test_array_brackets(method: str) -> None: + if method == "class": + serialise = Querystring(array_format="brackets").stringify + else: + serialise = partial(stringify, array_format="brackets") + + assert unquote(serialise({"in": ["foo", "bar"]})) == "in[]=foo&in[]=bar" + assert unquote(serialise({"a": {"b": [True, False]}})) == "a[b][]=true&a[b][]=false" + assert unquote(serialise({"a": {"b": [True, False, None, True]}})) == "a[b][]=true&a[b][]=false&a[b][]=true" + + +def test_unknown_array_format() -> None: + with pytest.raises(NotImplementedError, match="Unknown array_format value: foo, choose from comma, repeat"): + stringify({"a": ["foo", "bar"]}, array_format=cast(Any, "foo")) diff --git a/tests/test_required_args.py b/tests/test_required_args.py new file mode 100644 index 00000000..f507b1c0 --- /dev/null +++ b/tests/test_required_args.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +import pytest + +from agentex._utils import required_args + + +def 
test_too_many_positional_params() -> None: + @required_args(["a"]) + def foo(a: str | None = None) -> str | None: + return a + + with pytest.raises(TypeError, match=r"foo\(\) takes 1 argument\(s\) but 2 were given"): + foo("a", "b") # type: ignore + + +def test_positional_param() -> None: + @required_args(["a"]) + def foo(a: str | None = None) -> str | None: + return a + + assert foo("a") == "a" + assert foo(None) is None + assert foo(a="b") == "b" + + with pytest.raises(TypeError, match="Missing required argument: 'a'"): + foo() + + +def test_keyword_only_param() -> None: + @required_args(["a"]) + def foo(*, a: str | None = None) -> str | None: + return a + + assert foo(a="a") == "a" + assert foo(a=None) is None + assert foo(a="b") == "b" + + with pytest.raises(TypeError, match="Missing required argument: 'a'"): + foo() + + +def test_multiple_params() -> None: + @required_args(["a", "b", "c"]) + def foo(a: str = "", *, b: str = "", c: str = "") -> str | None: + return f"{a} {b} {c}" + + assert foo(a="a", b="b", c="c") == "a b c" + + error_message = r"Missing required arguments.*" + + with pytest.raises(TypeError, match=error_message): + foo() + + with pytest.raises(TypeError, match=error_message): + foo(a="a") + + with pytest.raises(TypeError, match=error_message): + foo(b="b") + + with pytest.raises(TypeError, match=error_message): + foo(c="c") + + with pytest.raises(TypeError, match=r"Missing required argument: 'a'"): + foo(b="a", c="c") + + with pytest.raises(TypeError, match=r"Missing required argument: 'b'"): + foo("a", c="c") + + +def test_multiple_variants() -> None: + @required_args(["a"], ["b"]) + def foo(*, a: str | None = None, b: str | None = None) -> str | None: + return a if a is not None else b + + assert foo(a="foo") == "foo" + assert foo(b="bar") == "bar" + assert foo(a=None) is None + assert foo(b=None) is None + + # TODO: this error message could probably be improved + with pytest.raises( + TypeError, + match=r"Missing required arguments; Expected either \('a'\) or \('b'\) arguments to be given", + ): + foo() + + +def test_multiple_params_multiple_variants() -> None: + @required_args(["a", "b"], ["c"]) + def foo(*, a: str | None = None, b: str | None = None, c: str | None = None) -> str | None: + if a is not None: + return a + if b is not None: + return b + return c + + error_message = r"Missing required arguments; Expected either \('a' and 'b'\) or \('c'\) arguments to be given" + + with pytest.raises(TypeError, match=error_message): + foo(a="foo") + + with pytest.raises(TypeError, match=error_message): + foo(b="bar") + + with pytest.raises(TypeError, match=error_message): + foo() + + assert foo(a=None, b="bar") == "bar" + assert foo(c=None) is None + assert foo(c="foo") == "foo" diff --git a/tests/test_response.py b/tests/test_response.py new file mode 100644 index 00000000..ed94eb68 --- /dev/null +++ b/tests/test_response.py @@ -0,0 +1,277 @@ +import json +from typing import Any, List, Union, cast +from typing_extensions import Annotated + +import httpx +import pytest +import pydantic + +from agentex import Agentex, BaseModel, AsyncAgentex +from agentex._response import ( + APIResponse, + BaseAPIResponse, + AsyncAPIResponse, + BinaryAPIResponse, + AsyncBinaryAPIResponse, + extract_response_type, +) +from agentex._streaming import Stream +from agentex._base_client import FinalRequestOptions + + +class ConcreteBaseAPIResponse(APIResponse[bytes]): ... + + +class ConcreteAPIResponse(APIResponse[List[str]]): ... 
+ + +class ConcreteAsyncAPIResponse(APIResponse[httpx.Response]): ... + + +def test_extract_response_type_direct_classes() -> None: + assert extract_response_type(BaseAPIResponse[str]) == str + assert extract_response_type(APIResponse[str]) == str + assert extract_response_type(AsyncAPIResponse[str]) == str + + +def test_extract_response_type_direct_class_missing_type_arg() -> None: + with pytest.raises( + RuntimeError, + match="Expected type to have a type argument at index 0 but it did not", + ): + extract_response_type(AsyncAPIResponse) + + +def test_extract_response_type_concrete_subclasses() -> None: + assert extract_response_type(ConcreteBaseAPIResponse) == bytes + assert extract_response_type(ConcreteAPIResponse) == List[str] + assert extract_response_type(ConcreteAsyncAPIResponse) == httpx.Response + + +def test_extract_response_type_binary_response() -> None: + assert extract_response_type(BinaryAPIResponse) == bytes + assert extract_response_type(AsyncBinaryAPIResponse) == bytes + + +class PydanticModel(pydantic.BaseModel): ... + + +def test_response_parse_mismatched_basemodel(client: Agentex) -> None: + response = APIResponse( + raw=httpx.Response(200, content=b"foo"), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + with pytest.raises( + TypeError, + match="Pydantic models must subclass our base model type, e.g. `from agentex import BaseModel`", + ): + response.parse(to=PydanticModel) + + +@pytest.mark.asyncio +async def test_async_response_parse_mismatched_basemodel(async_client: AsyncAgentex) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=b"foo"), + client=async_client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + with pytest.raises( + TypeError, + match="Pydantic models must subclass our base model type, e.g. `from agentex import BaseModel`", + ): + await response.parse(to=PydanticModel) + + +def test_response_parse_custom_stream(client: Agentex) -> None: + response = APIResponse( + raw=httpx.Response(200, content=b"foo"), + client=client, + stream=True, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + stream = response.parse(to=Stream[int]) + assert stream._cast_to == int + + +@pytest.mark.asyncio +async def test_async_response_parse_custom_stream(async_client: AsyncAgentex) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=b"foo"), + client=async_client, + stream=True, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + stream = await response.parse(to=Stream[int]) + assert stream._cast_to == int + + +class CustomModel(BaseModel): + foo: str + bar: int + + +def test_response_parse_custom_model(client: Agentex) -> None: + response = APIResponse( + raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = response.parse(to=CustomModel) + assert obj.foo == "hello!" 
+ assert obj.bar == 2 + + +@pytest.mark.asyncio +async def test_async_response_parse_custom_model(async_client: AsyncAgentex) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), + client=async_client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = await response.parse(to=CustomModel) + assert obj.foo == "hello!" + assert obj.bar == 2 + + +def test_response_parse_annotated_type(client: Agentex) -> None: + response = APIResponse( + raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = response.parse( + to=cast("type[CustomModel]", Annotated[CustomModel, "random metadata"]), + ) + assert obj.foo == "hello!" + assert obj.bar == 2 + + +async def test_async_response_parse_annotated_type(async_client: AsyncAgentex) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=json.dumps({"foo": "hello!", "bar": 2})), + client=async_client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = await response.parse( + to=cast("type[CustomModel]", Annotated[CustomModel, "random metadata"]), + ) + assert obj.foo == "hello!" + assert obj.bar == 2 + + +@pytest.mark.parametrize( + "content, expected", + [ + ("false", False), + ("true", True), + ("False", False), + ("True", True), + ("TrUe", True), + ("FalSe", False), + ], +) +def test_response_parse_bool(client: Agentex, content: str, expected: bool) -> None: + response = APIResponse( + raw=httpx.Response(200, content=content), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + result = response.parse(to=bool) + assert result is expected + + +@pytest.mark.parametrize( + "content, expected", + [ + ("false", False), + ("true", True), + ("False", False), + ("True", True), + ("TrUe", True), + ("FalSe", False), + ], +) +async def test_async_response_parse_bool(client: AsyncAgentex, content: str, expected: bool) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=content), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + result = await response.parse(to=bool) + assert result is expected + + +class OtherModel(BaseModel): + a: str + + +@pytest.mark.parametrize("client", [False], indirect=True) # loose validation +def test_response_parse_expect_model_union_non_json_content(client: Agentex) -> None: + response = APIResponse( + raw=httpx.Response(200, content=b"foo", headers={"Content-Type": "application/text"}), + client=client, + stream=False, + stream_cls=None, + cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = response.parse(to=cast(Any, Union[CustomModel, OtherModel])) + assert isinstance(obj, str) + assert obj == "foo" + + +@pytest.mark.asyncio +@pytest.mark.parametrize("async_client", [False], indirect=True) # loose validation +async def test_async_response_parse_expect_model_union_non_json_content(async_client: AsyncAgentex) -> None: + response = AsyncAPIResponse( + raw=httpx.Response(200, content=b"foo", headers={"Content-Type": "application/text"}), + client=async_client, + stream=False, + stream_cls=None, + 
cast_to=str, + options=FinalRequestOptions.construct(method="get", url="/foo"), + ) + + obj = await response.parse(to=cast(Any, Union[CustomModel, OtherModel])) + assert isinstance(obj, str) + assert obj == "foo" diff --git a/tests/test_streaming.py b/tests/test_streaming.py new file mode 100644 index 00000000..4b6a392d --- /dev/null +++ b/tests/test_streaming.py @@ -0,0 +1,248 @@ +from __future__ import annotations + +from typing import Iterator, AsyncIterator + +import httpx +import pytest + +from agentex import Agentex, AsyncAgentex +from agentex._streaming import Stream, AsyncStream, ServerSentEvent + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_basic(sync: bool, client: Agentex, async_client: AsyncAgentex) -> None: + def body() -> Iterator[bytes]: + yield b"event: completion\n" + yield b'data: {"foo":true}\n' + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "completion" + assert sse.json() == {"foo": True} + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_data_missing_event(sync: bool, client: Agentex, async_client: AsyncAgentex) -> None: + def body() -> Iterator[bytes]: + yield b'data: {"foo":true}\n' + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event is None + assert sse.json() == {"foo": True} + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_event_missing_data(sync: bool, client: Agentex, async_client: AsyncAgentex) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.data == "" + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_multiple_events(sync: bool, client: Agentex, async_client: AsyncAgentex) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b"\n" + yield b"event: completion\n" + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.data == "" + + sse = await iter_next(iterator) + assert sse.event == "completion" + assert sse.data == "" + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_multiple_events_with_data(sync: bool, client: Agentex, async_client: AsyncAgentex) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b'data: {"foo":true}\n' + yield b"\n" + yield b"event: completion\n" + yield b'data: {"bar":false}\n' + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.json() == {"foo": True} + + sse = await iter_next(iterator) + assert sse.event == "completion" + assert sse.json() == {"bar": False} + + await 
assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_multiple_data_lines_with_empty_line(sync: bool, client: Agentex, async_client: AsyncAgentex) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b"data: {\n" + yield b'data: "foo":\n' + yield b"data: \n" + yield b"data:\n" + yield b"data: true}\n" + yield b"\n\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.json() == {"foo": True} + assert sse.data == '{\n"foo":\n\n\ntrue}' + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_data_json_escaped_double_new_line(sync: bool, client: Agentex, async_client: AsyncAgentex) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b'data: {"foo": "my long\\n\\ncontent"}' + yield b"\n\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.json() == {"foo": "my long\n\ncontent"} + + await assert_empty_iter(iterator) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_multiple_data_lines(sync: bool, client: Agentex, async_client: AsyncAgentex) -> None: + def body() -> Iterator[bytes]: + yield b"event: ping\n" + yield b"data: {\n" + yield b'data: "foo":\n' + yield b"data: true}\n" + yield b"\n\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event == "ping" + assert sse.json() == {"foo": True} + + await assert_empty_iter(iterator) + + +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_special_new_line_character( + sync: bool, + client: Agentex, + async_client: AsyncAgentex, +) -> None: + def body() -> Iterator[bytes]: + yield b'data: {"content":" culpa"}\n' + yield b"\n" + yield b'data: {"content":" \xe2\x80\xa8"}\n' + yield b"\n" + yield b'data: {"content":"foo"}\n' + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event is None + assert sse.json() == {"content": " culpa"} + + sse = await iter_next(iterator) + assert sse.event is None + assert sse.json() == {"content": " 
"} + + sse = await iter_next(iterator) + assert sse.event is None + assert sse.json() == {"content": "foo"} + + await assert_empty_iter(iterator) + + +@pytest.mark.parametrize("sync", [True, False], ids=["sync", "async"]) +async def test_multi_byte_character_multiple_chunks( + sync: bool, + client: Agentex, + async_client: AsyncAgentex, +) -> None: + def body() -> Iterator[bytes]: + yield b'data: {"content":"' + # bytes taken from the string 'известни' and arbitrarily split + # so that some multi-byte characters span multiple chunks + yield b"\xd0" + yield b"\xb8\xd0\xb7\xd0" + yield b"\xb2\xd0\xb5\xd1\x81\xd1\x82\xd0\xbd\xd0\xb8" + yield b'"}\n' + yield b"\n" + + iterator = make_event_iterator(content=body(), sync=sync, client=client, async_client=async_client) + + sse = await iter_next(iterator) + assert sse.event is None + assert sse.json() == {"content": "известни"} + + +async def to_aiter(iter: Iterator[bytes]) -> AsyncIterator[bytes]: + for chunk in iter: + yield chunk + + +async def iter_next(iter: Iterator[ServerSentEvent] | AsyncIterator[ServerSentEvent]) -> ServerSentEvent: + if isinstance(iter, AsyncIterator): + return await iter.__anext__() + + return next(iter) + + +async def assert_empty_iter(iter: Iterator[ServerSentEvent] | AsyncIterator[ServerSentEvent]) -> None: + with pytest.raises((StopAsyncIteration, RuntimeError)): + await iter_next(iter) + + +def make_event_iterator( + content: Iterator[bytes], + *, + sync: bool, + client: Agentex, + async_client: AsyncAgentex, +) -> Iterator[ServerSentEvent] | AsyncIterator[ServerSentEvent]: + if sync: + return Stream(cast_to=object, client=client, response=httpx.Response(200, content=content))._iter_events() + + return AsyncStream( + cast_to=object, client=async_client, response=httpx.Response(200, content=to_aiter(content)) + )._iter_events() diff --git a/tests/test_transform.py b/tests/test_transform.py new file mode 100644 index 00000000..606a1788 --- /dev/null +++ b/tests/test_transform.py @@ -0,0 +1,453 @@ +from __future__ import annotations + +import io +import pathlib +from typing import Any, Dict, List, Union, TypeVar, Iterable, Optional, cast +from datetime import date, datetime +from typing_extensions import Required, Annotated, TypedDict + +import pytest + +from agentex._types import NOT_GIVEN, Base64FileInput +from agentex._utils import ( + PropertyInfo, + transform as _transform, + parse_datetime, + async_transform as _async_transform, +) +from agentex._compat import PYDANTIC_V2 +from agentex._models import BaseModel + +_T = TypeVar("_T") + +SAMPLE_FILE_PATH = pathlib.Path(__file__).parent.joinpath("sample_file.txt") + + +async def transform( + data: _T, + expected_type: object, + use_async: bool, +) -> _T: + if use_async: + return await _async_transform(data, expected_type=expected_type) + + return _transform(data, expected_type=expected_type) + + +parametrize = pytest.mark.parametrize("use_async", [False, True], ids=["sync", "async"]) + + +class Foo1(TypedDict): + foo_bar: Annotated[str, PropertyInfo(alias="fooBar")] + + +@parametrize +@pytest.mark.asyncio +async def test_top_level_alias(use_async: bool) -> None: + assert await transform({"foo_bar": "hello"}, expected_type=Foo1, use_async=use_async) == {"fooBar": "hello"} + + +class Foo2(TypedDict): + bar: Bar2 + + +class Bar2(TypedDict): + this_thing: Annotated[int, PropertyInfo(alias="this__thing")] + baz: Annotated[Baz2, PropertyInfo(alias="Baz")] + + +class Baz2(TypedDict): + my_baz: Annotated[str, PropertyInfo(alias="myBaz")] + + +@parametrize 
+@pytest.mark.asyncio +async def test_recursive_typeddict(use_async: bool) -> None: + assert await transform({"bar": {"this_thing": 1}}, Foo2, use_async) == {"bar": {"this__thing": 1}} + assert await transform({"bar": {"baz": {"my_baz": "foo"}}}, Foo2, use_async) == {"bar": {"Baz": {"myBaz": "foo"}}} + + +class Foo3(TypedDict): + things: List[Bar3] + + +class Bar3(TypedDict): + my_field: Annotated[str, PropertyInfo(alias="myField")] + + +@parametrize +@pytest.mark.asyncio +async def test_list_of_typeddict(use_async: bool) -> None: + result = await transform({"things": [{"my_field": "foo"}, {"my_field": "foo2"}]}, Foo3, use_async) + assert result == {"things": [{"myField": "foo"}, {"myField": "foo2"}]} + + +class Foo4(TypedDict): + foo: Union[Bar4, Baz4] + + +class Bar4(TypedDict): + foo_bar: Annotated[str, PropertyInfo(alias="fooBar")] + + +class Baz4(TypedDict): + foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")] + + +@parametrize +@pytest.mark.asyncio +async def test_union_of_typeddict(use_async: bool) -> None: + assert await transform({"foo": {"foo_bar": "bar"}}, Foo4, use_async) == {"foo": {"fooBar": "bar"}} + assert await transform({"foo": {"foo_baz": "baz"}}, Foo4, use_async) == {"foo": {"fooBaz": "baz"}} + assert await transform({"foo": {"foo_baz": "baz", "foo_bar": "bar"}}, Foo4, use_async) == { + "foo": {"fooBaz": "baz", "fooBar": "bar"} + } + + +class Foo5(TypedDict): + foo: Annotated[Union[Bar4, List[Baz4]], PropertyInfo(alias="FOO")] + + +class Bar5(TypedDict): + foo_bar: Annotated[str, PropertyInfo(alias="fooBar")] + + +class Baz5(TypedDict): + foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")] + + +@parametrize +@pytest.mark.asyncio +async def test_union_of_list(use_async: bool) -> None: + assert await transform({"foo": {"foo_bar": "bar"}}, Foo5, use_async) == {"FOO": {"fooBar": "bar"}} + assert await transform( + { + "foo": [ + {"foo_baz": "baz"}, + {"foo_baz": "baz"}, + ] + }, + Foo5, + use_async, + ) == {"FOO": [{"fooBaz": "baz"}, {"fooBaz": "baz"}]} + + +class Foo6(TypedDict): + bar: Annotated[str, PropertyInfo(alias="Bar")] + + +@parametrize +@pytest.mark.asyncio +async def test_includes_unknown_keys(use_async: bool) -> None: + assert await transform({"bar": "bar", "baz_": {"FOO": 1}}, Foo6, use_async) == { + "Bar": "bar", + "baz_": {"FOO": 1}, + } + + +class Foo7(TypedDict): + bar: Annotated[List[Bar7], PropertyInfo(alias="bAr")] + foo: Bar7 + + +class Bar7(TypedDict): + foo: str + + +@parametrize +@pytest.mark.asyncio +async def test_ignores_invalid_input(use_async: bool) -> None: + assert await transform({"bar": ""}, Foo7, use_async) == {"bAr": ""} + assert await transform({"foo": ""}, Foo7, use_async) == {"foo": ""} + + +class DatetimeDict(TypedDict, total=False): + foo: Annotated[datetime, PropertyInfo(format="iso8601")] + + bar: Annotated[Optional[datetime], PropertyInfo(format="iso8601")] + + required: Required[Annotated[Optional[datetime], PropertyInfo(format="iso8601")]] + + list_: Required[Annotated[Optional[List[datetime]], PropertyInfo(format="iso8601")]] + + union: Annotated[Union[int, datetime], PropertyInfo(format="iso8601")] + + +class DateDict(TypedDict, total=False): + foo: Annotated[date, PropertyInfo(format="iso8601")] + + +class DatetimeModel(BaseModel): + foo: datetime + + +class DateModel(BaseModel): + foo: Optional[date] + + +@parametrize +@pytest.mark.asyncio +async def test_iso8601_format(use_async: bool) -> None: + dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + tz = "Z" if PYDANTIC_V2 else "+00:00" + assert await 
transform({"foo": dt}, DatetimeDict, use_async) == {"foo": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap] + assert await transform(DatetimeModel(foo=dt), Any, use_async) == {"foo": "2023-02-23T14:16:36.337692" + tz} # type: ignore[comparison-overlap] + + dt = dt.replace(tzinfo=None) + assert await transform({"foo": dt}, DatetimeDict, use_async) == {"foo": "2023-02-23T14:16:36.337692"} # type: ignore[comparison-overlap] + assert await transform(DatetimeModel(foo=dt), Any, use_async) == {"foo": "2023-02-23T14:16:36.337692"} # type: ignore[comparison-overlap] + + assert await transform({"foo": None}, DateDict, use_async) == {"foo": None} # type: ignore[comparison-overlap] + assert await transform(DateModel(foo=None), Any, use_async) == {"foo": None} # type: ignore + assert await transform({"foo": date.fromisoformat("2023-02-23")}, DateDict, use_async) == {"foo": "2023-02-23"} # type: ignore[comparison-overlap] + assert await transform(DateModel(foo=date.fromisoformat("2023-02-23")), DateDict, use_async) == { + "foo": "2023-02-23" + } # type: ignore[comparison-overlap] + + +@parametrize +@pytest.mark.asyncio +async def test_optional_iso8601_format(use_async: bool) -> None: + dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + assert await transform({"bar": dt}, DatetimeDict, use_async) == {"bar": "2023-02-23T14:16:36.337692+00:00"} # type: ignore[comparison-overlap] + + assert await transform({"bar": None}, DatetimeDict, use_async) == {"bar": None} + + +@parametrize +@pytest.mark.asyncio +async def test_required_iso8601_format(use_async: bool) -> None: + dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + assert await transform({"required": dt}, DatetimeDict, use_async) == { + "required": "2023-02-23T14:16:36.337692+00:00" + } # type: ignore[comparison-overlap] + + assert await transform({"required": None}, DatetimeDict, use_async) == {"required": None} + + +@parametrize +@pytest.mark.asyncio +async def test_union_datetime(use_async: bool) -> None: + dt = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + assert await transform({"union": dt}, DatetimeDict, use_async) == { # type: ignore[comparison-overlap] + "union": "2023-02-23T14:16:36.337692+00:00" + } + + assert await transform({"union": "foo"}, DatetimeDict, use_async) == {"union": "foo"} + + +@parametrize +@pytest.mark.asyncio +async def test_nested_list_iso6801_format(use_async: bool) -> None: + dt1 = datetime.fromisoformat("2023-02-23T14:16:36.337692+00:00") + dt2 = parse_datetime("2022-01-15T06:34:23Z") + assert await transform({"list_": [dt1, dt2]}, DatetimeDict, use_async) == { # type: ignore[comparison-overlap] + "list_": ["2023-02-23T14:16:36.337692+00:00", "2022-01-15T06:34:23+00:00"] + } + + +@parametrize +@pytest.mark.asyncio +async def test_datetime_custom_format(use_async: bool) -> None: + dt = parse_datetime("2022-01-15T06:34:23Z") + + result = await transform(dt, Annotated[datetime, PropertyInfo(format="custom", format_template="%H")], use_async) + assert result == "06" # type: ignore[comparison-overlap] + + +class DateDictWithRequiredAlias(TypedDict, total=False): + required_prop: Required[Annotated[date, PropertyInfo(format="iso8601", alias="prop")]] + + +@parametrize +@pytest.mark.asyncio +async def test_datetime_with_alias(use_async: bool) -> None: + assert await transform({"required_prop": None}, DateDictWithRequiredAlias, use_async) == {"prop": None} # type: ignore[comparison-overlap] + assert await transform( + {"required_prop": 
date.fromisoformat("2023-02-23")}, DateDictWithRequiredAlias, use_async + ) == {"prop": "2023-02-23"} # type: ignore[comparison-overlap] + + +class MyModel(BaseModel): + foo: str + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_model_to_dictionary(use_async: bool) -> None: + assert cast(Any, await transform(MyModel(foo="hi!"), Any, use_async)) == {"foo": "hi!"} + assert cast(Any, await transform(MyModel.construct(foo="hi!"), Any, use_async)) == {"foo": "hi!"} + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_empty_model(use_async: bool) -> None: + assert cast(Any, await transform(MyModel.construct(), Any, use_async)) == {} + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_unknown_field(use_async: bool) -> None: + assert cast(Any, await transform(MyModel.construct(my_untyped_field=True), Any, use_async)) == { + "my_untyped_field": True + } + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_mismatched_types(use_async: bool) -> None: + model = MyModel.construct(foo=True) + if PYDANTIC_V2: + with pytest.warns(UserWarning): + params = await transform(model, Any, use_async) + else: + params = await transform(model, Any, use_async) + assert cast(Any, params) == {"foo": True} + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_mismatched_object_type(use_async: bool) -> None: + model = MyModel.construct(foo=MyModel.construct(hello="world")) + if PYDANTIC_V2: + with pytest.warns(UserWarning): + params = await transform(model, Any, use_async) + else: + params = await transform(model, Any, use_async) + assert cast(Any, params) == {"foo": {"hello": "world"}} + + +class ModelNestedObjects(BaseModel): + nested: MyModel + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_nested_objects(use_async: bool) -> None: + model = ModelNestedObjects.construct(nested={"foo": "stainless"}) + assert isinstance(model.nested, MyModel) + assert cast(Any, await transform(model, Any, use_async)) == {"nested": {"foo": "stainless"}} + + +class ModelWithDefaultField(BaseModel): + foo: str + with_none_default: Union[str, None] = None + with_str_default: str = "foo" + + +@parametrize +@pytest.mark.asyncio +async def test_pydantic_default_field(use_async: bool) -> None: + # should be excluded when defaults are used + model = ModelWithDefaultField.construct() + assert model.with_none_default is None + assert model.with_str_default == "foo" + assert cast(Any, await transform(model, Any, use_async)) == {} + + # should be included when the default value is explicitly given + model = ModelWithDefaultField.construct(with_none_default=None, with_str_default="foo") + assert model.with_none_default is None + assert model.with_str_default == "foo" + assert cast(Any, await transform(model, Any, use_async)) == {"with_none_default": None, "with_str_default": "foo"} + + # should be included when a non-default value is explicitly given + model = ModelWithDefaultField.construct(with_none_default="bar", with_str_default="baz") + assert model.with_none_default == "bar" + assert model.with_str_default == "baz" + assert cast(Any, await transform(model, Any, use_async)) == {"with_none_default": "bar", "with_str_default": "baz"} + + +class TypedDictIterableUnion(TypedDict): + foo: Annotated[Union[Bar8, Iterable[Baz8]], PropertyInfo(alias="FOO")] + + +class Bar8(TypedDict): + foo_bar: Annotated[str, PropertyInfo(alias="fooBar")] + + +class Baz8(TypedDict): + foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")] + + +@parametrize +@pytest.mark.asyncio +async def 
test_iterable_of_dictionaries(use_async: bool) -> None: + assert await transform({"foo": [{"foo_baz": "bar"}]}, TypedDictIterableUnion, use_async) == { + "FOO": [{"fooBaz": "bar"}] + } + assert cast(Any, await transform({"foo": ({"foo_baz": "bar"},)}, TypedDictIterableUnion, use_async)) == { + "FOO": [{"fooBaz": "bar"}] + } + + def my_iter() -> Iterable[Baz8]: + yield {"foo_baz": "hello"} + yield {"foo_baz": "world"} + + assert await transform({"foo": my_iter()}, TypedDictIterableUnion, use_async) == { + "FOO": [{"fooBaz": "hello"}, {"fooBaz": "world"}] + } + + +@parametrize +@pytest.mark.asyncio +async def test_dictionary_items(use_async: bool) -> None: + class DictItems(TypedDict): + foo_baz: Annotated[str, PropertyInfo(alias="fooBaz")] + + assert await transform({"foo": {"foo_baz": "bar"}}, Dict[str, DictItems], use_async) == {"foo": {"fooBaz": "bar"}} + + +class TypedDictIterableUnionStr(TypedDict): + foo: Annotated[Union[str, Iterable[Baz8]], PropertyInfo(alias="FOO")] + + +@parametrize +@pytest.mark.asyncio +async def test_iterable_union_str(use_async: bool) -> None: + assert await transform({"foo": "bar"}, TypedDictIterableUnionStr, use_async) == {"FOO": "bar"} + assert cast(Any, await transform(iter([{"foo_baz": "bar"}]), Union[str, Iterable[Baz8]], use_async)) == [ + {"fooBaz": "bar"} + ] + + +class TypedDictBase64Input(TypedDict): + foo: Annotated[Union[str, Base64FileInput], PropertyInfo(format="base64")] + + +@parametrize +@pytest.mark.asyncio +async def test_base64_file_input(use_async: bool) -> None: + # strings are left as-is + assert await transform({"foo": "bar"}, TypedDictBase64Input, use_async) == {"foo": "bar"} + + # pathlib.Path is automatically converted to base64 + assert await transform({"foo": SAMPLE_FILE_PATH}, TypedDictBase64Input, use_async) == { + "foo": "SGVsbG8sIHdvcmxkIQo=" + } # type: ignore[comparison-overlap] + + # io instances are automatically converted to base64 + assert await transform({"foo": io.StringIO("Hello, world!")}, TypedDictBase64Input, use_async) == { + "foo": "SGVsbG8sIHdvcmxkIQ==" + } # type: ignore[comparison-overlap] + assert await transform({"foo": io.BytesIO(b"Hello, world!")}, TypedDictBase64Input, use_async) == { + "foo": "SGVsbG8sIHdvcmxkIQ==" + } # type: ignore[comparison-overlap] + + +@parametrize +@pytest.mark.asyncio +async def test_transform_skipping(use_async: bool) -> None: + # lists of ints are left as-is + data = [1, 2, 3] + assert await transform(data, List[int], use_async) is data + + # iterables of ints are converted to a list + data = iter([1, 2, 3]) + assert await transform(data, Iterable[int], use_async) == [1, 2, 3] + + +@parametrize +@pytest.mark.asyncio +async def test_strips_notgiven(use_async: bool) -> None: + assert await transform({"foo_bar": "bar"}, Foo1, use_async) == {"fooBar": "bar"} + assert await transform({"foo_bar": NOT_GIVEN}, Foo1, use_async) == {} diff --git a/tests/test_utils/test_proxy.py b/tests/test_utils/test_proxy.py new file mode 100644 index 00000000..21a15f98 --- /dev/null +++ b/tests/test_utils/test_proxy.py @@ -0,0 +1,34 @@ +import operator +from typing import Any +from typing_extensions import override + +from agentex._utils import LazyProxy + + +class RecursiveLazyProxy(LazyProxy[Any]): + @override + def __load__(self) -> Any: + return self + + def __call__(self, *_args: Any, **_kwds: Any) -> Any: + raise RuntimeError("This should never be called!") + + +def test_recursive_proxy() -> None: + proxy = RecursiveLazyProxy() + assert repr(proxy) == "RecursiveLazyProxy" + assert str(proxy) == 
"RecursiveLazyProxy" + assert dir(proxy) == [] + assert type(proxy).__name__ == "RecursiveLazyProxy" + assert type(operator.attrgetter("name.foo.bar.baz")(proxy)).__name__ == "RecursiveLazyProxy" + + +def test_isinstance_does_not_error() -> None: + class AlwaysErrorProxy(LazyProxy[Any]): + @override + def __load__(self) -> Any: + raise RuntimeError("Mocking missing dependency") + + proxy = AlwaysErrorProxy() + assert not isinstance(proxy, dict) + assert isinstance(proxy, LazyProxy) diff --git a/tests/test_utils/test_typing.py b/tests/test_utils/test_typing.py new file mode 100644 index 00000000..cd4f1800 --- /dev/null +++ b/tests/test_utils/test_typing.py @@ -0,0 +1,73 @@ +from __future__ import annotations + +from typing import Generic, TypeVar, cast + +from agentex._utils import extract_type_var_from_base + +_T = TypeVar("_T") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") + + +class BaseGeneric(Generic[_T]): ... + + +class SubclassGeneric(BaseGeneric[_T]): ... + + +class BaseGenericMultipleTypeArgs(Generic[_T, _T2, _T3]): ... + + +class SubclassGenericMultipleTypeArgs(BaseGenericMultipleTypeArgs[_T, _T2, _T3]): ... + + +class SubclassDifferentOrderGenericMultipleTypeArgs(BaseGenericMultipleTypeArgs[_T2, _T, _T3]): ... + + +def test_extract_type_var() -> None: + assert ( + extract_type_var_from_base( + BaseGeneric[int], + index=0, + generic_bases=cast("tuple[type, ...]", (BaseGeneric,)), + ) + == int + ) + + +def test_extract_type_var_generic_subclass() -> None: + assert ( + extract_type_var_from_base( + SubclassGeneric[int], + index=0, + generic_bases=cast("tuple[type, ...]", (BaseGeneric,)), + ) + == int + ) + + +def test_extract_type_var_multiple() -> None: + typ = BaseGenericMultipleTypeArgs[int, str, None] + + generic_bases = cast("tuple[type, ...]", (BaseGenericMultipleTypeArgs,)) + assert extract_type_var_from_base(typ, index=0, generic_bases=generic_bases) == int + assert extract_type_var_from_base(typ, index=1, generic_bases=generic_bases) == str + assert extract_type_var_from_base(typ, index=2, generic_bases=generic_bases) == type(None) + + +def test_extract_type_var_generic_subclass_multiple() -> None: + typ = SubclassGenericMultipleTypeArgs[int, str, None] + + generic_bases = cast("tuple[type, ...]", (BaseGenericMultipleTypeArgs,)) + assert extract_type_var_from_base(typ, index=0, generic_bases=generic_bases) == int + assert extract_type_var_from_base(typ, index=1, generic_bases=generic_bases) == str + assert extract_type_var_from_base(typ, index=2, generic_bases=generic_bases) == type(None) + + +def test_extract_type_var_generic_subclass_different_ordering_multiple() -> None: + typ = SubclassDifferentOrderGenericMultipleTypeArgs[int, str, None] + + generic_bases = cast("tuple[type, ...]", (BaseGenericMultipleTypeArgs,)) + assert extract_type_var_from_base(typ, index=0, generic_bases=generic_bases) == int + assert extract_type_var_from_base(typ, index=1, generic_bases=generic_bases) == str + assert extract_type_var_from_base(typ, index=2, generic_bases=generic_bases) == type(None) diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 00000000..f1d43e95 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,159 @@ +from __future__ import annotations + +import os +import inspect +import traceback +import contextlib +from typing import Any, TypeVar, Iterator, cast +from datetime import date, datetime +from typing_extensions import Literal, get_args, get_origin, assert_type + +from agentex._types import Omit, NoneType +from agentex._utils import ( + is_dict, + is_list, + 
is_list_type, + is_union_type, + extract_type_arg, + is_annotated_type, + is_type_alias_type, +) +from agentex._compat import PYDANTIC_V2, field_outer_type, get_model_fields +from agentex._models import BaseModel + +BaseModelT = TypeVar("BaseModelT", bound=BaseModel) + + +def assert_matches_model(model: type[BaseModelT], value: BaseModelT, *, path: list[str]) -> bool: + for name, field in get_model_fields(model).items(): + field_value = getattr(value, name) + if PYDANTIC_V2: + allow_none = False + else: + # in v1 nullability was structured differently + # https://docs.pydantic.dev/2.0/migration/#required-optional-and-nullable-fields + allow_none = getattr(field, "allow_none", False) + + assert_matches_type( + field_outer_type(field), + field_value, + path=[*path, name], + allow_none=allow_none, + ) + + return True + + +# Note: the `path` argument is only used to improve error messages when `--showlocals` is used +def assert_matches_type( + type_: Any, + value: object, + *, + path: list[str], + allow_none: bool = False, +) -> None: + if is_type_alias_type(type_): + type_ = type_.__value__ + + # unwrap `Annotated[T, ...]` -> `T` + if is_annotated_type(type_): + type_ = extract_type_arg(type_, 0) + + if allow_none and value is None: + return + + if type_ is None or type_ is NoneType: + assert value is None + return + + origin = get_origin(type_) or type_ + + if is_list_type(type_): + return _assert_list_type(type_, value) + + if origin == str: + assert isinstance(value, str) + elif origin == int: + assert isinstance(value, int) + elif origin == bool: + assert isinstance(value, bool) + elif origin == float: + assert isinstance(value, float) + elif origin == bytes: + assert isinstance(value, bytes) + elif origin == datetime: + assert isinstance(value, datetime) + elif origin == date: + assert isinstance(value, date) + elif origin == object: + # nothing to do here, the expected type is unknown + pass + elif origin == Literal: + assert value in get_args(type_) + elif origin == dict: + assert is_dict(value) + + args = get_args(type_) + key_type = args[0] + items_type = args[1] + + for key, item in value.items(): + assert_matches_type(key_type, key, path=[*path, ""]) + assert_matches_type(items_type, item, path=[*path, ""]) + elif is_union_type(type_): + variants = get_args(type_) + + try: + none_index = variants.index(type(None)) + except ValueError: + pass + else: + # special case Optional[T] for better error messages + if len(variants) == 2: + if value is None: + # valid + return + + return assert_matches_type(type_=variants[not none_index], value=value, path=path) + + for i, variant in enumerate(variants): + try: + assert_matches_type(variant, value, path=[*path, f"variant {i}"]) + return + except AssertionError: + traceback.print_exc() + continue + + raise AssertionError("Did not match any variants") + elif issubclass(origin, BaseModel): + assert isinstance(value, type_) + assert assert_matches_model(type_, cast(Any, value), path=path) + elif inspect.isclass(origin) and origin.__name__ == "HttpxBinaryResponseContent": + assert value.__class__.__name__ == "HttpxBinaryResponseContent" + else: + assert None, f"Unhandled field type: {type_}" + + +def _assert_list_type(type_: type[object], value: object) -> None: + assert is_list(value) + + inner_type = get_args(type_)[0] + for entry in value: + assert_type(inner_type, entry) # type: ignore + + +@contextlib.contextmanager +def update_env(**new_env: str | Omit) -> Iterator[None]: + old = os.environ.copy() + + try: + for name, value in 
new_env.items(): + if isinstance(value, Omit): + os.environ.pop(name, None) + else: + os.environ[name] = value + + yield None + finally: + os.environ.clear() + os.environ.update(old)
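
tests/utils.py closes with the update_env context manager, which swaps environment variables for the duration of a test and restores the previous environment afterwards; passing an Omit value removes a variable instead of setting it. A minimal usage sketch follows, with a made-up test name and made-up variable names that are not part of this diff:

    from typing import List

    from agentex._types import Omit
    from tests.utils import assert_matches_type, update_env


    def test_env_handling() -> None:  # hypothetical test, for illustration only
        # Set one variable and unset another; both are restored when the block exits.
        with update_env(AGENTEX_API_KEY="test-key", AGENTEX_DEBUG=Omit()):
            ...

        # assert_matches_type walks a typing annotation and checks the runtime value
        # against it, e.g. a plain List[int]:
        assert_matches_type(List[int], [1, 2, 3], path=["example"])

Constructing Omit() with no arguments is an assumption here, inferred from the isinstance(value, Omit) check inside update_env rather than stated anywhere in the diff.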
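
The LazyProxy tests in tests/test_utils/test_proxy.py only exercise the proxy machinery itself (repr, dir, attribute chaining, isinstance). As a rough illustration of the pattern they cover, the sketch below defers an import until the proxy is first touched; the numpy target and the module-level np name are assumptions made for this example, not part of the diff.

    from typing import Any
    from typing_extensions import override

    from agentex._utils import LazyProxy  # same import path as tests/test_utils/test_proxy.py


    class NumpyProxy(LazyProxy[Any]):
        """Hypothetical proxy that postpones importing numpy until first use."""

        @override
        def __load__(self) -> Any:
            import numpy  # assumed optional dependency, loaded lazily

            return numpy


    np = NumpyProxy()
    # Attribute access is forwarded to the loaded object, so the import is
    # expected to happen only once something like np.array([1, 2, 3]) is evaluated.

This mirrors how the suite's RecursiveLazyProxy overrides __load__, except that a realistic __load__ returns the wrapped object rather than the proxy itself.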