diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..97d5f5f
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+*.jpg binary
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 366fec0..0c43291 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -20,8 +20,8 @@ jobs:
       - uses: actions/setup-node@v4
         with:
           node-version: '20'
-      - name: Install tsx
-        run: npm install -g tsx
+      - name: Install Transloadit CLI
+        run: npm install -g transloadit
 
       - name: Set up Python
         uses: actions/setup-python@v4
@@ -89,3 +89,46 @@ jobs:
           path: |
             coverage.json
             htmlcov/
+
+  python-e2e:
+    runs-on: ubuntu-latest
+    needs: python
+    if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
+    env:
+      PYTHON_SDK_E2E: "1"
+      TRANSLOADIT_KEY: ${{ secrets.TRANSLOADIT_KEY }}
+      TRANSLOADIT_SECRET: ${{ secrets.TRANSLOADIT_SECRET }}
+    steps:
+      - uses: actions/checkout@v4
+
+      - uses: actions/setup-node@v4
+        with:
+          node-version: '20'
+      - name: Install Transloadit CLI
+        run: npm install -g transloadit
+
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.12'
+          architecture: x64
+          cache: 'pip'
+
+      - name: Install Poetry
+        run: pip install --upgrade poetry
+
+      - name: Install dependencies
+        run: poetry install
+
+      - name: Ensure credentials present
+        run: |
+          if [ -z "$TRANSLOADIT_KEY" ] || [ -z "$TRANSLOADIT_SECRET" ]; then
+            echo "TRANSLOADIT_KEY and TRANSLOADIT_SECRET secrets must be configured for the E2E job" >&2
+            exit 1
+          fi
+
+      - name: Run E2E upload test
+        env:
+          TEST_NODE_PARITY: 0
+        run: |
+          poetry run pytest tests/test_e2e_upload.py -q --maxfail=1 --no-cov
diff --git a/.gitignore b/.gitignore
index 3bdf5a7..8117c35 100644
--- a/.gitignore
+++ b/.gitignore
@@ -43,6 +43,7 @@ htmlcov/
 .cache
 nosetests.xml
 coverage.xml
+coverage.json
 *.cover
 .hypothesis/
 
@@ -98,6 +99,9 @@ ENV/
 
 # mkdocs documentation
 /site
 
+# Docker/local build helpers
+.docker-cache/
+
 # mypy
 .mypy_cache/
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 996d397..41b438e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,9 @@
+### 1.0.3/ 2025-10-28 ###
+* Added a Docker-based test harness (`scripts/test-in-docker.sh`) that mirrors our GitHub Actions matrix locally, including optional Smart CDN parity checks via the official Transloadit CLI.
+* Introduced an opt-in end-to-end image resize test (`tests/test_e2e_upload.py`) plus a supporting `chameleon.jpg` fixture; enable it by setting `PYTHON_SDK_E2E=1` along with `TRANSLOADIT_KEY`/`TRANSLOADIT_SECRET`.
+* Updated CI to run the E2E upload on Python 3.12 with guarded secrets and to skip coverage for that targeted job.
+* Documented the new workflows and ensured the Transloadit CLI integration replaces the legacy TypeScript helper.
+
 ### 1.0.2/ 2024-03-12 ###
 * Add support for Python 3.13
 
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..a808055
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,51 @@
+# Contributing
+
+## Release Checklist
+
+Use this checklist whenever you cut a new version of the Python SDK.
+
+### Prerequisites
+
+- Docker installed (our helper scripts build and run inside the project Docker image).
+- Write access to the Transloadit GitHub repository.
+- A PyPI API token with upload rights to `pytransloadit` (`PYPI_TOKEN`). Store it in your shell or `.env`.
+- Optionally, a TestPyPI token (`PYPI_TEST_TOKEN`) if you want to dry-run the release before pushing to the real registry.
+
+### 1. Prepare the Release Commit
+
+1. Update the version in all synced files:
+   - `pyproject.toml`
+   - `transloadit/__init__.py`
+   - `tests/test_request.py` (the `Transloadit-Client` header)
+2. Add a matching entry to `CHANGELOG.md`.
+3. Run the test matrix (add `PYTHON_SDK_E2E=1` if you want to exercise the live upload):
+   ```bash
+   ./scripts/test-in-docker.sh --python 3.12
+   ```
+4. Commit the changes with a message such as `Prepare 1.0.3 release`.
+
+### 2. Tag the Release
+
+After landing the release commit on `main` (or the branch you will tag), create and push an annotated tag:
+
+```bash
+git tag -a v1.0.3 -m "v1.0.3"
+git push origin main --tags
+```
+
+### 3. Publish to PyPI
+
+The `scripts/notify-registry.sh` helper publishes from inside our Docker image and performs the usual safety checks (clean git tree, version consistency, changelog entry). It looks for tokens in the environment or `.env`.
+
+Publish to the real registry:
+
+```bash
+PYPI_TOKEN=... scripts/notify-registry.sh
+```
+
+### 4. Announce the Release
+
+1. Draft a GitHub release for the new tag and paste the changelog entry.
+2. Confirm that the [Read the Docs build](https://transloadit.readthedocs.io/en/latest/) completes (it is triggered when you publish the GitHub release).
+
+That's it: PyPI and the documentation are now up to date. For additional background see the internal guide: https://github.com/transloadit/team-internals/blob/HEAD/_howtos/2020-12-14-maintain-python-sdk.md
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..844b74b
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,31 @@
+# syntax=docker/dockerfile:1
+
+ARG PYTHON_VERSION=3.12
+FROM python:${PYTHON_VERSION}-slim AS base
+
+ENV DEBIAN_FRONTEND=noninteractive \
+    PIP_DISABLE_PIP_VERSION_CHECK=1 \
+    PYTHONDONTWRITEBYTECODE=1 \
+    POETRY_VIRTUALENVS_IN_PROJECT=true
+
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends \
+        curl \
+        gnupg \
+        ca-certificates \
+        build-essential \
+        git \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install Node.js 20 (for Smart CDN parity tests) and supporting CLI tooling
+RUN curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
+    && apt-get update \
+    && apt-get install -y --no-install-recommends nodejs \
+    && npm install -g transloadit \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install Poetry so we match the GitHub Actions toolchain
+RUN pip install --no-cache-dir --upgrade pip \
+    && pip install --no-cache-dir poetry
+
+WORKDIR /workspace
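For reference, the image defined in this Dockerfile can also be built and exercised by hand, outside the helper scripts. A minimal sketch (the tag name and the ad-hoc `poetry install && pytest` command are illustrative, not something the scripts themselves run):

```bash
# Build the image for a single Python version (the tag is arbitrary)
docker build --build-arg PYTHON_VERSION=3.12 -t transloadit-python-sdk-dev-312 .

# Run the unit tests inside it, mounting the checkout at /workspace as the scripts do
docker run --rm -v "$PWD":/workspace -w /workspace transloadit-python-sdk-dev-312 \
  bash -lc "poetry install && poetry run pytest tests"
```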
diff --git a/README.md b/README.md
index 4c9a243..bc0b056 100644
--- a/README.md
+++ b/README.md
@@ -48,6 +48,30 @@ See [readthedocs](https://transloadit.readthedocs.io) for full API documentation
 
 ### Running tests
 
+You can mirror our GitHub Actions setup locally by running the test matrix inside Docker:
+
+```bash
+scripts/test-in-docker.sh
+```
+
+This script will:
+
+- build images for the Python versions we test in CI (3.9–3.13)
+- install Poetry, Node.js 20, and the Transloadit CLI
+- pass credentials from `.env` (if present) so end-to-end tests can run against real Transloadit accounts
+
+Signature parity tests use `npx transloadit smart_sig` under the hood, matching the reference implementation used by our other SDKs. Our GitHub Actions workflow also runs the E2E upload against Python 3.12 on every push and every same-repository pull request, using a dedicated Transloadit test account (wired through the `TRANSLOADIT_KEY` and `TRANSLOADIT_SECRET` secrets).
+
+Pass `--python 3.12` (or set `PYTHON_VERSIONS`) to restrict the matrix, or append a custom command after `--`, for example `scripts/test-in-docker.sh -- pytest -k smartcdn`.
+
+To exercise the optional end-to-end upload against a real Transloadit account, provide `TRANSLOADIT_KEY` and `TRANSLOADIT_SECRET` (via environment variables or `.env`) and set `PYTHON_SDK_E2E=1`:
+
+```bash
+PYTHON_SDK_E2E=1 scripts/test-in-docker.sh --python 3.12 -- pytest tests/test_e2e_upload.py
+```
+
+The test uploads `chameleon.jpg`, resizes it, and asserts on the live assembly results.
+
 If you have a global installation of `poetry`, you can run the tests with:
 
 ```bash
@@ -72,4 +96,8 @@ Generate a coverage report with:
 poetry run pytest --cov=transloadit --cov-report=html tests
 ```
 
-Then view the coverage report locally by opening `htmlcov/index.html` in your browser.
\ No newline at end of file
+Then view the coverage report locally by opening `htmlcov/index.html` in your browser.
+
+## Contributing
+
+See [CONTRIBUTING.md](CONTRIBUTING.md) for local development, testing, and release instructions.
diff --git a/RELEASE.md b/RELEASE.md
deleted file mode 100644
index 4b01a3a..0000000
--- a/RELEASE.md
+++ /dev/null
@@ -1,52 +0,0 @@
-## HOWTO Release
-
-This is a Howto guide on the commands to run and files to update in order to publish a new release of the Python SDK to Pypi
-
-### Prerequisite
-
-Poetry will handle things for us. You need to configure poetry with your pypi token for the publishing process to work.
-
-Enable testing publishing on pypi test index.
-
-```bash
-poetry config repositories.test-pypi https://test.pypi.org/legacy/
-poetry config pypi-token.test-pypi pypi-XXXXX
-```
-
-To setup your token to publish to pypi.
-
-```bash
-poetry config pypi-token.pypi pypi-XXXXX
-```
-
-### Release Steps
-
-1. Update the changelog, the version file, and the test file as done in [this commit](https://github.com/transloadit/python-sdk/commit/35789c535bd02086ff8f3a07eda9583d6e676d4d) and push it to main.
-2. Update the version
-```bash
-# e.g: 0.2.2 -> 0.2.3a0
-poetry version prerelease
-# or the following for, e.g.: 0.2.3
-poetry version patch
-```
-3. Publish to Pypi
-
-Pypi test index
-
-```bash
-poetry build
-poetry publish -r test-pypi
-```
-
-To publish to pypi
-```bash
-poetry publish
-```
-
-4. Now that release has been published on Pypi, please head to GitHub to [draft a new tag release](https://github.com/transloadit/python-sdk/releases). Point this tag release to the latest commit pushed on step 1 above. Once you're done drafting the release, go ahead to publish it.
-
-If all the steps above have been followed without errors, then you've successfully published a release. 🎉
-
----
-
-Further reading for Transloadians: https://github.com/transloadit/team-internals/blob/HEAD/_howtos/2020-12-14-maintain-python-sdk.md
diff --git a/chameleon.jpg b/chameleon.jpg
new file mode 100644
index 0000000..ea5dcc0
Binary files /dev/null and b/chameleon.jpg differ
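The README section above notes that credentials may come from a `.env` file, which both helper scripts forward into the container via `--env-file`. An illustrative `.env` (all values are placeholders) would be:

```bash
TRANSLOADIT_KEY=your-auth-key
TRANSLOADIT_SECRET=your-auth-secret
PYTHON_SDK_E2E=1
```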
diff --git a/pyproject.toml b/pyproject.toml
index dd2c1f3..99e425c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "pytransloadit"
-version = "1.0.2"
+version = "1.0.3"
 description = "A Python Integration for Transloadit's file uploading and encoding service."
 authors = ["Ifedapo Olarewaju"]
 maintainers = ["Florian Kuenzig", "Arnaud Limbourg"]
@@ -48,6 +48,9 @@ build-backend = "poetry.core.masonry.api"
 [tool.pytest.ini_options]
 addopts = "--cov=transloadit --cov-report=term-missing"
 testpaths = ["tests"]
+markers = [
+    "e2e: marks tests that hit the live Transloadit API"
+]
 
 [tool.coverage.run]
 source = ["transloadit"]
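Because the `e2e` marker is registered here and applied via `pytestmark` in `tests/test_e2e_upload.py`, the live test can also be selected or excluded with standard pytest `-m` expressions. Two illustrative invocations (the flags are stock pytest/pytest-cov, nothing added by this change):

```bash
# Everything except the live upload
poetry run pytest -m "not e2e" tests

# Only the live upload, assuming credentials and the opt-in flag are exported
PYTHON_SDK_E2E=1 poetry run pytest -m e2e --no-cov tests
```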
diff --git a/scripts/notify-registry.sh b/scripts/notify-registry.sh
new file mode 100755
index 0000000..4e00a9f
--- /dev/null
+++ b/scripts/notify-registry.sh
@@ -0,0 +1,192 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+IMAGE_NAME=${IMAGE_NAME:-transloadit-python-sdk-dev}
+CACHE_ROOT=${CACHE_ROOT:-.docker-cache}
+POETRY_CACHE_DIR="$CACHE_ROOT/pypoetry"
+PIP_CACHE_DIR="$CACHE_ROOT/pip"
+HOME_DIR="$CACHE_ROOT/home"
+
+usage() {
+  cat <<'EOF'
+Usage: scripts/notify-registry.sh [options]
+
+Options:
+  --dry-run       Build the package but skip publishing to PyPI
+  -h, --help      Show this help text
+
+Environment:
+  PYPI_TOKEN      API token with upload rights for pypi.org. This variable can
+                  optionally be defined in .env
+EOF
+}
+
+err() {
+  echo "notify-registry: $*" >&2
+}
+
+ensure_docker() {
+  if ! command -v docker >/dev/null 2>&1; then
+    err "Docker is required to run this script."
+    exit 1
+  fi
+
+  if ! docker info >/dev/null 2>&1; then
+    if [[ -z "${DOCKER_HOST:-}" && -S "$HOME/.colima/default/docker.sock" ]]; then
+      export DOCKER_HOST="unix://$HOME/.colima/default/docker.sock"
+    fi
+  fi
+
+  if ! docker info >/dev/null 2>&1; then
+    err "Docker daemon is not reachable. Start Docker (or Colima) and retry."
+    exit 1
+  fi
+}
+
+configure_platform() {
+  if [[ -z "${DOCKER_PLATFORM:-}" ]]; then
+    local arch
+    arch=$(uname -m)
+    if [[ "$arch" == "arm64" || "$arch" == "aarch64" ]]; then
+      DOCKER_PLATFORM=linux/amd64
+    fi
+  fi
+}
+
+run_outside_container() {
+  ensure_docker
+  configure_platform
+
+  mkdir -p "$CACHE_ROOT" "$POETRY_CACHE_DIR" "$PIP_CACHE_DIR" "$HOME_DIR"
+
+  local build_args=()
+  if [[ -n "${DOCKER_PLATFORM:-}" ]]; then
+    build_args+=(--platform "$DOCKER_PLATFORM")
+  fi
+  build_args+=(-t "$IMAGE_NAME" -f Dockerfile .)
+
+  docker build "${build_args[@]}"
+
+  local docker_args=(
+    --rm
+    --user "$(id -u):$(id -g)"
+    -e HOME=/workspace/$HOME_DIR
+    -e POETRY_CACHE_DIR=/workspace/$POETRY_CACHE_DIR
+    -e PIP_CACHE_DIR=/workspace/$PIP_CACHE_DIR
+    -v "$PWD":/workspace
+    -v "$PWD/$POETRY_CACHE_DIR":/workspace/"$POETRY_CACHE_DIR"
+    -v "$PWD/$PIP_CACHE_DIR":/workspace/"$PIP_CACHE_DIR"
+    -v "$PWD/$HOME_DIR":/workspace/"$HOME_DIR"
+    -w /workspace
+  )
+
+  if [[ -n "${DOCKER_PLATFORM:-}" ]]; then
+    docker_args+=(--platform "$DOCKER_PLATFORM")
+  fi
+
+  if [[ -f .env ]]; then
+    docker_args+=(--env-file "$PWD/.env")
+  fi
+
+  if [[ -n "${PYPI_TOKEN:-}" ]]; then
+    docker_args+=(-e "PYPI_TOKEN=${PYPI_TOKEN}")
+  fi
+
+  exec docker run "${docker_args[@]}" "$IMAGE_NAME" bash -lc 'set -euo pipefail; exec scripts/notify-registry.sh --inside-container "$@"' notify-registry "$@"
+}
+
+load_env_var() {
+  local var_name=$1
+  if [[ -n "${!var_name:-}" ]]; then
+    return 0
+  fi
+
+  if [[ -f .env ]]; then
+    # shellcheck disable=SC1091
+    source .env || err "Failed to source .env"
+  fi
+}
+
+verify_repo_state() {
+  if [[ -n "$(git status --porcelain)" ]]; then
+    err "Git working tree is not clean. Commit or stash changes before publishing."
+    exit 1
+  fi
+}
+
+verify_versions_consistent() {
+  local version python_version header_version
+  version=$(poetry version -s)
+  python_version=$(python -c "import transloadit; print(transloadit.__version__)")
+  header_version=$(grep -oE 'python-sdk:[0-9]+\.[0-9]+\.[0-9]+' tests/test_request.py | tail -n1 | cut -d: -f2)
+
+  if [[ "$version" != "$python_version" ]]; then
+    err "Version mismatch: pyproject.toml=$version but transloadit/__init__.py=$python_version"
+    exit 1
+  fi
+  if [[ "$version" != "$header_version" ]]; then
+    err "Version mismatch: tests/test_request.py expects $header_version but pyproject.toml has $version"
+    exit 1
+  fi
+  if ! grep -q "### ${version}/" CHANGELOG.md; then
+    err "CHANGELOG.md does not contain an entry for ${version}"
+    exit 1
+  fi
+}
+
+publish_inside_container() {
+  local dry_run=0
+
+  while [[ $# -gt 0 ]]; do
+    case "$1" in
+      --dry-run)
+        dry_run=1
+        shift
+        ;;
+      --inside-container)
+        shift
+        ;;
+      -h|--help)
+        usage
+        exit 0
+        ;;
+      *)
+        err "Unknown option: $1"
+        usage
+        exit 1
+        ;;
+    esac
+  done
+
+  load_env_var "PYPI_TOKEN"
+  if [[ -z "${PYPI_TOKEN:-}" ]]; then
+    err "PYPI_TOKEN is not set. Export it or add it to .env before publishing."
+    exit 1
+  fi
+  export POETRY_PYPI_TOKEN_PYPI="$PYPI_TOKEN"
+
+  verify_repo_state
+  verify_versions_consistent
+
+  rm -rf dist
+  poetry build
+
+  if [[ "$dry_run" == "1" ]]; then
+    err "Dry run complete. Built artifacts in dist/ but skipped publishing."
+    exit 0
+  fi
+
+  poetry publish --no-interaction --no-ansi
+  err "Published package to pypi.org."
+}
+
+main() {
+  if [[ "${1:-}" != "--inside-container" ]]; then
+    run_outside_container "$@"
+    return
+  fi
+
+  publish_inside_container "$@"
+}
+
+main "$@"
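Based on the helper's own `usage()` text, a typical release flow is to dry-run first and then publish; both commands read the token from the environment or `.env` (the token value below is a placeholder):

```bash
# Build and run the safety checks, but skip the upload
PYPI_TOKEN=pypi-XXXXX scripts/notify-registry.sh --dry-run

# Publish for real once the dry run passes
PYPI_TOKEN=pypi-XXXXX scripts/notify-registry.sh
```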
diff --git a/scripts/test-in-docker.sh b/scripts/test-in-docker.sh
new file mode 100755
index 0000000..4018b7f
--- /dev/null
+++ b/scripts/test-in-docker.sh
@@ -0,0 +1,206 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+IMAGE_PREFIX=${IMAGE_PREFIX:-transloadit-python-sdk-dev}
+CACHE_ROOT=${CACHE_ROOT:-.docker-cache}
+POETRY_CACHE_DIR="$CACHE_ROOT/pypoetry"
+PIP_CACHE_DIR="$CACHE_ROOT/pip"
+NPM_CACHE_DIR="$CACHE_ROOT/npm"
+HOME_DIR="$CACHE_ROOT/home"
+DEFAULT_MATRIX=("3.9" "3.10" "3.11" "3.12" "3.13")
+declare -a PYTHON_MATRIX=()
+declare -a CUSTOM_COMMAND=()
+
+usage() {
+  cat <<'EOF'
+Usage: scripts/test-in-docker.sh [options] [-- command ...]
+
+Options:
+  -p, --python VERSION   Only run for the given Python version (repeatable)
+  -h, --help             Show this help
+
+Environment:
+  PYTHON_VERSIONS   Space-separated Python versions to run (default CI matrix)
+  SKIP_POETRY_RUN   Set to 1 to run the custom command without "poetry run"
+  IMAGE_NAME        Override the Docker image name prefix
+  CACHE_ROOT        Override the cache directory (default: .docker-cache)
+
+Examples:
+  scripts/test-in-docker.sh
+  scripts/test-in-docker.sh --python 3.12
+  scripts/test-in-docker.sh -- pytest tests/test_client.py
+  SKIP_POETRY_RUN=1 scripts/test-in-docker.sh -- python -m pytest -k smartcdn
+EOF
+}
+
+ensure_docker() {
+  if ! command -v docker >/dev/null 2>&1; then
+    echo "Docker is required to run this script." >&2
+    exit 1
+  fi
+
+  if ! docker info >/dev/null 2>&1; then
+    if [[ -z "${DOCKER_HOST:-}" && -S "$HOME/.colima/default/docker.sock" ]]; then
+      export DOCKER_HOST="unix://$HOME/.colima/default/docker.sock"
+    fi
+  fi
+
+  if ! docker info >/dev/null 2>&1; then
+    echo "Docker daemon is not reachable. Start Docker (or Colima) and retry." >&2
+    exit 1
+  fi
+}
+
+configure_platform() {
+  if [[ -z "${DOCKER_PLATFORM:-}" ]]; then
+    local arch
+    arch=$(uname -m)
+    if [[ "$arch" == "arm64" || "$arch" == "aarch64" ]]; then
+      DOCKER_PLATFORM=linux/amd64
+    fi
+  fi
+}
+
+parse_python_versions() {
+  local -a cli_versions=()
+  local -a custom_cmd=()
+
+  while [[ $# -gt 0 ]]; do
+    case "$1" in
+      -p|--python)
+        if [[ $# -lt 2 ]]; then
+          echo "Missing value for $1" >&2
+          exit 1
+        fi
+        cli_versions+=("$2")
+        shift 2
+        ;;
+      --python=*)
+        cli_versions+=("${1#*=}")
+        shift
+        ;;
+      -h|--help)
+        usage
+        exit 0
+        ;;
+      --)
+        shift
+        custom_cmd=("$@")
+        break
+        ;;
+      *)
+        custom_cmd+=("$1")
+        shift
+        ;;
+    esac
+  done
+
+  if [[ ${#cli_versions[@]} -gt 0 ]]; then
+    PYTHON_MATRIX=("${cli_versions[@]}")
+  elif [[ -n "${PYTHON_VERSIONS:-}" ]]; then
+    read -r -a PYTHON_MATRIX <<< "$PYTHON_VERSIONS"
+  else
+    PYTHON_MATRIX=("${DEFAULT_MATRIX[@]}")
+  fi
+
+  if [[ ${#PYTHON_MATRIX[@]} -eq 0 ]]; then
+    PYTHON_MATRIX=("${DEFAULT_MATRIX[@]}")
+  fi
+
+  CUSTOM_COMMAND=("${custom_cmd[@]}")
+}
+
+build_image_for_version() {
+  local version=$1
+  local image_name=$2
+
+  local -a build_args=()
+  if [[ -n "${DOCKER_PLATFORM:-}" ]]; then
+    build_args+=(--platform "$DOCKER_PLATFORM")
+  fi
+  build_args+=(-t "$image_name" --build-arg "PYTHON_VERSION=$version" -f Dockerfile .)
+
+  echo "==> Building image $image_name (Python $version)"
+  docker build "${build_args[@]}"
+}
+
+run_for_version() {
+  local version=$1
+  local image_name=$2
+
+  local -a docker_args=(
+    --rm
+    --user "$(id -u):$(id -g)"
+    -v "$PWD":/workspace
+    -w /workspace
+  )
+
+  if [[ -n "${DOCKER_PLATFORM:-}" ]]; then
+    docker_args+=(--platform "$DOCKER_PLATFORM")
+  fi
+
+  mkdir -p "$POETRY_CACHE_DIR" "$PIP_CACHE_DIR" "$NPM_CACHE_DIR" "$HOME_DIR"
+
+  local container_home="/workspace/$HOME_DIR"
+  docker_args+=(
+    -e "HOME=$container_home"
+    -e "PIP_CACHE_DIR=/workspace/$PIP_CACHE_DIR"
+    -e "POETRY_CACHE_DIR=/workspace/$POETRY_CACHE_DIR"
+    -e "NPM_CONFIG_CACHE=/workspace/$NPM_CACHE_DIR"
+    -e "PYTHON_VERSION_UNDER_TEST=$version"
+    -v "$PWD/$POETRY_CACHE_DIR":/workspace/"$POETRY_CACHE_DIR"
+    -v "$PWD/$PIP_CACHE_DIR":/workspace/"$PIP_CACHE_DIR"
+    -v "$PWD/$NPM_CACHE_DIR":/workspace/"$NPM_CACHE_DIR"
+    -v "$PWD/$HOME_DIR":/workspace/"$HOME_DIR"
+  )
+
+  if [[ -f .env ]]; then
+    docker_args+=(--env-file "$PWD/.env")
+  fi
+
+  local -a passthrough_envs=(TRANSLOADIT_KEY TRANSLOADIT_SECRET TRANSLOADIT_TEMPLATE_ID PYTHON_SDK_E2E)
+  for var in "${passthrough_envs[@]}"; do
+    if [[ -n "${!var:-}" ]]; then
+      docker_args+=(-e "$var=${!var}")
+    fi
+  done
+
+  if [[ "$version" == "3.12" && ${#CUSTOM_COMMAND[@]} -eq 0 ]]; then
+    docker_args+=(-e TEST_NODE_PARITY=1)
+  fi
+
+  local run_cmd
+  if [[ ${#CUSTOM_COMMAND[@]} -gt 0 ]]; then
+    printf -v user_cmd '%q ' "${CUSTOM_COMMAND[@]}"
+    if [[ "${SKIP_POETRY_RUN:-0}" == "1" ]]; then
+      run_cmd="set -euo pipefail; poetry install; ${user_cmd}"
+    else
+      run_cmd="set -euo pipefail; poetry install; poetry run ${user_cmd}"
+    fi
+  else
+    if [[ "$version" == "3.12" ]]; then
+      run_cmd='set -euo pipefail; poetry install; poetry run pytest --cov=transloadit --cov-report=xml --cov-report=json --cov-report=html --cov-report=term-missing --cov-fail-under=65 tests'
+    else
+      run_cmd='set -euo pipefail; poetry install; poetry run pytest tests'
+    fi
+  fi
+
+  echo "==> Running Python $version: $run_cmd"
+  docker run "${docker_args[@]}" "$image_name" bash -lc "$run_cmd"
+}
+
+main() {
+  parse_python_versions "$@"
+  ensure_docker
+  configure_platform
+
+  mkdir -p "$CACHE_ROOT"
+
+  for version in "${PYTHON_MATRIX[@]}"; do
+    image_name="${IMAGE_NAME:-$IMAGE_PREFIX}-${version//./}"
+    build_image_for_version "$version" "$image_name"
+    run_for_version "$version" "$image_name"
+  done
+}
+
+main "$@"
"${PYTHON_MATRIX[@]}"; do + image_name="${IMAGE_NAME:-$IMAGE_PREFIX}-${version//./}" + build_image_for_version "$version" "$image_name" + run_for_version "$version" "$image_name" + done +} + +main "$@" diff --git a/tests/node-smartcdn-sig.ts b/tests/node-smartcdn-sig.ts deleted file mode 100755 index 2873f84..0000000 --- a/tests/node-smartcdn-sig.ts +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env tsx -// Reference Smart CDN (https://transloadit.com/services/content-delivery/) Signature implementation -// And CLI tester to see if our SDK's implementation -// matches Node's - -/// - -import { createHash, createHmac } from 'crypto' - -interface SmartCDNParams { - workspace: string - template: string - input: string - expire_at_ms?: number - auth_key?: string - auth_secret?: string - url_params?: Record -} - -function signSmartCDNUrl(params: SmartCDNParams): string { - const { - workspace, - template, - input, - expire_at_ms, - auth_key, - auth_secret, - url_params = {}, - } = params - - if (!workspace) throw new Error('workspace is required') - if (!template) throw new Error('template is required') - if (input === null || input === undefined) - throw new Error('input must be a string') - if (!auth_key) throw new Error('auth_key is required') - if (!auth_secret) throw new Error('auth_secret is required') - - const workspaceSlug = encodeURIComponent(workspace) - const templateSlug = encodeURIComponent(template) - const inputField = encodeURIComponent(input) - - const expireAt = expire_at_ms ?? Date.now() + 60 * 60 * 1000 // 1 hour default - - const queryParams: Record = {} - - // Handle url_params - Object.entries(url_params).forEach(([key, value]) => { - if (value === null || value === undefined) return - if (Array.isArray(value)) { - value.forEach((val) => { - if (val === null || val === undefined) return - ;(queryParams[key] ||= []).push(String(val)) - }) - } else { - queryParams[key] = [String(value)] - } - }) - - queryParams.auth_key = [auth_key] - queryParams.exp = [String(expireAt)] - - // Sort parameters to ensure consistent ordering - const sortedParams = Object.entries(queryParams) - .sort() - .map(([key, values]) => - values.map((v) => `${encodeURIComponent(key)}=${encodeURIComponent(v)}`) - ) - .flat() - .join('&') - - const stringToSign = `${workspaceSlug}/${templateSlug}/${inputField}?${sortedParams}` - const signature = createHmac('sha256', auth_secret) - .update(stringToSign) - .digest('hex') - - const finalParams = `${sortedParams}&sig=${encodeURIComponent( - `sha256:${signature}` - )}` - return `https://${workspaceSlug}.tlcdn.com/${templateSlug}/${inputField}?${finalParams}` -} - -// Read JSON from stdin -let jsonInput = '' -process.stdin.on('data', (chunk) => { - jsonInput += chunk -}) - -process.stdin.on('end', () => { - const params = JSON.parse(jsonInput) - console.log(signSmartCDNUrl(params)) -}) diff --git a/tests/test_client.py b/tests/test_client.py index 1b4c207..f363169 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -5,7 +5,6 @@ import platform import subprocess import time -from pathlib import Path import requests_mock from six.moves import urllib @@ -24,23 +23,29 @@ def get_expected_url(params): print('Skipping Node.js parity testing on Windows') return None - # Check for tsx before trying to use it - tsx_path = subprocess.run(['which', 'tsx'], capture_output=True) - if tsx_path.returncode != 0: - raise RuntimeError('tsx command not found. 
diff --git a/tests/test_client.py b/tests/test_client.py
index 1b4c207..f363169 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -5,7 +5,6 @@
 import platform
 import subprocess
 import time
-from pathlib import Path
 
 import requests_mock
 from six.moves import urllib
@@ -24,23 +23,29 @@ def get_expected_url(params):
         print('Skipping Node.js parity testing on Windows')
         return None
 
-    # Check for tsx before trying to use it
-    tsx_path = subprocess.run(['which', 'tsx'], capture_output=True)
-    if tsx_path.returncode != 0:
-        raise RuntimeError('tsx command not found. Please install it with: npm install -g tsx')
+    # Check for npx before trying to use the CLI
+    npx_path = subprocess.run(['which', 'npx'], capture_output=True)
+    if npx_path.returncode != 0:
+        raise RuntimeError('npx command not found. Please install Node.js (>=20) to use the Transloadit CLI.')
 
-    script_path = Path(__file__).parent / 'node-smartcdn-sig.ts'
-    json_input = json.dumps(params)
+    cli_params = {k: v for k, v in params.items() if k not in {'auth_key', 'auth_secret'}}
+    json_input = json.dumps(cli_params)
+    env = os.environ.copy()
+    env.update({
+        'TRANSLOADIT_KEY': params.get('auth_key', ''),
+        'TRANSLOADIT_SECRET': params.get('auth_secret', '')
+    })
 
     result = subprocess.run(
-        ['tsx', str(script_path)],
+        ['npx', '--yes', 'transloadit', 'smart_sig'],
         input=json_input,
         capture_output=True,
-        text=True
+        text=True,
+        env=env
     )
 
     if result.returncode != 0:
-        raise RuntimeError(f'Node script failed: {result.stderr}')
+        raise RuntimeError(f'Transloadit CLI smart_sig failed: {result.stderr}')
 
     return result.stdout.strip()
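The parity helper above can also be reproduced by hand when debugging a signature mismatch: it pipes the parameters (minus the credentials) as JSON into `npx transloadit smart_sig` and passes the credentials through the environment. The field names follow the deleted reference script; the values below are made up:

```bash
export TRANSLOADIT_KEY=your-auth-key TRANSLOADIT_SECRET=your-auth-secret
echo '{"workspace": "my-workspace", "template": "my-template", "input": "example.jpg"}' \
  | npx --yes transloadit smart_sig
```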
+ "[python-sdk][e2e] Result dimensions: " + f"{width}x{height}, ssl_url={ssl_url}, basename={upload_info.get('basename')}, " + f"filename={upload_info.get('name')}" + ) diff --git a/tests/test_request.py b/tests/test_request.py index 19a4a6c..e5679be 100644 --- a/tests/test_request.py +++ b/tests/test_request.py @@ -19,7 +19,7 @@ def test_get(self, mock): mock.get( url, text='{"ok": "it works"}', - request_headers={"Transloadit-Client": "python-sdk:1.0.2"}, + request_headers={"Transloadit-Client": "python-sdk:1.0.3"}, ) response = self.request.get("/foo") diff --git a/transloadit/__init__.py b/transloadit/__init__.py index 7863915..976498a 100644 --- a/transloadit/__init__.py +++ b/transloadit/__init__.py @@ -1 +1 @@ -__version__ = "1.0.2" +__version__ = "1.0.3"