diff --git a/.changeset/README.md b/.changeset/README.md new file mode 100644 index 00000000..e5b6d8d6 --- /dev/null +++ b/.changeset/README.md @@ -0,0 +1,8 @@ +# Changesets + +Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works +with multi-package repos, or single-package repos to help you version and publish your code. You can +find the full documentation for it [in our repository](https://github.com/changesets/changesets) + +We have a quick list of common questions to get you started engaging with this project in +[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md) diff --git a/.changeset/config.json b/.changeset/config.json new file mode 100644 index 00000000..d88011f6 --- /dev/null +++ b/.changeset/config.json @@ -0,0 +1,11 @@ +{ + "$schema": "https://unpkg.com/@changesets/config@3.1.1/schema.json", + "changelog": "@changesets/cli/changelog", + "commit": false, + "fixed": [], + "linked": [], + "access": "restricted", + "baseBranch": "main", + "updateInternalDependencies": "patch", + "ignore": [] +} diff --git a/.github/release.yml b/.github/release.yml deleted file mode 100644 index b752cfae..00000000 --- a/.github/release.yml +++ /dev/null @@ -1,14 +0,0 @@ -changelog: - categories: - - title: Breaking Changes ⚠️ - labels: - - breaking-change - - title: New Features 🎉 - labels: - - "enhancement" - - title: Bug Fixes 🐛 - labels: - - bug - - title: Documentation 📚 - labels: - - documentation diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 79b03e11..6202f715 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -15,8 +15,8 @@ jobs: - name: Install uv uses: astral-sh/setup-uv@v6 - - name: Run linter - shell: bash - run: | - uv sync --all-extras --all-packages - uv run -- pre-commit run -a + - name: Install dev dependencies + run: uv sync --all-extras --all-packages + + - name: Run pre-commit + run: uv run pre-commit run -a diff --git a/.github/workflows/publish_changesets.yml b/.github/workflows/publish_changesets.yml new file mode 100644 index 00000000..15df1c12 --- /dev/null +++ b/.github/workflows/publish_changesets.yml @@ -0,0 +1,82 @@ +name: Version Bump and Release + +on: + push: + branches: + - main + +concurrency: ${{ github.workflow }}-${{ github.ref }} + +jobs: + release: + name: Release + runs-on: ubuntu-latest + # Only run on main branch pushes + if: github.ref == 'refs/heads/main' + steps: + - name: Checkout Repo + uses: actions/checkout@v5 + + - uses: pnpm/action-setup@v4 + + - name: Setup Node.js + uses: actions/setup-node@v5 + with: + node-version: "22" + cache: "pnpm" + + - name: Setup Python + uses: actions/setup-python@v6 + with: + python-version: "3.11" + + - name: Install uv + uses: astral-sh/setup-uv@v7 + + - name: Install dependencies + run: pnpm install + + - name: Create Release Pull Request or Publish packages + id: changesets + uses: changesets/action@v1 + with: + commit: "chore: version packages" + title: "chore: version packages" + # Custom version script + version: pnpm -w run version + # Custom publish script + publish: pnpm -w run publish + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + UV_PUBLISH_TOKEN: ${{ secrets.UV_PUBLISH_TOKEN }} + + - name: Generate GitHub App token + if: steps.changesets.outputs.published == 'true' + id: app-token + uses: actions/create-github-app-token@v1 + with: + app-id: ${{ secrets.CI_BOT_APP_ID }} + private-key: ${{ secrets.CI_BOT_PRIVATE_KEY }} + + - name: Extract published 
llama-index-workflows version + if: steps.changesets.outputs.published == 'true' + id: workflows-version + run: | + PACKAGES='${{ steps.changesets.outputs.publishedPackages }}' + VERSION=$(echo "$PACKAGES" | jq -r '.[] | select(.name == "llama-index-workflows") | .version') + + if [ -n "$VERSION" ]; then + echo "version=$VERSION" >> "$GITHUB_OUTPUT" + echo "tag=llama-index-workflows@v${VERSION}" >> "$GITHUB_OUTPUT" + echo "Found llama-index-workflows version: $VERSION" + else + echo "llama-index-workflows was not published in this release" + fi + + - name: Trigger OpenAPI publish for llama-index-workflows + if: steps.changesets.outputs.published == 'true' && steps.workflows-version.outputs.version != '' + uses: peter-evans/repository-dispatch@v3 + with: + token: ${{ steps.app-token.outputs.token }} + event-type: publish-openapi + client-payload: '{"tag": "${{ steps.workflows-version.outputs.tag }}"}' diff --git a/.github/workflows/publish_openapi.yml b/.github/workflows/publish_openapi.yml new file mode 100644 index 00000000..615de64e --- /dev/null +++ b/.github/workflows/publish_openapi.yml @@ -0,0 +1,97 @@ +name: Publish OpenAPI specification + +on: + workflow_dispatch: + inputs: + tag: + description: "Release tag to update (e.g. llama-index-workflows@v2.12.0)" + required: true + type: string + repository_dispatch: + types: [publish-openapi] + +jobs: + publish-openapi: + if: github.repository == 'run-llama/workflows-py' + runs-on: ubuntu-latest + permissions: + contents: write + + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Install uv + uses: astral-sh/setup-uv@v6 + + - name: Resolve target tag + id: target + run: | + if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then + TAG="${{ github.event.inputs.tag }}" + else + TAG="${{ github.event.client_payload.tag }}" + fi + echo "tag=${TAG}" >> "$GITHUB_OUTPUT" + echo "TARGET_TAG=${TAG}" >> "$GITHUB_ENV" + + - name: Extract version information + id: version + run: > + uv run --package workflows-dev workflows-dev extract-tag-info + --tag "${{ env.TARGET_TAG }}" + --tag-prefix "llama-index-workflows@" + --output "$GITHUB_OUTPUT" + + - name: Sync dependencies and build OpenAPI spec + working-directory: packages/llama-index-workflows + run: | + uv sync --all-extras + uv run hatch run server:openapi + + - name: Upload OpenAPI to release + uses: softprops/action-gh-release@v2 + with: + tag_name: ${{ env.TARGET_TAG }} + files: packages/llama-index-workflows/openapi.json + fail_on_unmatched_files: true + append_body: false + + - name: Detect change type automatically + id: detect_change + env: + GITHUB_REF: refs/tags/${{ env.TARGET_TAG }} + run: > + uv run --package workflows-dev workflows-dev detect-change-type + --tag-glob "llama-index-workflows@v*" + --tag-prefix "llama-index-workflows@" + + - name: Resolve change metadata + id: metadata + run: | + echo "change_type=${{ steps.detect_change.outputs.change_type }}" >> "$GITHUB_OUTPUT" + echo "change_description=" >> "$GITHUB_OUTPUT" + + - name: Generate GitHub App token + id: app-token + uses: actions/create-github-app-token@v1 + with: + app-id: ${{ secrets.CI_BOT_APP_ID }} + private-key: ${{ secrets.CI_BOT_PRIVATE_KEY }} + owner: run-llama + + - name: Trigger SDK update + if: > + steps.metadata.outputs.change_type != '' && + steps.metadata.outputs.change_type != 'none' + uses: peter-evans/repository-dispatch@v3 + with: + token: ${{ steps.app-token.outputs.token }} + repository: run-llama/llama-ui + event-type: workflows-sdk-update + client-payload: >- + 
{"version": "${{ steps.version.outputs.semver }}", + "openapi_url": "https://github.com/run-llama/workflows-py/releases/download/${{ env.TARGET_TAG }}/openapi.json", + "change_type": "${{ steps.metadata.outputs.change_type }}", + "change_description": "${{ steps.metadata.outputs.change_description }}"} diff --git a/.github/workflows/publish_release.yml b/.github/workflows/publish_release.yml deleted file mode 100644 index 9e545cb5..00000000 --- a/.github/workflows/publish_release.yml +++ /dev/null @@ -1,100 +0,0 @@ -name: Publish to PyPI and GitHub - -on: - workflow_dispatch: - inputs: - server_change_type: - description: 'Server/API change type, this will be used to generate the client SDK' - required: false - default: 'none' - type: choice - options: - - none - - patch - - minor - - major - change_description: - description: 'Description of server/API changes (optional), this will be used to generate the client SDK' - required: false - type: string - push: - tags: - - "v*" - -jobs: - build-n-publish: - name: Build and publish to PyPI - if: github.repository == 'run-llama/workflows-py' - runs-on: ubuntu-latest - permissions: - contents: write - - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 # Need full history for tag comparison - - - name: Install uv - uses: astral-sh/setup-uv@v6 - - - name: Validate version - if: startsWith(github.ref, 'refs/tags/') - run: uv run python scripts/validate_version.py - - - name: Set version output - id: version - run: | - VERSION=$(uv run python -c "from importlib.metadata import version; print(version('llama-index-workflows'))") - echo "Version: $VERSION" - echo "version=$VERSION" >> $GITHUB_OUTPUT - - - name: Build and publish - env: - UV_PUBLISH_TOKEN: ${{ secrets.PYPI_TOKEN }} - run: | - uv build - uv publish - - - name: Generate OpenAPI spec - run: | - uv sync --all-extras - uv run hatch run server:openapi - - - name: Create GitHub Release - uses: ncipollo/release-action@v1 - with: - artifacts: "dist/*,openapi.json" - generateReleaseNotes: true - - - name: Detect change type for tags - id: detect_change - if: startsWith(github.ref, 'refs/tags/') - run: uv run python scripts/detect_change_type.py - - - name: Set SDK trigger parameters - id: sdk_params - run: | - if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then - echo "change_type=${{ github.event.inputs.server_change_type }}" >> $GITHUB_OUTPUT - echo "change_description=${{ github.event.inputs.change_description }}" >> $GITHUB_OUTPUT - else - echo "change_type=${{ steps.detect_change.outputs.change_type }}" >> $GITHUB_OUTPUT - echo "change_description=" >> $GITHUB_OUTPUT - fi - - - name: Generate GitHub App Token - id: app-token - uses: actions/create-github-app-token@v1 - with: - app-id: ${{ secrets.CI_BOT_APP_ID }} - private-key: ${{ secrets.CI_BOT_PRIVATE_KEY }} - owner: run-llama - - - name: Trigger SDK Update - if: steps.sdk_params.outputs.change_type != 'none' && steps.sdk_params.outputs.change_type != '' - uses: peter-evans/repository-dispatch@v3 - with: - token: ${{ steps.app-token.outputs.token }} - repository: run-llama/llama-ui - event-type: workflows-sdk-update - client-payload: '{"version": "${{ steps.version.outputs.version }}", "openapi_url": "https://github.com/run-llama/workflows-py/releases/download/v${{ steps.version.outputs.version }}/openapi.json", "change_type": "${{ steps.sdk_params.outputs.change_type }}", "change_description": "${{ steps.sdk_params.outputs.change_description }}"}' diff --git a/.github/workflows/publish_release_utils.yml 
b/.github/workflows/publish_release_utils.yml deleted file mode 100644 index aa7ff1d7..00000000 --- a/.github/workflows/publish_release_utils.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: Publish utils to PyPI and GitHub - -on: - workflow_dispatch: - push: - tags: - - "llama-index-utils-workflow@v*" - -jobs: - build-n-publish-utils: - name: Build and publish utils to PyPI - if: github.repository == 'run-llama/workflows-py' - runs-on: ubuntu-latest - permissions: - contents: write - - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Install uv - uses: astral-sh/setup-uv@v6 - - - name: Build and publish utils package - env: - UV_PUBLISH_TOKEN: ${{ secrets.PYPI_TOKEN }} - run: | - uv build --package llama-index-utils-workflow - uv publish diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 0e4818c9..ef45db9d 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -17,14 +17,25 @@ jobs: fail-fast: false matrix: python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] - package: [llama-index-workflows, llama-index-utils-workflow] + package: [llama-index-workflows, llama-index-utils-workflow, workflows-dev] + exclude: + - package: workflows-dev + python-version: "3.9" + - package: workflows-dev + python-version: "3.10" + - package: workflows-dev + python-version: "3.11" + - package: workflows-dev + python-version: "3.12" + - package: workflows-dev + python-version: "3.13" include: - package: llama-index-workflows - is_root: true - coverage_report: true + package_dir: packages/llama-index-workflows - package: llama-index-utils-workflow - is_root: false - coverage_report: false + package_dir: packages/llama-index-utils-workflow + - package: workflows-dev + package_dir: packages/workflows-dev steps: - uses: actions/checkout@v4 with: @@ -36,22 +47,32 @@ jobs: python-version: ${{ matrix.python-version }} enable-cache: true - - name: Compute directory flag - run: | - if [ "${{ matrix.is_root }}" = "true" ]; then - echo "DIR_FLAG=" >> $GITHUB_ENV - else - echo "DIR_FLAG=--directory packages/${{ matrix.package }}" >> $GITHUB_ENV - fi - - - name: Sync dev dependencies for selected package - run: uv sync --all-extras $DIR_FLAG --group dev + - name: Run tests + if: matrix.python-version != env.COV_PYTHON_VERSION + run: uv run --all-extras --directory ${{ matrix.package_dir }} -- pytest - name: Run tests with coverage - run: uv run --all-extras $DIR_FLAG -- pytest --cov --cov-report=xml + if: matrix.python-version == env.COV_PYTHON_VERSION + run: uv run --all-extras --directory ${{ matrix.package_dir }} -- pytest --cov --cov-report=xml - name: Report Coveralls - if: matrix.coverage_report == true && matrix.python-version == env.COV_PYTHON_VERSION + if: matrix.python-version == env.COV_PYTHON_VERSION uses: coverallsapp/github-action@v2 + with: + file: ${{ matrix.package_dir }}/coverage.xml + flag-name: ${{ matrix.package }} + parallel: true + env: + COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} + + coveralls-finish: + needs: test + runs-on: ubuntu-latest + if: github.repository == 'run-llama/workflows-py' + steps: + - name: Finalize Coveralls parallel jobs + uses: coverallsapp/github-action@v2 + with: + parallel-finished: true env: COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} diff --git a/.github/workflows/update_debugger_assets.yml b/.github/workflows/update_debugger_assets.yml index 83ad5b09..dc1eec73 100644 --- a/.github/workflows/update_debugger_assets.yml +++ b/.github/workflows/update_debugger_assets.yml @@ 
-25,10 +25,8 @@ jobs: with: token: ${{ secrets.GITHUB_TOKEN }} - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.11' + - name: Install uv + uses: astral-sh/setup-uv@v6 - name: Extract payload data id: payload @@ -43,10 +41,26 @@ jobs: fi - name: Update index.html + run: > + uv run --package workflows-dev workflows-dev update-index-html + --js-url "${{ steps.payload.outputs.js_url }}" + --css-url "${{ steps.payload.outputs.css_url }}" + + - name: Create changeset for patch version run: | - python scripts/update_index_html.py \ - --js-url "${{ steps.payload.outputs.js_url }}" \ - --css-url "${{ steps.payload.outputs.css_url }}" + # Create a changeset file with a unique name + CHANGESET_FILE=".changeset/update-debugger-assets-$(date +%s).md" + cat > "$CHANGESET_FILE" << 'EOF' + --- + "llama-index-workflows": patch + --- + + Update debugger assets + + - JavaScript: ${{ steps.payload.outputs.js_url }} + - CSS: ${{ steps.payload.outputs.css_url }} + EOF + echo "Created changeset: $CHANGESET_FILE" - name: Create Pull Request uses: peter-evans/create-pull-request@v6 @@ -59,10 +73,10 @@ jobs: body: | ## Update Debugger Assets - This PR updates the workflow debugger assets. + This PR updates the workflow debugger assets and creates a changeset for a patch version bump. ### Changes - - Updated `src/workflows/server/static/index.html` + - Updated `packages/llama-index-workflows/src/workflows/server/static/index.html` - JavaScript: ${{ steps.payload.outputs.js_url }} - CSS: ${{ steps.payload.outputs.css_url }} diff --git a/.gitignore b/.gitignore index 8e07f9f2..471d6214 100644 --- a/.gitignore +++ b/.gitignore @@ -147,3 +147,5 @@ cython_debug/ # Generated files openapi.json +workflow_all_flows.mermaid +node_modules/ diff --git a/AGENTS.md b/AGENTS.md index c81d2478..d9390bfa 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -15,28 +15,28 @@ This is the LlamaIndex Workflows library - an event-driven, async-first framewor ### Testing ```bash # Run all tests -uv run pytest +uv run --directory packages/llama-index-workflows pytest # Run tests with coverage -uv run pytest --cov=src/workflows --cov-report=html +uv run --directory packages/llama-index-workflows pytest --cov=src/workflows --cov-report=html # Run specific test files -uv run pytest tests/test_server.py tests/test_server_utils.py +uv run --directory packages/llama-index-workflows pytest tests/test_server.py tests/test_server_utils.py # Run tests in verbose mode -uv run pytest -v +uv run --directory packages/llama-index-workflows pytest -v ``` ### Linting & Formatting ```bash # Run pre-commit hooks -uv run pre-commit run -a +uv run --directory packages/llama-index-workflows pre-commit run -a ``` ## Project Structure -- `src/workflows/` - Main library code -- `src/workflows/server/` - Web server implementation -- `tests/` - Test suite +- `packages/llama-index-workflows/src/workflows/` - Main library code +- `packages/llama-index-workflows/src/workflows/server/` - Web server implementation +- `packages/llama-index-workflows/tests/` - Test suite - `examples/` - Usage examples ## Key Components @@ -46,7 +46,7 @@ uv run pre-commit run -a - **WorkflowServer** - HTTP server for serving workflows as web services ## Notes for Claude -- Always run tests after making changes: `uv run pytest` +- Always run tests after making changes: `uv run --directory packages/llama-index-workflows pytest` - Never use classes for tests, only use pytest functions - Always annotate with types function arguments and return values - The project uses 
async/await extensively
@@ -67,10 +67,10 @@ Always run test tests and pre-commit commands before committing. They run very f
 Tests:
 ```bash
-uv run pytest -nauto --timeout=1
+uv run --directory packages/llama-index-workflows pytest -nauto --timeout=1
 ```
 
 Linting, typechecking, and formatting:
 
 ```bash
-uv run pre-commit run -a
+uv run --directory packages/llama-index-workflows pre-commit run -a
 ```
diff --git a/package.json b/package.json
new file mode 100644
index 00000000..73531131
--- /dev/null
+++ b/package.json
@@ -0,0 +1,11 @@
+{
+  "name": "llama-agents-workspace",
+  "version": "1.0.0",
+  "private": true,
+  "license": "MIT",
+  "scripts": {
+    "pre-commit-version": "pnpm changeset",
+    "version": "uv run workflows-dev changeset-version",
+    "release": "uv run workflows-dev changeset-publish --tag"
+  }
+}
diff --git a/packages/llama-index-utils-workflow/package.json b/packages/llama-index-utils-workflow/package.json
new file mode 100644
index 00000000..db2d086b
--- /dev/null
+++ b/packages/llama-index-utils-workflow/package.json
@@ -0,0 +1,8 @@
+{
+  "name": "llama-index-utils-workflow",
+  "version": "0.5.0",
+  "private": false,
+  "license": "MIT",
+  "scripts": {
+  }
+}
diff --git a/packages/llama-index-workflows/README.md b/packages/llama-index-workflows/README.md
new file mode 100644
index 00000000..5fdbd2bd
--- /dev/null
+++ b/packages/llama-index-workflows/README.md
@@ -0,0 +1 @@
+This is the llama-index-workflows package. See the root [README](../../README.md) for more information.
diff --git a/packages/llama-index-workflows/package.json b/packages/llama-index-workflows/package.json
new file mode 100644
index 00000000..9fb36af6
--- /dev/null
+++ b/packages/llama-index-workflows/package.json
@@ -0,0 +1,8 @@
+{
+  "name": "llama-index-workflows",
+  "version": "2.11.1",
+  "private": false,
+  "license": "MIT",
+  "scripts": {
+  }
+}
diff --git a/packages/llama-index-workflows/pyproject.toml b/packages/llama-index-workflows/pyproject.toml
new file mode 100644
index 00000000..dc954d28
--- /dev/null
+++ b/packages/llama-index-workflows/pyproject.toml
@@ -0,0 +1,52 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[dependency-groups]
+dev = [
+    "pre-commit>=4.2.0",
+    "pytest>=8.4.0",
+    "pytest-asyncio>=1.0.0",
+    "pytest-cov>=6.1.1",
+    "httpx>=0.25.0",
+    "hatch>=1.14.1",
+    "pyyaml>=6.0.2",
+    "packaging>=21.0",
+    "pytest-xdist>=3.8.0",
+    "pytest-timeout>=2.4.0",
+    "structlog>=25.5.0",
+    "starlette>=0.39.0",
+    "uvicorn>=0.32.0"
+]
+
+[project]
+name = "llama-index-workflows"
+version = "2.11.1"
+description = "An event-driven, async-first, step-based way to control the execution flow of AI applications like Agents."
+readme = "README.md" +license = "MIT" +requires-python = ">=3.9" +dependencies = [ + "eval-type-backport>=0.2.2 ; python_full_version < '3.10'", + "llama-index-instrumentation>=0.1.0", + "pydantic>=2.11.5", + "typing-extensions>=4.6.0" +] + +[project.optional-dependencies] +server = ["starlette>=0.39.0", "uvicorn>=0.32.0"] +client = ["httpx>=0.28.1,<1"] + +[tool.hatch.build.targets.wheel] +packages = ["src/workflows"] + +[tool.hatch.envs.server] +features = ["server"] +dependencies = ["pyyaml>=6.0.2"] + +[tool.hatch.envs.server.scripts] +openapi = "python -m workflows.server.server --output openapi.json" + +[tool.pytest.ini_options] +asyncio_mode = "auto" +testpaths = ["tests"] diff --git a/src/workflows/__init__.py b/packages/llama-index-workflows/src/workflows/__init__.py similarity index 100% rename from src/workflows/__init__.py rename to packages/llama-index-workflows/src/workflows/__init__.py diff --git a/src/workflows/client/__init__.py b/packages/llama-index-workflows/src/workflows/client/__init__.py similarity index 100% rename from src/workflows/client/__init__.py rename to packages/llama-index-workflows/src/workflows/client/__init__.py diff --git a/src/workflows/client/client.py b/packages/llama-index-workflows/src/workflows/client/client.py similarity index 100% rename from src/workflows/client/client.py rename to packages/llama-index-workflows/src/workflows/client/client.py diff --git a/src/workflows/context/__init__.py b/packages/llama-index-workflows/src/workflows/context/__init__.py similarity index 100% rename from src/workflows/context/__init__.py rename to packages/llama-index-workflows/src/workflows/context/__init__.py diff --git a/src/workflows/context/context.py b/packages/llama-index-workflows/src/workflows/context/context.py similarity index 100% rename from src/workflows/context/context.py rename to packages/llama-index-workflows/src/workflows/context/context.py diff --git a/src/workflows/context/context_types.py b/packages/llama-index-workflows/src/workflows/context/context_types.py similarity index 100% rename from src/workflows/context/context_types.py rename to packages/llama-index-workflows/src/workflows/context/context_types.py diff --git a/src/workflows/context/py.typed b/packages/llama-index-workflows/src/workflows/context/py.typed similarity index 100% rename from src/workflows/context/py.typed rename to packages/llama-index-workflows/src/workflows/context/py.typed diff --git a/src/workflows/context/serializers.py b/packages/llama-index-workflows/src/workflows/context/serializers.py similarity index 100% rename from src/workflows/context/serializers.py rename to packages/llama-index-workflows/src/workflows/context/serializers.py diff --git a/src/workflows/context/state_store.py b/packages/llama-index-workflows/src/workflows/context/state_store.py similarity index 100% rename from src/workflows/context/state_store.py rename to packages/llama-index-workflows/src/workflows/context/state_store.py diff --git a/src/workflows/context/utils.py b/packages/llama-index-workflows/src/workflows/context/utils.py similarity index 100% rename from src/workflows/context/utils.py rename to packages/llama-index-workflows/src/workflows/context/utils.py diff --git a/src/workflows/decorators.py b/packages/llama-index-workflows/src/workflows/decorators.py similarity index 100% rename from src/workflows/decorators.py rename to packages/llama-index-workflows/src/workflows/decorators.py diff --git a/src/workflows/errors.py b/packages/llama-index-workflows/src/workflows/errors.py 
similarity index 100% rename from src/workflows/errors.py rename to packages/llama-index-workflows/src/workflows/errors.py diff --git a/src/workflows/events.py b/packages/llama-index-workflows/src/workflows/events.py similarity index 100% rename from src/workflows/events.py rename to packages/llama-index-workflows/src/workflows/events.py diff --git a/src/workflows/handler.py b/packages/llama-index-workflows/src/workflows/handler.py similarity index 100% rename from src/workflows/handler.py rename to packages/llama-index-workflows/src/workflows/handler.py diff --git a/src/workflows/plugins/basic.py b/packages/llama-index-workflows/src/workflows/plugins/basic.py similarity index 100% rename from src/workflows/plugins/basic.py rename to packages/llama-index-workflows/src/workflows/plugins/basic.py diff --git a/src/workflows/protocol/__init__.py b/packages/llama-index-workflows/src/workflows/protocol/__init__.py similarity index 100% rename from src/workflows/protocol/__init__.py rename to packages/llama-index-workflows/src/workflows/protocol/__init__.py diff --git a/src/workflows/protocol/serializable_events.py b/packages/llama-index-workflows/src/workflows/protocol/serializable_events.py similarity index 100% rename from src/workflows/protocol/serializable_events.py rename to packages/llama-index-workflows/src/workflows/protocol/serializable_events.py diff --git a/src/workflows/py.typed b/packages/llama-index-workflows/src/workflows/py.typed similarity index 100% rename from src/workflows/py.typed rename to packages/llama-index-workflows/src/workflows/py.typed diff --git a/src/workflows/resource.py b/packages/llama-index-workflows/src/workflows/resource.py similarity index 100% rename from src/workflows/resource.py rename to packages/llama-index-workflows/src/workflows/resource.py diff --git a/src/workflows/retry_policy.py b/packages/llama-index-workflows/src/workflows/retry_policy.py similarity index 100% rename from src/workflows/retry_policy.py rename to packages/llama-index-workflows/src/workflows/retry_policy.py diff --git a/src/workflows/runtime/broker.py b/packages/llama-index-workflows/src/workflows/runtime/broker.py similarity index 100% rename from src/workflows/runtime/broker.py rename to packages/llama-index-workflows/src/workflows/runtime/broker.py diff --git a/src/workflows/runtime/control_loop.py b/packages/llama-index-workflows/src/workflows/runtime/control_loop.py similarity index 100% rename from src/workflows/runtime/control_loop.py rename to packages/llama-index-workflows/src/workflows/runtime/control_loop.py diff --git a/src/workflows/runtime/types/_identity_weak_ref.py b/packages/llama-index-workflows/src/workflows/runtime/types/_identity_weak_ref.py similarity index 100% rename from src/workflows/runtime/types/_identity_weak_ref.py rename to packages/llama-index-workflows/src/workflows/runtime/types/_identity_weak_ref.py diff --git a/src/workflows/runtime/types/commands.py b/packages/llama-index-workflows/src/workflows/runtime/types/commands.py similarity index 100% rename from src/workflows/runtime/types/commands.py rename to packages/llama-index-workflows/src/workflows/runtime/types/commands.py diff --git a/src/workflows/runtime/types/internal_state.py b/packages/llama-index-workflows/src/workflows/runtime/types/internal_state.py similarity index 100% rename from src/workflows/runtime/types/internal_state.py rename to packages/llama-index-workflows/src/workflows/runtime/types/internal_state.py diff --git a/src/workflows/runtime/types/plugin.py 
b/packages/llama-index-workflows/src/workflows/runtime/types/plugin.py similarity index 100% rename from src/workflows/runtime/types/plugin.py rename to packages/llama-index-workflows/src/workflows/runtime/types/plugin.py diff --git a/src/workflows/runtime/types/results.py b/packages/llama-index-workflows/src/workflows/runtime/types/results.py similarity index 100% rename from src/workflows/runtime/types/results.py rename to packages/llama-index-workflows/src/workflows/runtime/types/results.py diff --git a/src/workflows/runtime/types/step_function.py b/packages/llama-index-workflows/src/workflows/runtime/types/step_function.py similarity index 100% rename from src/workflows/runtime/types/step_function.py rename to packages/llama-index-workflows/src/workflows/runtime/types/step_function.py diff --git a/src/workflows/runtime/types/ticks.py b/packages/llama-index-workflows/src/workflows/runtime/types/ticks.py similarity index 100% rename from src/workflows/runtime/types/ticks.py rename to packages/llama-index-workflows/src/workflows/runtime/types/ticks.py diff --git a/src/workflows/runtime/workflow_registry.py b/packages/llama-index-workflows/src/workflows/runtime/workflow_registry.py similarity index 100% rename from src/workflows/runtime/workflow_registry.py rename to packages/llama-index-workflows/src/workflows/runtime/workflow_registry.py diff --git a/src/workflows/server/__init__.py b/packages/llama-index-workflows/src/workflows/server/__init__.py similarity index 100% rename from src/workflows/server/__init__.py rename to packages/llama-index-workflows/src/workflows/server/__init__.py diff --git a/src/workflows/server/__main__.py b/packages/llama-index-workflows/src/workflows/server/__main__.py similarity index 100% rename from src/workflows/server/__main__.py rename to packages/llama-index-workflows/src/workflows/server/__main__.py diff --git a/src/workflows/server/abstract_workflow_store.py b/packages/llama-index-workflows/src/workflows/server/abstract_workflow_store.py similarity index 100% rename from src/workflows/server/abstract_workflow_store.py rename to packages/llama-index-workflows/src/workflows/server/abstract_workflow_store.py diff --git a/src/workflows/server/memory_workflow_store.py b/packages/llama-index-workflows/src/workflows/server/memory_workflow_store.py similarity index 100% rename from src/workflows/server/memory_workflow_store.py rename to packages/llama-index-workflows/src/workflows/server/memory_workflow_store.py diff --git a/src/workflows/server/representation_utils.py b/packages/llama-index-workflows/src/workflows/server/representation_utils.py similarity index 100% rename from src/workflows/server/representation_utils.py rename to packages/llama-index-workflows/src/workflows/server/representation_utils.py diff --git a/src/workflows/server/server.py b/packages/llama-index-workflows/src/workflows/server/server.py similarity index 100% rename from src/workflows/server/server.py rename to packages/llama-index-workflows/src/workflows/server/server.py diff --git a/src/workflows/server/sqlite/__init__.py b/packages/llama-index-workflows/src/workflows/server/sqlite/__init__.py similarity index 100% rename from src/workflows/server/sqlite/__init__.py rename to packages/llama-index-workflows/src/workflows/server/sqlite/__init__.py diff --git a/src/workflows/server/sqlite/migrate.py b/packages/llama-index-workflows/src/workflows/server/sqlite/migrate.py similarity index 100% rename from src/workflows/server/sqlite/migrate.py rename to 
packages/llama-index-workflows/src/workflows/server/sqlite/migrate.py diff --git a/src/workflows/server/sqlite/migrations/0001_init.sql b/packages/llama-index-workflows/src/workflows/server/sqlite/migrations/0001_init.sql similarity index 100% rename from src/workflows/server/sqlite/migrations/0001_init.sql rename to packages/llama-index-workflows/src/workflows/server/sqlite/migrations/0001_init.sql diff --git a/src/workflows/server/sqlite/migrations/0002_extend_handlers.sql b/packages/llama-index-workflows/src/workflows/server/sqlite/migrations/0002_extend_handlers.sql similarity index 100% rename from src/workflows/server/sqlite/migrations/0002_extend_handlers.sql rename to packages/llama-index-workflows/src/workflows/server/sqlite/migrations/0002_extend_handlers.sql diff --git a/src/workflows/server/sqlite/migrations/__init__.py b/packages/llama-index-workflows/src/workflows/server/sqlite/migrations/__init__.py similarity index 100% rename from src/workflows/server/sqlite/migrations/__init__.py rename to packages/llama-index-workflows/src/workflows/server/sqlite/migrations/__init__.py diff --git a/src/workflows/server/sqlite/sqlite_workflow_store.py b/packages/llama-index-workflows/src/workflows/server/sqlite/sqlite_workflow_store.py similarity index 100% rename from src/workflows/server/sqlite/sqlite_workflow_store.py rename to packages/llama-index-workflows/src/workflows/server/sqlite/sqlite_workflow_store.py diff --git a/src/workflows/server/static/index.html b/packages/llama-index-workflows/src/workflows/server/static/index.html similarity index 100% rename from src/workflows/server/static/index.html rename to packages/llama-index-workflows/src/workflows/server/static/index.html diff --git a/src/workflows/testing/__init__.py b/packages/llama-index-workflows/src/workflows/testing/__init__.py similarity index 100% rename from src/workflows/testing/__init__.py rename to packages/llama-index-workflows/src/workflows/testing/__init__.py diff --git a/src/workflows/testing/runner.py b/packages/llama-index-workflows/src/workflows/testing/runner.py similarity index 100% rename from src/workflows/testing/runner.py rename to packages/llama-index-workflows/src/workflows/testing/runner.py diff --git a/src/workflows/types.py b/packages/llama-index-workflows/src/workflows/types.py similarity index 100% rename from src/workflows/types.py rename to packages/llama-index-workflows/src/workflows/types.py diff --git a/src/workflows/utils.py b/packages/llama-index-workflows/src/workflows/utils.py similarity index 100% rename from src/workflows/utils.py rename to packages/llama-index-workflows/src/workflows/utils.py diff --git a/src/workflows/workflow.py b/packages/llama-index-workflows/src/workflows/workflow.py similarity index 100% rename from src/workflows/workflow.py rename to packages/llama-index-workflows/src/workflows/workflow.py diff --git a/tests/__init__.py b/packages/llama-index-workflows/tests/__init__.py similarity index 100% rename from tests/__init__.py rename to packages/llama-index-workflows/tests/__init__.py diff --git a/tests/client/__init__.py b/packages/llama-index-workflows/tests/client/__init__.py similarity index 100% rename from tests/client/__init__.py rename to packages/llama-index-workflows/tests/client/__init__.py diff --git a/tests/client/client_workflows.py b/packages/llama-index-workflows/tests/client/client_workflows.py similarity index 100% rename from tests/client/client_workflows.py rename to packages/llama-index-workflows/tests/client/client_workflows.py diff --git 
a/tests/client/test_client.py b/packages/llama-index-workflows/tests/client/test_client.py similarity index 100% rename from tests/client/test_client.py rename to packages/llama-index-workflows/tests/client/test_client.py diff --git a/tests/conftest.py b/packages/llama-index-workflows/tests/conftest.py similarity index 100% rename from tests/conftest.py rename to packages/llama-index-workflows/tests/conftest.py diff --git a/tests/context/__init__.py b/packages/llama-index-workflows/tests/context/__init__.py similarity index 100% rename from tests/context/__init__.py rename to packages/llama-index-workflows/tests/context/__init__.py diff --git a/tests/context/test_context.py b/packages/llama-index-workflows/tests/context/test_context.py similarity index 100% rename from tests/context/test_context.py rename to packages/llama-index-workflows/tests/context/test_context.py diff --git a/tests/context/test_serializers.py b/packages/llama-index-workflows/tests/context/test_serializers.py similarity index 100% rename from tests/context/test_serializers.py rename to packages/llama-index-workflows/tests/context/test_serializers.py diff --git a/tests/context/test_utils.py b/packages/llama-index-workflows/tests/context/test_utils.py similarity index 100% rename from tests/context/test_utils.py rename to packages/llama-index-workflows/tests/context/test_utils.py diff --git a/tests/protocol/test_serializable_events.py b/packages/llama-index-workflows/tests/protocol/test_serializable_events.py similarity index 100% rename from tests/protocol/test_serializable_events.py rename to packages/llama-index-workflows/tests/protocol/test_serializable_events.py diff --git a/tests/runtime/__init__.py b/packages/llama-index-workflows/tests/runtime/__init__.py similarity index 100% rename from tests/runtime/__init__.py rename to packages/llama-index-workflows/tests/runtime/__init__.py diff --git a/tests/runtime/conftest.py b/packages/llama-index-workflows/tests/runtime/conftest.py similarity index 100% rename from tests/runtime/conftest.py rename to packages/llama-index-workflows/tests/runtime/conftest.py diff --git a/tests/runtime/test_control_loop.py b/packages/llama-index-workflows/tests/runtime/test_control_loop.py similarity index 100% rename from tests/runtime/test_control_loop.py rename to packages/llama-index-workflows/tests/runtime/test_control_loop.py diff --git a/tests/runtime/test_control_loop_transformations.py b/packages/llama-index-workflows/tests/runtime/test_control_loop_transformations.py similarity index 100% rename from tests/runtime/test_control_loop_transformations.py rename to packages/llama-index-workflows/tests/runtime/test_control_loop_transformations.py diff --git a/tests/runtime/test_identity_weak_ref.py b/packages/llama-index-workflows/tests/runtime/test_identity_weak_ref.py similarity index 100% rename from tests/runtime/test_identity_weak_ref.py rename to packages/llama-index-workflows/tests/runtime/test_identity_weak_ref.py diff --git a/tests/runtime/test_state.py b/packages/llama-index-workflows/tests/runtime/test_state.py similarity index 100% rename from tests/runtime/test_state.py rename to packages/llama-index-workflows/tests/runtime/test_state.py diff --git a/tests/server/__init__.py b/packages/llama-index-workflows/tests/server/__init__.py similarity index 100% rename from tests/server/__init__.py rename to packages/llama-index-workflows/tests/server/__init__.py diff --git a/tests/server/conftest.py b/packages/llama-index-workflows/tests/server/conftest.py similarity index 100% 
rename from tests/server/conftest.py rename to packages/llama-index-workflows/tests/server/conftest.py diff --git a/tests/server/test_handler_serialization.py b/packages/llama-index-workflows/tests/server/test_handler_serialization.py similarity index 100% rename from tests/server/test_handler_serialization.py rename to packages/llama-index-workflows/tests/server/test_handler_serialization.py diff --git a/tests/server/test_main.py b/packages/llama-index-workflows/tests/server/test_main.py similarity index 100% rename from tests/server/test_main.py rename to packages/llama-index-workflows/tests/server/test_main.py diff --git a/tests/server/test_memory_workflow_store.py b/packages/llama-index-workflows/tests/server/test_memory_workflow_store.py similarity index 100% rename from tests/server/test_memory_workflow_store.py rename to packages/llama-index-workflows/tests/server/test_memory_workflow_store.py diff --git a/tests/server/test_migrations.py b/packages/llama-index-workflows/tests/server/test_migrations.py similarity index 100% rename from tests/server/test_migrations.py rename to packages/llama-index-workflows/tests/server/test_migrations.py diff --git a/tests/server/test_openapi_schema.py b/packages/llama-index-workflows/tests/server/test_openapi_schema.py similarity index 100% rename from tests/server/test_openapi_schema.py rename to packages/llama-index-workflows/tests/server/test_openapi_schema.py diff --git a/tests/server/test_persistent_handler_serialization.py b/packages/llama-index-workflows/tests/server/test_persistent_handler_serialization.py similarity index 100% rename from tests/server/test_persistent_handler_serialization.py rename to packages/llama-index-workflows/tests/server/test_persistent_handler_serialization.py diff --git a/tests/server/test_server.py b/packages/llama-index-workflows/tests/server/test_server.py similarity index 100% rename from tests/server/test_server.py rename to packages/llama-index-workflows/tests/server/test_server.py diff --git a/tests/server/test_server_endpoints.py b/packages/llama-index-workflows/tests/server/test_server_endpoints.py similarity index 100% rename from tests/server/test_server_endpoints.py rename to packages/llama-index-workflows/tests/server/test_server_endpoints.py diff --git a/tests/server/test_server_live_http.py b/packages/llama-index-workflows/tests/server/test_server_live_http.py similarity index 100% rename from tests/server/test_server_live_http.py rename to packages/llama-index-workflows/tests/server/test_server_live_http.py diff --git a/tests/server/test_server_persistence.py b/packages/llama-index-workflows/tests/server/test_server_persistence.py similarity index 100% rename from tests/server/test_server_persistence.py rename to packages/llama-index-workflows/tests/server/test_server_persistence.py diff --git a/tests/server/test_sqlite_workflow_store.py b/packages/llama-index-workflows/tests/server/test_sqlite_workflow_store.py similarity index 100% rename from tests/server/test_sqlite_workflow_store.py rename to packages/llama-index-workflows/tests/server/test_sqlite_workflow_store.py diff --git a/tests/server/util.py b/packages/llama-index-workflows/tests/server/util.py similarity index 100% rename from tests/server/util.py rename to packages/llama-index-workflows/tests/server/util.py diff --git a/tests/test_decorator.py b/packages/llama-index-workflows/tests/test_decorator.py similarity index 100% rename from tests/test_decorator.py rename to packages/llama-index-workflows/tests/test_decorator.py diff --git 
a/tests/test_event.py b/packages/llama-index-workflows/tests/test_event.py similarity index 100% rename from tests/test_event.py rename to packages/llama-index-workflows/tests/test_event.py diff --git a/tests/test_handler.py b/packages/llama-index-workflows/tests/test_handler.py similarity index 100% rename from tests/test_handler.py rename to packages/llama-index-workflows/tests/test_handler.py diff --git a/tests/test_nanoid.py b/packages/llama-index-workflows/tests/test_nanoid.py similarity index 100% rename from tests/test_nanoid.py rename to packages/llama-index-workflows/tests/test_nanoid.py diff --git a/tests/test_resources.py b/packages/llama-index-workflows/tests/test_resources.py similarity index 100% rename from tests/test_resources.py rename to packages/llama-index-workflows/tests/test_resources.py diff --git a/tests/test_retry_policy.py b/packages/llama-index-workflows/tests/test_retry_policy.py similarity index 100% rename from tests/test_retry_policy.py rename to packages/llama-index-workflows/tests/test_retry_policy.py diff --git a/tests/test_state_manager.py b/packages/llama-index-workflows/tests/test_state_manager.py similarity index 100% rename from tests/test_state_manager.py rename to packages/llama-index-workflows/tests/test_state_manager.py diff --git a/tests/test_streaming.py b/packages/llama-index-workflows/tests/test_streaming.py similarity index 100% rename from tests/test_streaming.py rename to packages/llama-index-workflows/tests/test_streaming.py diff --git a/tests/test_testing_utils.py b/packages/llama-index-workflows/tests/test_testing_utils.py similarity index 100% rename from tests/test_testing_utils.py rename to packages/llama-index-workflows/tests/test_testing_utils.py diff --git a/tests/test_utils.py b/packages/llama-index-workflows/tests/test_utils.py similarity index 100% rename from tests/test_utils.py rename to packages/llama-index-workflows/tests/test_utils.py diff --git a/tests/test_workflow.py b/packages/llama-index-workflows/tests/test_workflow.py similarity index 100% rename from tests/test_workflow.py rename to packages/llama-index-workflows/tests/test_workflow.py diff --git a/tests/test_workflow_internal_events.py b/packages/llama-index-workflows/tests/test_workflow_internal_events.py similarity index 100% rename from tests/test_workflow_internal_events.py rename to packages/llama-index-workflows/tests/test_workflow_internal_events.py diff --git a/tests/test_workflow_postponed_annotations.py b/packages/llama-index-workflows/tests/test_workflow_postponed_annotations.py similarity index 100% rename from tests/test_workflow_postponed_annotations.py rename to packages/llama-index-workflows/tests/test_workflow_postponed_annotations.py diff --git a/tests/test_workflow_typed_state.py b/packages/llama-index-workflows/tests/test_workflow_typed_state.py similarity index 100% rename from tests/test_workflow_typed_state.py rename to packages/llama-index-workflows/tests/test_workflow_typed_state.py diff --git a/packages/workflows-dev/README.md b/packages/workflows-dev/README.md new file mode 100644 index 00000000..e6fe71fa --- /dev/null +++ b/packages/workflows-dev/README.md @@ -0,0 +1 @@ +This is the workflows-dev package. It is used to develop and test the workflows mono repo. 
diff --git a/packages/workflows-dev/pyproject.toml b/packages/workflows-dev/pyproject.toml
new file mode 100644
index 00000000..27d634c4
--- /dev/null
+++ b/packages/workflows-dev/pyproject.toml
@@ -0,0 +1,29 @@
+[build-system]
+requires = ["uv_build>=0.9.7,<0.10.0"]
+build-backend = "uv_build"
+
+[dependency-groups]
+dev = [
+    "pytest>=8.4.2",
+    "pytest-cov>=7.0.0"
+]
+
+[project]
+name = "workflows-dev"
+version = "0.1.0"
+description = "Development tooling for the workflows monorepo"
+readme = "README.md"
+authors = [
+    {name = "Adrian Lyjak", email = "adrianlyjak@gmail.com"}
+]
+requires-python = ">=3.9"
+dependencies = [
+    "click>=8.1.7",
+    "httpx>=0.28.1",
+    "packaging>=24.1",
+    "pydantic>=2.12.3",
+    "tomlkit>=0.13.3"
+]
+
+[project.scripts]
+workflows-dev = "workflows_dev:main"
diff --git a/packages/workflows-dev/src/workflows_dev/__init__.py b/packages/workflows-dev/src/workflows_dev/__init__.py
new file mode 100644
index 00000000..a1ec1668
--- /dev/null
+++ b/packages/workflows-dev/src/workflows_dev/__init__.py
@@ -0,0 +1,10 @@
+from __future__ import annotations
+
+from .cli import cli
+
+__all__ = ["cli", "main"]
+
+
+def main() -> None:
+    """Console script entry point."""
+    cli()
diff --git a/packages/workflows-dev/src/workflows_dev/changesets.py b/packages/workflows-dev/src/workflows_dev/changesets.py
new file mode 100644
index 00000000..336ba691
--- /dev/null
+++ b/packages/workflows-dev/src/workflows_dev/changesets.py
@@ -0,0 +1,198 @@
+"""
+This script is called by the changeset bot. Changesets can normally handle these steps on its own, but this is a mixed TypeScript and Python repo, so some extra work is needed.
+
+This script does two things:
+- Versioning: makes the file changes that will be committed for the new version.
+- Releasing/tagging: after versions are changed, check each package to see whether it is already released; if not, release and tag it.
+ +""" + +from __future__ import annotations + +from dataclasses import dataclass +import json +import os +import subprocess +from pathlib import Path +from typing import Any, Generator, List, cast +import urllib.request +import urllib.error +from pydantic import BaseModel + +import click +import tomlkit +from packaging.version import Version + + +def run_command( + cmd: List[str], cwd: Path | None = None, env: dict[str, str] | None = None +) -> None: + """Run a command, streaming output to the console, and raise on failure.""" + subprocess.run(cmd, check=True, text=True, cwd=cwd or Path.cwd(), env=env) + + +def run_and_capture( + cmd: List[str], cwd: Path | None = None, env: dict[str, str] | None = None +) -> str: + """Run a command and return stdout as text, raising on failure.""" + result = subprocess.run( + cmd, + check=True, + text=True, + cwd=cwd or Path.cwd(), + env=env, + capture_output=True, + ) + return result.stdout + + +@dataclass +class PackageJson: + name: str + version: str + path: Path + private: bool + + +def get_pnpm_workspace_packages() -> list[PackageJson]: + """Return directories for all workspace packages from pnpm list JSON output.""" + output = run_and_capture(["pnpm", "list", "-r", "--depth=-1", "--json"]) + + data = cast(list[dict[str, Any]], json.loads(output)) + packages: list[PackageJson] = [ + PackageJson( + name=data["name"], + version=data["version"], + path=Path(data["path"]), + private=data["private"], + ) + for data in data + ] + return packages + + +def sync_package_version_with_pyproject( + package_dir: Path, packages: dict[str, PackageJson], js_package_name: str +) -> None: + """Sync version from package.json to pyproject.toml. + + Returns True if pyproject was changed, else False. + """ + pyproject_path = package_dir / "pyproject.toml" + if not pyproject_path.exists(): + return + + package_version = packages[js_package_name].version + toml_doc, py_doc = PyProjectContainer.parse(pyproject_path.read_text()) + current_version = py_doc.project.version + + # update workspace dependency strings by replacing the first version after == or >= + # Perhaps sync dependency versions some day + changed = False + if current_version != package_version: + toml_doc["project"]["version"] = package_version + changed = True + + if changed: + pyproject_path.write_text(tomlkit.dumps(toml_doc)) + click.echo( + f"Updated {pyproject_path} version to {package_version} and synced dependency specs" + ) + + +def _publishable_packages() -> Generator[Path, None, None]: + """Finds all paths to pyproject.toml that also have a package.json with private: false.""" + packages = get_pnpm_workspace_packages() + for package in packages: + if not package.private: + pyproject = package.path.parent / "pyproject.toml" + if pyproject.exists(): + yield pyproject + + +def lock_python_dependencies() -> None: + """Lock Python dependencies.""" + try: + run_command(["uv", "lock"]) + click.echo("Locked Python dependencies") + except subprocess.CalledProcessError as e: + click.echo(f"Warning: Failed to lock Python dependencies: {e}", err=True) + + +@click.group() +def cli() -> None: + """Changeset-based version management for llama-cloud-services.""" + pass + + +def maybe_publish_pypi(dry_run: bool) -> None: + """Publish the py packages if they need to be published.""" + any = False + for package in _publishable_packages(): + name, version = current_version(package) + if is_published(name, version): + click.echo(f"PyPI package {name}@{version} already published, skipping") + continue + any = True + 
click.echo(f"Publishing PyPI package {name}@{version}") + + token = os.environ["UV_PUBLISH_TOKEN"] + if dry_run: + summary = (token[:3] + "***") if len(token) <= 6 else token[:6] + "****" + click.echo( + f"Dry run, skipping publish. Would run with publish token {summary}:" + ) + click.echo(" uv build") + click.echo(" uv publish") + else: + run_command(["uv", "build"], cwd=package.parent) + if any: + if dry_run: + click.echo("Dry run, skipping publish. Would run:") + click.echo(" uv publish") + else: + run_command(["uv", "publish"]) + + +def current_version(pyproject: Path) -> tuple[str, str]: + """Return (package_name, version_str) taken from the given pyproject.toml.""" + toml_doc, py_doc = PyProjectContainer.parse(pyproject.read_text()) + name = py_doc.project.name + version = str(Version(py_doc.project.version)) # normalise + return name, version + + +def is_published( + name: str, version: str, index_url: str = "https://pypi.org/pypi" +) -> bool: + """ + True → `==` exists on the given index + False → package missing *or* version missing + """ + url = f"{index_url.rstrip('/')}/{name}/json" + try: + data = json.load(urllib.request.urlopen(url)) + except urllib.error.HTTPError as e: # 404 → package not published at all + if e.code == 404: + return False + raise # any other error should surface + return version in data["releases"] # keys are version strings + + +if __name__ == "__main__": + cli() + + +class PyProjectContainer(BaseModel): + project: PyProject + + @classmethod + def parse(cls, text: str) -> tuple[Any, PyProjectContainer]: + doc = tomlkit.parse(text) + return doc, PyProjectContainer.model_validate(doc) + + +class PyProject(BaseModel): + name: str + version: str + dependencies: list[str] diff --git a/packages/workflows-dev/src/workflows_dev/cli.py b/packages/workflows-dev/src/workflows_dev/cli.py new file mode 100644 index 00000000..75d5707b --- /dev/null +++ b/packages/workflows-dev/src/workflows_dev/cli.py @@ -0,0 +1,144 @@ +from __future__ import annotations + +import os +from pathlib import Path +from typing import Optional + +import click + +from . import git_utils, gha, index_html, versioning, changesets + + +def _resolve_tag(explicit_tag: Optional[str], github_ref: Optional[str]) -> str: + if explicit_tag: + return versioning.strip_refs_prefix(explicit_tag) + if github_ref and github_ref.startswith("refs/tags/"): + return versioning.strip_refs_prefix(github_ref) + raise click.BadParameter( + "Unable to determine tag. Provide --tag or set GITHUB_REF/GITHUB_REF_NAME to a tag value." 
+    )
+
+
+@click.group()
+def cli() -> None:
+    """Developer tooling for the workflows repository."""
+
+
+@cli.command("detect-change-type")
+@click.option("--tag-glob", default="v*", show_default=True)
+@click.option("--tag-prefix", default="", show_default=True)
+@click.option("--current-tag", envvar="GITHUB_REF_NAME")
+@click.option("--github-ref", envvar="GITHUB_REF")
+@click.option("--output", type=click.Path(), default=None)
+def detect_change_type(
+    tag_glob: str,
+    tag_prefix: str,
+    current_tag: Optional[str],
+    github_ref: Optional[str],
+    output: Optional[Path],
+) -> None:
+    """Compute the semantic change between the current tag and the previous release."""
+    target_tag = _resolve_tag(current_tag, github_ref)
+    tags = git_utils.list_tags(Path.cwd(), tag_glob)
+    previous = git_utils.previous_tag(target_tag, tags)
+
+    current_version = versioning.extract_semver(target_tag, tag_prefix)
+    previous_version = (
+        versioning.extract_semver(previous, tag_prefix) if previous else None
+    )
+    change_type = versioning.detect_change_type(current_version, previous_version)
+
+    click.echo(f"Current tag: {target_tag}")
+    if previous:
+        click.echo(f"Previous tag: {previous}")
+    else:
+        click.echo("No previous tag found")
+    click.echo(f"Change type: {change_type}")
+
+    gha.write_outputs({"change_type": change_type}, output_path=output)
+
+
+@cli.command("extract-tag-info")
+@click.option(
+    "--tag", required=True, help="Full git tag (e.g. llama-index-workflows@v1.2.3)."
+)
+@click.option("--tag-prefix", required=True, help="Expected prefix for the tag.")
+@click.option("--output", type=click.Path(), default=None)
+def extract_tag_info(tag: str, tag_prefix: str, output: Optional[str]) -> None:
+    """Extract suffix and semantic version metadata from a tag."""
+    suffix, semver = versioning.compute_suffix_and_version(tag, tag_prefix)
+    gha.write_outputs({"tag_suffix": suffix, "semver": semver}, output_path=output)
+
+
+@cli.command("find-previous-tag")
+@click.option("--tag-prefix", required=True)
+@click.option("--current-tag", required=True)
+@click.option("--output", type=click.Path(), default=None)
+def find_previous_tag(tag_prefix: str, current_tag: str, output: Optional[str]) -> None:
+    """Find the most recent tag for a package other than the current tag."""
+    previous = git_utils.find_previous_tag(Path.cwd(), tag_prefix, current_tag)
+    gha.write_outputs({"previous": previous}, output_path=output)
+
+
+@cli.command("update-index-html")
+@click.option("--js-url", required=True, help="URL for the JavaScript bundle.")
+@click.option("--css-url", required=True, help="URL for the CSS bundle.")
+@click.option(
+    "--index-path",
+    type=click.Path(),
+    default=None,
+    help="Optional custom index.html path.",
+)
+def update_index_html_cmd(js_url: str, css_url: str, index_path: Optional[str]) -> None:
+    """Update debugger asset URLs in the server index.html file."""
+    try:
+        index_html.update_index_html(js_url, css_url, index_path)
+    except Exception as exc:  # pragma: no cover - Click renders traceback
+        raise click.ClickException(str(exc)) from exc
+    click.echo("✅ Updated index.html")
+    click.echo(f"   JavaScript: {js_url}")
+    click.echo(f"   CSS: {css_url}")
+
+
+@cli.command("changeset-version")
+def changeset_version() -> None:
+    """Apply changeset versions, then sync versions for co-located Python packages.
+
+    - Runs changesets to bump package.json versions.
+    - Discovers all workspace packages via pnpm.
+    - For any directory containing both package.json and pyproject.toml, and with
+      package.json private: false, set pyproject [project].version to match the JS version.
+    - Lock files are not updated here; run `uv lock` separately when needed.
+    """
+    # Ensure we're at the repo root
+    os.chdir(Path(__file__).parents[4])
+
+    # First, run changeset version to update all package.json files
+    changesets.run_command(["npx", "@changesets/cli", "version"])
+
+    # Enumerate workspace packages and perform syncs
+    packages = changesets.get_pnpm_workspace_packages()
+    version_map = {pkg.name: pkg for pkg in packages}
+    for pkg in packages:
+        changesets.sync_package_version_with_pyproject(pkg.path, version_map, pkg.name)
+
+
+@cli.command("changeset-publish")
+@click.option("--tag", is_flag=True, help="Tag the packages after publishing")
+@click.option("--dry-run", is_flag=True, help="Dry run the publish")
+def publish(tag: bool, dry_run: bool) -> None:
+    """Publish all unpublished packages to PyPI and optionally tag the release."""
+    # move to the repo root
+    os.chdir(Path(__file__).parents[4])
+
+    changesets.maybe_publish_pypi(dry_run)
+
+    if tag:
+        if dry_run:
+            click.echo("Dry run, skipping tag. Would run:")
+            click.echo("  npx @changesets/cli tag")
+            click.echo("  git push --tags")
+        else:
+            # Let changesets create JS-related tags as usual
+            changesets.run_command(["npx", "@changesets/cli", "tag"])
+            changesets.run_command(["git", "push", "--tags"])
diff --git a/packages/workflows-dev/src/workflows_dev/gha.py b/packages/workflows-dev/src/workflows_dev/gha.py
new file mode 100644
index 00000000..763bebd0
--- /dev/null
+++ b/packages/workflows-dev/src/workflows_dev/gha.py
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+import os
+from pathlib import Path
+from typing import Mapping
+
+import click
+
+
+def write_outputs(
+    outputs: Mapping[str, str], output_path: str | Path | None = None
+) -> None:
+    """Write GitHub Actions step outputs to GITHUB_OUTPUT, or echo them when unset."""
+    target = output_path or os.environ.get("GITHUB_OUTPUT")
+    if target:
+        with open(target, "a", encoding="utf-8") as handle:
+            for key, value in outputs.items():
+                handle.write(f"{key}={value}\n")
+    else:
+        for key, value in outputs.items():
+            click.echo(f"{key}={value}")
diff --git a/packages/workflows-dev/src/workflows_dev/git_utils.py b/packages/workflows-dev/src/workflows_dev/git_utils.py
new file mode 100644
index 00000000..d883f8e7
--- /dev/null
+++ b/packages/workflows-dev/src/workflows_dev/git_utils.py
@@ -0,0 +1,46 @@
+from __future__ import annotations
+
+import subprocess
+from pathlib import Path
+from typing import Iterable, Optional
+
+
+def list_tags(repo: str | Path, tag_glob: str) -> list[str]:
+    """Return tags that match the provided glob, sorted newest first."""
+    repo_path = Path(repo)
+    result = subprocess.run(
+        [
+            "git",
+            "-C",
+            str(repo_path),
+            "tag",
+            "-l",
+            tag_glob,
+            "--sort=-version:refname",
+        ],
+        capture_output=True,
+        text=True,
+        check=True,
+    )
+
+    return [line.strip() for line in result.stdout.splitlines() if line.strip()]
+
+
+def previous_tag(current_tag: str, tags: Iterable[str]) -> Optional[str]:
+    """Return the tag immediately after the current entry in the newest-first list."""
+    tags_list = list(tags)
+    if current_tag in tags_list:
+        idx = tags_list.index(current_tag)
+        if idx + 1 < len(tags_list):
+            return tags_list[idx + 1]
+        return None
+    return tags_list[0] if tags_list else None
+
+
+def find_previous_tag(repo: str | Path, tag_prefix: str, current_tag: str) -> str:
+    """Locate the latest tag that matches a prefix but differs from the current tag."""
matches = list_tags(repo, f"{tag_prefix}*")
+    for candidate in matches:
+        if candidate != current_tag:
+            return candidate
+    return ""
diff --git a/packages/workflows-dev/src/workflows_dev/index_html.py b/packages/workflows-dev/src/workflows_dev/index_html.py
new file mode 100644
index 00000000..f8c37582
--- /dev/null
+++ b/packages/workflows-dev/src/workflows_dev/index_html.py
@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+import re
+from pathlib import Path
+
+
+class IndexHtmlError(RuntimeError):
+    """Raised when index.html cannot be updated."""
+
+
+SCRIPT_PATTERN = re.compile(
+    r'<script type="module" crossorigin src="[^"]*"[^>]*></script>'
+)
+CSS_PATTERN = re.compile(r'<link rel="stylesheet" crossorigin href="[^"]*"[^>]*>')
+
+
+def default_index_path() -> Path:
+    """Return the default path to the debugger index.html file."""
+    return (
+        Path(__file__).resolve().parents[3]
+        / "src"
+        / "workflows"
+        / "server"
+        / "static"
+        / "index.html"
+    )
+
+
+def update_index_html(
+    js_url: str, css_url: str, index_path: str | Path | None = None
+) -> None:
+    """Replace the debugger asset URLs in index.html."""
+    target = Path(index_path) if index_path is not None else default_index_path()
+    if not target.exists():
+        raise FileNotFoundError(f"index.html not found at {target}")
+
+    content = target.read_text(encoding="utf-8")
+
+    new_script = f'<script type="module" crossorigin src="{js_url}"></script>'
+    updated_content, script_count = SCRIPT_PATTERN.subn(new_script, content)
+    if script_count == 0:
+        raise IndexHtmlError("Could not find script tag in index.html")
+
+    new_css = f'<link rel="stylesheet" crossorigin href="{css_url}">'
+    updated_content, css_count = CSS_PATTERN.subn(new_css, updated_content)
+    if css_count == 0:
+        raise IndexHtmlError("Could not find link tag in index.html")
+
+    target.write_text(updated_content, encoding="utf-8")
diff --git a/packages/workflows-dev/src/workflows_dev/versioning.py b/packages/workflows-dev/src/workflows_dev/versioning.py
new file mode 100644
index 00000000..f0fd6d57
--- /dev/null
+++ b/packages/workflows-dev/src/workflows_dev/versioning.py
@@ -0,0 +1,89 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import Optional
+
+from packaging.version import Version
+
+import tomllib
+
+
+class VersionMismatchError(ValueError):
+    """Raised when two versions do not match."""
+
+
+def read_pyproject_version(pyproject_path: str) -> str:
+    """Read the project version from a pyproject.toml file."""
+    path = Path(pyproject_path)
+    data = tomllib.loads(path.read_text(encoding="utf-8"))
+    try:
+        project = data["project"]
+    except KeyError as exc:  # pragma: no cover - invalid pyproject structure
+        raise ValueError("Missing [project] section in pyproject.toml") from exc
+
+    try:
+        version = project["version"]
+    except KeyError as exc:
+        raise ValueError("Missing version metadata in pyproject.toml") from exc
+
+    if not isinstance(version, str):  # pragma: no cover - defensive guard
+        raise ValueError("Project version must be a string.")
+    return version
+
+
+def strip_refs_prefix(tag: str) -> str:
+    """Remove the refs/tags/ prefix when present."""
+    return tag.replace("refs/tags/", "") if tag.startswith("refs/tags/") else tag
+
+
+def remove_tag_prefix(tag: str, tag_prefix: str) -> str:
+    """Remove a package-specific prefix and optional leading v."""
+    if tag_prefix:
+        if not tag.startswith(tag_prefix):
+            raise ValueError(f"Tag {tag} does not match expected prefix {tag_prefix}")
+        tag = tag[len(tag_prefix) :]
+    return tag
+
+
+def extract_semver(tag: str, tag_prefix: str) -> str:
+    """Return the semantic version encoded in a git tag."""
+    suffix = remove_tag_prefix(strip_refs_prefix(tag), tag_prefix)
+    return suffix[1:] if 
suffix.startswith("v") else suffix + + +def compute_suffix_and_version(tag: str, tag_prefix: str) -> tuple[str, str]: + """Return the suffix after the prefix and the semantic version.""" + suffix = remove_tag_prefix(strip_refs_prefix(tag), tag_prefix) + semver = suffix[1:] if suffix.startswith("v") else suffix + return suffix, semver + + +def detect_change_type(current_version: str, previous_version: Optional[str]) -> str: + """Return the semantic change classification between two versions.""" + if not previous_version: + return "major" + + current = Version(current_version) + previous = Version(previous_version) + + if current <= previous: + return "none" + + current_release = (current.release + (0, 0, 0))[:3] + previous_release = (previous.release + (0, 0, 0))[:3] + + if current_release[0] > previous_release[0]: + return "major" + if current_release[1] > previous_release[1]: + return "minor" + if current_release[2] > previous_release[2]: + return "patch" + return "minor" + + +def ensure_versions_match(expected: str, actual: str, tag_name: str) -> None: + """Raise when two version strings differ.""" + if Version(expected) != Version(actual): + raise VersionMismatchError( + f"Tag {tag_name} (version {actual}) doesn't match pyproject.toml version {expected}" + ) diff --git a/packages/workflows-dev/tests/test_workflows_dev_cli.py b/packages/workflows-dev/tests/test_workflows_dev_cli.py new file mode 100644 index 00000000..fc3aa841 --- /dev/null +++ b/packages/workflows-dev/tests/test_workflows_dev_cli.py @@ -0,0 +1,414 @@ +from __future__ import annotations + +import json +import subprocess +from pathlib import Path +from unittest.mock import Mock, patch +from urllib.error import HTTPError + +from click.testing import CliRunner + +from workflows_dev.changesets import ( + PackageJson, + PyProjectContainer, + current_version, + is_published, + sync_package_version_with_pyproject, +) +from workflows_dev.cli import cli + + +def _write_pyproject(path: Path, version: str) -> None: + path.write_text( + f""" +[project] +name = "example" +version = "{version}" +""".strip() + ) + + +def _init_git_repo(repo_path: Path) -> None: + subprocess.run(["git", "init"], cwd=repo_path, check=True, capture_output=True) + subprocess.run( + ["git", "config", "user.email", "dev@example.com"], cwd=repo_path, check=True + ) + subprocess.run( + ["git", "config", "user.name", "Dev User"], cwd=repo_path, check=True + ) + + +def _commit_and_tag(repo_path: Path, filename: str, content: str, tag: str) -> None: + file_path = repo_path / filename + file_path.write_text(content) + subprocess.run(["git", "add", filename], cwd=repo_path, check=True) + subprocess.run( + ["git", "commit", "-m", f"Add {tag}"], + cwd=repo_path, + check=True, + capture_output=True, + ) + subprocess.run(["git", "tag", tag], cwd=repo_path, check=True) + + +def test_detect_change_type_patch() -> None: + runner = CliRunner() + with runner.isolated_filesystem(): + repo = Path.cwd() + _init_git_repo(repo) + _commit_and_tag(repo, "file.txt", "v1.0.0", "v1.0.0") + _commit_and_tag(repo, "file.txt", "v1.0.1", "v1.0.1") + + output_file = Path("out.txt") + result = runner.invoke( + cli, + [ + "detect-change-type", + "--tag-glob", + "v*", + "--current-tag", + "v1.0.1", + "--output", + str(output_file), + ], + env={}, + ) + assert result.exit_code == 0 + assert "Change type: patch" in result.output + assert "change_type=patch" in output_file.read_text() + + +def test_detect_change_type_minor() -> None: + runner = CliRunner() + with runner.isolated_filesystem(): + 
repo = Path.cwd() + _init_git_repo(repo) + _commit_and_tag(repo, "file.txt", "v1.0.0", "v1.0.0") + _commit_and_tag(repo, "file.txt", "v1.1.0", "v1.1.0") + + result = runner.invoke( + cli, + [ + "detect-change-type", + "--tag-glob", + "v*", + "--current-tag", + "v1.1.0", + ], + env={}, + ) + assert result.exit_code == 0 + assert "Change type: minor" in result.output + + +def test_detect_change_type_major() -> None: + runner = CliRunner() + with runner.isolated_filesystem(): + repo = Path.cwd() + _init_git_repo(repo) + _commit_and_tag(repo, "file.txt", "v1.0.0", "v1.0.0") + _commit_and_tag(repo, "file.txt", "v2.0.0", "v2.0.0") + + result = runner.invoke( + cli, + [ + "detect-change-type", + "--tag-glob", + "v*", + "--current-tag", + "v2.0.0", + ], + env={}, + ) + assert result.exit_code == 0 + assert "Change type: major" in result.output + + +def test_detect_change_type_with_prefix() -> None: + runner = CliRunner() + with runner.isolated_filesystem(): + repo = Path.cwd() + _init_git_repo(repo) + _commit_and_tag(repo, "file.txt", "pkg@v1.0.0", "pkg@v1.0.0") + _commit_and_tag(repo, "file.txt", "pkg@v1.0.1", "pkg@v1.0.1") + + result = runner.invoke( + cli, + [ + "detect-change-type", + "--tag-glob", + "pkg@v*", + "--tag-prefix", + "pkg@", + "--current-tag", + "pkg@v1.0.1", + ], + env={}, + ) + assert result.exit_code == 0 + assert "Change type: patch" in result.output + + +def test_detect_change_type_requires_tag() -> None: + runner = CliRunner() + env = {"GITHUB_REF": "", "GITHUB_REF_NAME": ""} + result = runner.invoke(cli, ["detect-change-type"], env=env) + assert result.exit_code != 0 + assert "Unable to determine tag" in result.output + + +def test_extract_tag_info_outputs_suffix() -> None: + runner = CliRunner() + with runner.isolated_filesystem(): + out_file = Path("tag.txt") + result = runner.invoke( + cli, + [ + "extract-tag-info", + "--tag", + "pkg@v1.2.3", + "--tag-prefix", + "pkg@", + "--output", + str(out_file), + ], + ) + assert result.exit_code == 0 + contents = out_file.read_text() + assert "tag_suffix=v1.2.3" in contents + assert "semver=1.2.3" in contents + + +def test_find_previous_tag_returns_match() -> None: + runner = CliRunner() + with runner.isolated_filesystem(): + repo = Path.cwd() + _init_git_repo(repo) + _commit_and_tag(repo, "file.txt", "pkg@v1.0.0", "pkg@v1.0.0") + _commit_and_tag(repo, "file.txt", "pkg@v1.1.0", "pkg@v1.1.0") + + out_file = Path("prev.txt") + result = runner.invoke( + cli, + [ + "find-previous-tag", + "--tag-prefix", + "pkg@", + "--current-tag", + "pkg@v1.1.0", + "--output", + str(out_file), + ], + ) + assert result.exit_code == 0 + assert out_file.read_text().strip() == "previous=pkg@v1.0.0" + + +def test_update_index_html_success(tmp_path: Path) -> None: + runner = CliRunner() + index_path = tmp_path / "index.html" + index_path.write_text( + """ + + + + + + +""".strip() + ) + result = runner.invoke( + cli, + [ + "update-index-html", + "--js-url", + "https://cdn/js", + "--css-url", + "https://cdn/css", + "--index-path", + str(index_path), + ], + ) + assert result.exit_code == 0 + updated = index_path.read_text() + assert 'src="https://cdn/js"' in updated + assert 'href="https://cdn/css"' in updated + + +def test_update_index_html_missing_file(tmp_path: Path) -> None: + runner = CliRunner() + result = runner.invoke( + cli, + [ + "update-index-html", + "--js-url", + "https://cdn/js", + "--css-url", + "https://cdn/css", + "--index-path", + str(tmp_path / "missing.html"), + ], + ) + assert result.exit_code != 0 + assert "not found" in result.output + + 
+# Tests for changesets.py functionality + + +def test_current_version(tmp_path: Path) -> None: + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text( + """ +[project] +name = "test-package" +version = "1.2.3" +dependencies = [] +""".strip() + ) + name, version = current_version(pyproject) + assert name == "test-package" + assert version == "1.2.3" + + +def test_current_version_normalizes_version(tmp_path: Path) -> None: + pyproject = tmp_path / "pyproject.toml" + pyproject.write_text( + """ +[project] +name = "test-package" +version = "01.02.03" +dependencies = [] +""".strip() + ) + name, version = current_version(pyproject) + assert name == "test-package" + assert version == "1.2.3" + + +def test_pyproject_container_parse() -> None: + toml_text = """ +[project] +name = "my-package" +version = "0.1.0" +dependencies = ["requests>=2.0.0"] +""".strip() + toml_doc, py_doc = PyProjectContainer.parse(toml_text) + assert py_doc.project.name == "my-package" + assert py_doc.project.version == "0.1.0" + assert py_doc.project.dependencies == ["requests>=2.0.0"] + + +def test_is_published_returns_true_when_version_exists() -> None: + mock_response = Mock() + mock_response.read.return_value = json.dumps( + {"releases": {"1.0.0": [], "1.1.0": []}} + ).encode() + + with patch("urllib.request.urlopen", return_value=mock_response): + result = is_published("test-package", "1.0.0") + assert result is True + + +def test_is_published_returns_false_when_version_missing() -> None: + mock_response = Mock() + mock_response.read.return_value = json.dumps({"releases": {"1.0.0": []}}).encode() + + with patch("urllib.request.urlopen", return_value=mock_response): + result = is_published("test-package", "1.1.0") + assert result is False + + +def test_is_published_returns_false_when_package_not_found() -> None: + mock_error = HTTPError("url", 404, "Not Found", {}, None) # type: ignore + + with patch("urllib.request.urlopen", side_effect=mock_error): + result = is_published("nonexistent-package", "1.0.0") + assert result is False + + +def test_is_published_raises_on_other_http_errors() -> None: + mock_error = HTTPError("url", 500, "Internal Server Error", {}, None) # type: ignore + + with patch("urllib.request.urlopen", side_effect=mock_error): + try: + is_published("test-package", "1.0.0") + assert False, "Expected HTTPError to be raised" + except HTTPError as e: + assert e.code == 500 + + +def test_sync_package_version_with_pyproject_updates_version(tmp_path: Path) -> None: + package_dir = tmp_path / "package" + package_dir.mkdir() + pyproject = package_dir / "pyproject.toml" + pyproject.write_text( + """ +[project] +name = "test-package" +version = "1.0.0" +dependencies = [] +""".strip() + ) + + packages = { + "test-js-package": PackageJson( + name="test-js-package", + version="2.0.0", + path=package_dir, + private=False, + ) + } + + sync_package_version_with_pyproject(package_dir, packages, "test-js-package") + + toml_doc, py_doc = PyProjectContainer.parse(pyproject.read_text()) + assert py_doc.project.version == "2.0.0" + + +def test_sync_package_version_with_pyproject_skips_when_no_pyproject( + tmp_path: Path, +) -> None: + package_dir = tmp_path / "package" + package_dir.mkdir() + + packages = { + "test-js-package": PackageJson( + name="test-js-package", + version="2.0.0", + path=package_dir, + private=False, + ) + } + + # Should not raise an error + sync_package_version_with_pyproject(package_dir, packages, "test-js-package") + + +def test_sync_package_version_with_pyproject_skips_when_versions_match( 
+ tmp_path: Path, +) -> None: + package_dir = tmp_path / "package" + package_dir.mkdir() + pyproject = package_dir / "pyproject.toml" + original_content = """ +[project] +name = "test-package" +version = "2.0.0" +dependencies = [] +""".strip() + pyproject.write_text(original_content) + + packages = { + "test-js-package": PackageJson( + name="test-js-package", + version="2.0.0", + path=package_dir, + private=False, + ) + } + + sync_package_version_with_pyproject(package_dir, packages, "test-js-package") + + # Content should be unchanged + assert pyproject.read_text() == original_content diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml new file mode 100644 index 00000000..922b4c14 --- /dev/null +++ b/pnpm-lock.yaml @@ -0,0 +1,834 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + devDependencies: + '@changesets/cli': + specifier: ^2.29.5 + version: 2.29.7 + changesets: + specifier: ^1.0.2 + version: 1.0.2 + + packages/llama-index-utils-workflow: + devDependencies: + '@changesets/cli': + specifier: ^2.29.5 + version: 2.29.7 + changesets: + specifier: ^1.0.2 + version: 1.0.2 + + packages/llama-index-workflows: + devDependencies: + '@changesets/cli': + specifier: ^2.29.5 + version: 2.29.7 + changesets: + specifier: ^1.0.2 + version: 1.0.2 + + packages/workflows-dev: + devDependencies: + '@changesets/cli': + specifier: ^2.29.5 + version: 2.29.7 + changesets: + specifier: ^1.0.2 + version: 1.0.2 + +packages: + + '@babel/runtime@7.28.4': + resolution: {integrity: sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==} + engines: {node: '>=6.9.0'} + + '@changesets/apply-release-plan@7.0.13': + resolution: {integrity: sha512-BIW7bofD2yAWoE8H4V40FikC+1nNFEKBisMECccS16W1rt6qqhNTBDmIw5HaqmMgtLNz9e7oiALiEUuKrQ4oHg==} + + '@changesets/assemble-release-plan@6.0.9': + resolution: {integrity: sha512-tPgeeqCHIwNo8sypKlS3gOPmsS3wP0zHt67JDuL20P4QcXiw/O4Hl7oXiuLnP9yg+rXLQ2sScdV1Kkzde61iSQ==} + + '@changesets/changelog-git@0.2.1': + resolution: {integrity: sha512-x/xEleCFLH28c3bQeQIyeZf8lFXyDFVn1SgcBiR2Tw/r4IAWlk1fzxCEZ6NxQAjF2Nwtczoen3OA2qR+UawQ8Q==} + + '@changesets/cli@2.29.7': + resolution: {integrity: sha512-R7RqWoaksyyKXbKXBTbT4REdy22yH81mcFK6sWtqSanxUCbUi9Uf+6aqxZtDQouIqPdem2W56CdxXgsxdq7FLQ==} + hasBin: true + + '@changesets/config@3.1.1': + resolution: {integrity: sha512-bd+3Ap2TKXxljCggI0mKPfzCQKeV/TU4yO2h2C6vAihIo8tzseAn2e7klSuiyYYXvgu53zMN1OeYMIQkaQoWnA==} + + '@changesets/errors@0.2.0': + resolution: {integrity: sha512-6BLOQUscTpZeGljvyQXlWOItQyU71kCdGz7Pi8H8zdw6BI0g3m43iL4xKUVPWtG+qrrL9DTjpdn8eYuCQSRpow==} + + '@changesets/get-dependents-graph@2.1.3': + resolution: {integrity: sha512-gphr+v0mv2I3Oxt19VdWRRUxq3sseyUpX9DaHpTUmLj92Y10AGy+XOtV+kbM6L/fDcpx7/ISDFK6T8A/P3lOdQ==} + + '@changesets/get-release-plan@4.0.13': + resolution: {integrity: sha512-DWG1pus72FcNeXkM12tx+xtExyH/c9I1z+2aXlObH3i9YA7+WZEVaiHzHl03thpvAgWTRaH64MpfHxozfF7Dvg==} + + '@changesets/get-version-range-type@0.4.0': + resolution: {integrity: sha512-hwawtob9DryoGTpixy1D3ZXbGgJu1Rhr+ySH2PvTLHvkZuQ7sRT4oQwMh0hbqZH1weAooedEjRsbrWcGLCeyVQ==} + + '@changesets/git@3.0.4': + resolution: {integrity: sha512-BXANzRFkX+XcC1q/d27NKvlJ1yf7PSAgi8JG6dt8EfbHFHi4neau7mufcSca5zRhwOL8j9s6EqsxmT+s+/E6Sw==} + + '@changesets/logger@0.1.1': + resolution: {integrity: sha512-OQtR36ZlnuTxKqoW4Sv6x5YIhOmClRd5pWsjZsddYxpWs517R0HkyiefQPIytCVh4ZcC5x9XaG8KTdd5iRQUfg==} + + '@changesets/parse@0.4.1': + resolution: {integrity: 
sha512-iwksMs5Bf/wUItfcg+OXrEpravm5rEd9Bf4oyIPL4kVTmJQ7PNDSd6MDYkpSJR1pn7tz/k8Zf2DhTCqX08Ou+Q==} + + '@changesets/pre@2.0.2': + resolution: {integrity: sha512-HaL/gEyFVvkf9KFg6484wR9s0qjAXlZ8qWPDkTyKF6+zqjBe/I2mygg3MbpZ++hdi0ToqNUF8cjj7fBy0dg8Ug==} + + '@changesets/read@0.6.5': + resolution: {integrity: sha512-UPzNGhsSjHD3Veb0xO/MwvasGe8eMyNrR/sT9gR8Q3DhOQZirgKhhXv/8hVsI0QpPjR004Z9iFxoJU6in3uGMg==} + + '@changesets/should-skip-package@0.1.2': + resolution: {integrity: sha512-qAK/WrqWLNCP22UDdBTMPH5f41elVDlsNyat180A33dWxuUDyNpg6fPi/FyTZwRriVjg0L8gnjJn2F9XAoF0qw==} + + '@changesets/types@4.1.0': + resolution: {integrity: sha512-LDQvVDv5Kb50ny2s25Fhm3d9QSZimsoUGBsUioj6MC3qbMUCuC8GPIvk/M6IvXx3lYhAs0lwWUQLb+VIEUCECw==} + + '@changesets/types@6.1.0': + resolution: {integrity: sha512-rKQcJ+o1nKNgeoYRHKOS07tAMNd3YSN0uHaJOZYjBAgxfV7TUE7JE+z4BzZdQwb5hKaYbayKN5KrYV7ODb2rAA==} + + '@changesets/write@0.4.0': + resolution: {integrity: sha512-CdTLvIOPiCNuH71pyDu3rA+Q0n65cmAbXnwWH84rKGiFumFzkmHNT8KHTMEchcxN+Kl8I54xGUhJ7l3E7X396Q==} + + '@inquirer/external-editor@1.0.3': + resolution: {integrity: sha512-RWbSrDiYmO4LbejWY7ttpxczuwQyZLBUyygsA9Nsv95hpzUWwnNTVQmAq3xuh7vNwCp07UTmE5i11XAEExx4RA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@manypkg/find-root@1.1.0': + resolution: {integrity: sha512-mki5uBvhHzO8kYYix/WRy2WX8S3B5wdVSc9D6KcU5lQNglP2yt58/VfLuAK49glRXChosY8ap2oJ1qgma3GUVA==} + + '@manypkg/get-packages@1.1.3': + resolution: {integrity: sha512-fo+QhuU3qE/2TQMQmbVMqaQ6EWbMhi4ABWP+O4AM1NqPBuy0OrApV5LO6BrrgnhtAHS2NH6RrVk9OL181tTi8A==} + + '@nodelib/fs.scandir@2.1.5': + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + + '@nodelib/fs.stat@2.0.5': + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + + '@nodelib/fs.walk@1.2.8': + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + + '@types/node@12.20.55': + resolution: {integrity: sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ==} + + ansi-colors@4.1.3: + resolution: {integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==} + engines: {node: '>=6'} + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + argparse@1.0.10: + resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} + + array-union@2.1.0: + resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} + engines: {node: '>=8'} + + better-path-resolve@1.0.0: + resolution: {integrity: sha512-pbnl5XzGBdrFU/wT4jqmJVPn2B6UHPBOhzMQkY/SPUPB6QtUXtmBHBIwCbXJol93mOpGMnQyP/+BB19q04xj7g==} + engines: {node: '>=4'} + + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + + changesets@1.0.2: + resolution: {integrity: sha512-lnXvvqJEcK0z/6RtwKNLbejazl+Hxd1bocMcNgfLHWb4rGxuqkO/LdeGNzwIx3jHj+fNWZ6AGgK5AqNBwva4Xg==} + + chardet@2.1.1: + resolution: {integrity: 
sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ==} + + ci-info@3.9.0: + resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} + engines: {node: '>=8'} + + cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} + engines: {node: '>= 8'} + + detect-indent@6.1.0: + resolution: {integrity: sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==} + engines: {node: '>=8'} + + dir-glob@3.0.1: + resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} + engines: {node: '>=8'} + + enquirer@2.4.1: + resolution: {integrity: sha512-rRqJg/6gd538VHvR3PSrdRBb/1Vy2YfzHqzvbhGIQpDRKIa4FgV/54b5Q1xYSxOOwKvjXweS26E0Q+nAMwp2pQ==} + engines: {node: '>=8.6'} + + esprima@4.0.1: + resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} + engines: {node: '>=4'} + hasBin: true + + extendable-error@0.1.7: + resolution: {integrity: sha512-UOiS2in6/Q0FK0R0q6UY9vYpQ21mr/Qn1KOnte7vsACuNJf514WvCCUHSRCPcgjPT2bAhNIJdlE6bVap1GKmeg==} + + fast-glob@3.3.3: + resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} + engines: {node: '>=8.6.0'} + + fastq@1.19.1: + resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==} + + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + + find-up@4.1.0: + resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} + engines: {node: '>=8'} + + fs-extra@7.0.1: + resolution: {integrity: sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==} + engines: {node: '>=6 <7 || >=8'} + + fs-extra@8.1.0: + resolution: {integrity: sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==} + engines: {node: '>=6 <7 || >=8'} + + glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + + globby@11.1.0: + resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} + engines: {node: '>=10'} + + graceful-fs@4.2.11: + resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + + human-id@4.1.2: + resolution: {integrity: sha512-v/J+4Z/1eIJovEBdlV5TYj1IR+ZiohcYGRY+qN/oC9dAfKzVT023N/Bgw37hrKCoVRBvk3bqyzpr2PP5YeTMSg==} + hasBin: true + + iconv-lite@0.7.0: + resolution: {integrity: sha512-cf6L2Ds3h57VVmkZe+Pn+5APsT7FpqJtEhhieDCvrE2MK5Qk9MyffgQyuxQTm6BChfeZNtcOLHp9IcWRVcIcBQ==} + engines: {node: '>=0.10.0'} + + ignore@5.3.2: + resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} + engines: {node: '>= 4'} + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-glob@4.0.3: + resolution: {integrity: 
sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + + is-subdir@1.2.0: + resolution: {integrity: sha512-2AT6j+gXe/1ueqbW6fLZJiIw3F8iXGJtt0yDrZaBhAZEG1raiTxKWU+IPqMCzQAXOUCKdA4UDMgacKH25XG2Cw==} + engines: {node: '>=4'} + + is-windows@1.0.2: + resolution: {integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==} + engines: {node: '>=0.10.0'} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + js-yaml@3.14.2: + resolution: {integrity: sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==} + hasBin: true + + jsonfile@4.0.0: + resolution: {integrity: sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==} + + locate-path@5.0.0: + resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} + engines: {node: '>=8'} + + lodash.startcase@4.4.0: + resolution: {integrity: sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg==} + + merge2@1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + + micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} + engines: {node: '>=8.6'} + + mri@1.2.0: + resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==} + engines: {node: '>=4'} + + outdent@0.5.0: + resolution: {integrity: sha512-/jHxFIzoMXdqPzTaCpFzAAWhpkSjZPF4Vsn6jAfNpmbH/ymsmd7Qc6VE9BGn0L6YMj6uwpQLxCECpus4ukKS9Q==} + + p-filter@2.1.0: + resolution: {integrity: sha512-ZBxxZ5sL2HghephhpGAQdoskxplTwr7ICaehZwLIlfL6acuVgZPm8yBNuRAFBGEqtD/hmUeq9eqLg2ys9Xr/yw==} + engines: {node: '>=8'} + + p-limit@2.3.0: + resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} + engines: {node: '>=6'} + + p-locate@4.1.0: + resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} + engines: {node: '>=8'} + + p-map@2.1.0: + resolution: {integrity: sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==} + engines: {node: '>=6'} + + p-try@2.2.0: + resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} + engines: {node: '>=6'} + + package-manager-detector@0.2.11: + resolution: {integrity: sha512-BEnLolu+yuz22S56CU1SUKq3XC3PkwD5wv4ikR4MfGvnRVcmzXR9DwSlW2fEamyTPyXHomBJRzgapeuBvRNzJQ==} + + path-exists@4.0.0: + resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + path-type@4.0.0: + resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} + engines: {node: '>=8'} + 
+ picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + + picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + + pify@4.0.1: + resolution: {integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==} + engines: {node: '>=6'} + + prettier@2.8.8: + resolution: {integrity: sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==} + engines: {node: '>=10.13.0'} + hasBin: true + + quansync@0.2.11: + resolution: {integrity: sha512-AifT7QEbW9Nri4tAwR5M/uzpBuqfZf+zwaEM/QkzEjj7NBuFD2rBuy0K3dE+8wltbezDV7JMA0WfnCPYRSYbXA==} + + queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + read-yaml-file@1.1.0: + resolution: {integrity: sha512-VIMnQi/Z4HT2Fxuwg5KrY174U1VdUIASQVWXXyqtNRtxSr9IYkn1rsI6Tb6HsrHCmB7gVpNwX6JxPTHcH6IoTA==} + engines: {node: '>=6'} + + resolve-from@5.0.0: + resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} + engines: {node: '>=8'} + + reusify@1.1.0: + resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + + run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + + safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + + semver@7.7.3: + resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==} + engines: {node: '>=10'} + hasBin: true + + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + + slash@3.0.0: + resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} + engines: {node: '>=8'} + + spawndamnit@3.0.1: + resolution: {integrity: sha512-MmnduQUuHCoFckZoWnXsTg7JaiLBJrKFj9UI2MbRPGaJeVpsLcVBu6P/IGZovziM/YBsellCmsprgNA+w0CzVg==} + + sprintf-js@1.0.3: + resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-bom@3.0.0: + resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} + engines: {node: '>=4'} + + term-size@2.2.1: + resolution: {integrity: sha512-wK0Ri4fOGjv/XPy8SBHZChl8CM7uMc5VML7SqiQ0zG7+J5Vr+RMQDoHa2CNT6KHUnTGIXH34UDMkPzAUyapBZg==} + engines: {node: '>=8'} + + to-regex-range@5.0.1: + resolution: {integrity: 
sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + + universalify@0.1.2: + resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==} + engines: {node: '>= 4.0.0'} + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + +snapshots: + + '@babel/runtime@7.28.4': {} + + '@changesets/apply-release-plan@7.0.13': + dependencies: + '@changesets/config': 3.1.1 + '@changesets/get-version-range-type': 0.4.0 + '@changesets/git': 3.0.4 + '@changesets/should-skip-package': 0.1.2 + '@changesets/types': 6.1.0 + '@manypkg/get-packages': 1.1.3 + detect-indent: 6.1.0 + fs-extra: 7.0.1 + lodash.startcase: 4.4.0 + outdent: 0.5.0 + prettier: 2.8.8 + resolve-from: 5.0.0 + semver: 7.7.3 + + '@changesets/assemble-release-plan@6.0.9': + dependencies: + '@changesets/errors': 0.2.0 + '@changesets/get-dependents-graph': 2.1.3 + '@changesets/should-skip-package': 0.1.2 + '@changesets/types': 6.1.0 + '@manypkg/get-packages': 1.1.3 + semver: 7.7.3 + + '@changesets/changelog-git@0.2.1': + dependencies: + '@changesets/types': 6.1.0 + + '@changesets/cli@2.29.7': + dependencies: + '@changesets/apply-release-plan': 7.0.13 + '@changesets/assemble-release-plan': 6.0.9 + '@changesets/changelog-git': 0.2.1 + '@changesets/config': 3.1.1 + '@changesets/errors': 0.2.0 + '@changesets/get-dependents-graph': 2.1.3 + '@changesets/get-release-plan': 4.0.13 + '@changesets/git': 3.0.4 + '@changesets/logger': 0.1.1 + '@changesets/pre': 2.0.2 + '@changesets/read': 0.6.5 + '@changesets/should-skip-package': 0.1.2 + '@changesets/types': 6.1.0 + '@changesets/write': 0.4.0 + '@inquirer/external-editor': 1.0.3 + '@manypkg/get-packages': 1.1.3 + ansi-colors: 4.1.3 + ci-info: 3.9.0 + enquirer: 2.4.1 + fs-extra: 7.0.1 + mri: 1.2.0 + p-limit: 2.3.0 + package-manager-detector: 0.2.11 + picocolors: 1.1.1 + resolve-from: 5.0.0 + semver: 7.7.3 + spawndamnit: 3.0.1 + term-size: 2.2.1 + transitivePeerDependencies: + - '@types/node' + + '@changesets/config@3.1.1': + dependencies: + '@changesets/errors': 0.2.0 + '@changesets/get-dependents-graph': 2.1.3 + '@changesets/logger': 0.1.1 + '@changesets/types': 6.1.0 + '@manypkg/get-packages': 1.1.3 + fs-extra: 7.0.1 + micromatch: 4.0.8 + + '@changesets/errors@0.2.0': + dependencies: + extendable-error: 0.1.7 + + '@changesets/get-dependents-graph@2.1.3': + dependencies: + '@changesets/types': 6.1.0 + '@manypkg/get-packages': 1.1.3 + picocolors: 1.1.1 + semver: 7.7.3 + + '@changesets/get-release-plan@4.0.13': + dependencies: + '@changesets/assemble-release-plan': 6.0.9 + '@changesets/config': 3.1.1 + '@changesets/pre': 2.0.2 + '@changesets/read': 0.6.5 + '@changesets/types': 6.1.0 + '@manypkg/get-packages': 1.1.3 + + '@changesets/get-version-range-type@0.4.0': {} + + '@changesets/git@3.0.4': + dependencies: + '@changesets/errors': 0.2.0 + '@manypkg/get-packages': 1.1.3 + is-subdir: 1.2.0 + micromatch: 4.0.8 + spawndamnit: 3.0.1 + + '@changesets/logger@0.1.1': + dependencies: + picocolors: 1.1.1 + + '@changesets/parse@0.4.1': + dependencies: + '@changesets/types': 6.1.0 + js-yaml: 3.14.2 + + '@changesets/pre@2.0.2': + dependencies: + '@changesets/errors': 0.2.0 + '@changesets/types': 6.1.0 + '@manypkg/get-packages': 1.1.3 + fs-extra: 7.0.1 + + '@changesets/read@0.6.5': + dependencies: + '@changesets/git': 3.0.4 + '@changesets/logger': 0.1.1 + 
'@changesets/parse': 0.4.1 + '@changesets/types': 6.1.0 + fs-extra: 7.0.1 + p-filter: 2.1.0 + picocolors: 1.1.1 + + '@changesets/should-skip-package@0.1.2': + dependencies: + '@changesets/types': 6.1.0 + '@manypkg/get-packages': 1.1.3 + + '@changesets/types@4.1.0': {} + + '@changesets/types@6.1.0': {} + + '@changesets/write@0.4.0': + dependencies: + '@changesets/types': 6.1.0 + fs-extra: 7.0.1 + human-id: 4.1.2 + prettier: 2.8.8 + + '@inquirer/external-editor@1.0.3': + dependencies: + chardet: 2.1.1 + iconv-lite: 0.7.0 + + '@manypkg/find-root@1.1.0': + dependencies: + '@babel/runtime': 7.28.4 + '@types/node': 12.20.55 + find-up: 4.1.0 + fs-extra: 8.1.0 + + '@manypkg/get-packages@1.1.3': + dependencies: + '@babel/runtime': 7.28.4 + '@changesets/types': 4.1.0 + '@manypkg/find-root': 1.1.0 + fs-extra: 8.1.0 + globby: 11.1.0 + read-yaml-file: 1.1.0 + + '@nodelib/fs.scandir@2.1.5': + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + '@nodelib/fs.stat@2.0.5': {} + + '@nodelib/fs.walk@1.2.8': + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.19.1 + + '@types/node@12.20.55': {} + + ansi-colors@4.1.3: {} + + ansi-regex@5.0.1: {} + + argparse@1.0.10: + dependencies: + sprintf-js: 1.0.3 + + array-union@2.1.0: {} + + better-path-resolve@1.0.0: + dependencies: + is-windows: 1.0.2 + + braces@3.0.3: + dependencies: + fill-range: 7.1.1 + + changesets@1.0.2: {} + + chardet@2.1.1: {} + + ci-info@3.9.0: {} + + cross-spawn@7.0.6: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + detect-indent@6.1.0: {} + + dir-glob@3.0.1: + dependencies: + path-type: 4.0.0 + + enquirer@2.4.1: + dependencies: + ansi-colors: 4.1.3 + strip-ansi: 6.0.1 + + esprima@4.0.1: {} + + extendable-error@0.1.7: {} + + fast-glob@3.3.3: + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.8 + + fastq@1.19.1: + dependencies: + reusify: 1.1.0 + + fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + + find-up@4.1.0: + dependencies: + locate-path: 5.0.0 + path-exists: 4.0.0 + + fs-extra@7.0.1: + dependencies: + graceful-fs: 4.2.11 + jsonfile: 4.0.0 + universalify: 0.1.2 + + fs-extra@8.1.0: + dependencies: + graceful-fs: 4.2.11 + jsonfile: 4.0.0 + universalify: 0.1.2 + + glob-parent@5.1.2: + dependencies: + is-glob: 4.0.3 + + globby@11.1.0: + dependencies: + array-union: 2.1.0 + dir-glob: 3.0.1 + fast-glob: 3.3.3 + ignore: 5.3.2 + merge2: 1.4.1 + slash: 3.0.0 + + graceful-fs@4.2.11: {} + + human-id@4.1.2: {} + + iconv-lite@0.7.0: + dependencies: + safer-buffer: 2.1.2 + + ignore@5.3.2: {} + + is-extglob@2.1.1: {} + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-number@7.0.0: {} + + is-subdir@1.2.0: + dependencies: + better-path-resolve: 1.0.0 + + is-windows@1.0.2: {} + + isexe@2.0.0: {} + + js-yaml@3.14.2: + dependencies: + argparse: 1.0.10 + esprima: 4.0.1 + + jsonfile@4.0.0: + optionalDependencies: + graceful-fs: 4.2.11 + + locate-path@5.0.0: + dependencies: + p-locate: 4.1.0 + + lodash.startcase@4.4.0: {} + + merge2@1.4.1: {} + + micromatch@4.0.8: + dependencies: + braces: 3.0.3 + picomatch: 2.3.1 + + mri@1.2.0: {} + + outdent@0.5.0: {} + + p-filter@2.1.0: + dependencies: + p-map: 2.1.0 + + p-limit@2.3.0: + dependencies: + p-try: 2.2.0 + + p-locate@4.1.0: + dependencies: + p-limit: 2.3.0 + + p-map@2.1.0: {} + + p-try@2.2.0: {} + + package-manager-detector@0.2.11: + dependencies: + quansync: 0.2.11 + + path-exists@4.0.0: {} + + path-key@3.1.1: {} + + path-type@4.0.0: {} + + picocolors@1.1.1: {} + + 
picomatch@2.3.1: {} + + pify@4.0.1: {} + + prettier@2.8.8: {} + + quansync@0.2.11: {} + + queue-microtask@1.2.3: {} + + read-yaml-file@1.1.0: + dependencies: + graceful-fs: 4.2.11 + js-yaml: 3.14.2 + pify: 4.0.1 + strip-bom: 3.0.0 + + resolve-from@5.0.0: {} + + reusify@1.1.0: {} + + run-parallel@1.2.0: + dependencies: + queue-microtask: 1.2.3 + + safer-buffer@2.1.2: {} + + semver@7.7.3: {} + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: {} + + signal-exit@4.1.0: {} + + slash@3.0.0: {} + + spawndamnit@3.0.1: + dependencies: + cross-spawn: 7.0.6 + signal-exit: 4.1.0 + + sprintf-js@1.0.3: {} + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-bom@3.0.0: {} + + term-size@2.2.1: {} + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + + universalify@0.1.2: {} + + which@2.0.2: + dependencies: + isexe: 2.0.0 diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml new file mode 100644 index 00000000..dee51e92 --- /dev/null +++ b/pnpm-workspace.yaml @@ -0,0 +1,2 @@ +packages: + - "packages/*" diff --git a/pyproject.toml b/pyproject.toml index a2a00bdb..c3e40887 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,73 +2,50 @@ requires = ["hatchling"] build-backend = "hatchling.build" -[dependency-groups] -dev = [ - "pre-commit>=4.2.0", - "pytest>=8.4.0", - "pytest-asyncio>=1.0.0", - "pytest-cov>=6.1.1", - "httpx>=0.25.0", - "hatch>=1.14.1", - "pyyaml>=6.0.2", - "packaging>=21.0", - "pytest-xdist>=3.8.0", - "pytest-timeout>=2.4.0", - "structlog>=25.5.0" -] - [project] -name = "llama-index-workflows" -version = "2.11.1" -description = "An event-driven, async-first, step-based way to control the execution flow of AI applications like Agents." +name = "llama-agents-workspace" +version = "0.1.0" +description = "Monorepo workspace for the LlamaIndex Workflows packages." 
readme = "README.md" license = "MIT" requires-python = ">=3.9" dependencies = [ - "eval-type-backport>=0.2.2 ; python_full_version < '3.10'", - "llama-index-instrumentation>=0.1.0", - "pydantic>=2.11.5", - "typing-extensions>=4.6.0" + "tomli>=2.3.0" ] -[project.optional-dependencies] -server = ["starlette>=0.39.0", "uvicorn>=0.32.0"] -client = ["httpx>=0.28.1,<1"] - [tool.basedpyright] typeCheckingMode = "standard" [[tool.basedpyright.executionEnvironments]] -root = "src" +root = "packages/llama-index-workflows" pythonVersion = "3.9" [[tool.basedpyright.executionEnvironments]] -root = "tests" +root = "packages/llama-index-utils-workflow" pythonVersion = "3.9" +[[tool.basedpyright.executionEnvironments]] +root = "packages/workflows-dev" +pythonVersion = "3.14" + [[tool.basedpyright.executionEnvironments]] root = "examples" pythonVersion = "3.14" [tool.coverage.run] -omit = ["tests/*"] - -[tool.hatch.build.targets.wheel] -packages = ["src/workflows"] - -[tool.hatch.envs.server] -features = ["server"] -dependencies = ["pyyaml>=6.0.2"] - -[tool.hatch.envs.server.scripts] -openapi = "python -m workflows.server.server --output openapi.json" +omit = ["**/tests/*"] [tool.pytest.ini_options] asyncio_mode = "auto" -testpaths = ["tests"] [tool.uv.sources] llama-index-workflows = {workspace = true} +llama-index-utils-workflow = {workspace = true} +workflows-dev = {workspace = true} [tool.uv.workspace] -members = ["packages/llama-index-utils-workflow"] +members = [ + "packages/llama-index-workflows", + "packages/llama-index-utils-workflow", + "packages/workflows-dev" +] diff --git a/scripts/detect_change_type.py b/scripts/detect_change_type.py deleted file mode 100644 index 79590393..00000000 --- a/scripts/detect_change_type.py +++ /dev/null @@ -1,108 +0,0 @@ -#!/usr/bin/env python3 -"""Detect the type of change based on version tags.""" - -import os -import subprocess -import sys -from typing import Optional - -from packaging.version import Version - - -def get_previous_tag() -> Optional[str]: - """Get the previous release tag.""" - try: - # Get all tags sorted by version - result = subprocess.run( - ["git", "tag", "-l", "v*", "--sort=-version:refname"], - capture_output=True, - text=True, - check=True, - ) - tags = result.stdout.strip().split("\n") - - # Current tag is the first one (if we're on a tag) - current_ref = os.environ.get("GITHUB_REF", "") - if current_ref.startswith("refs/tags/"): - current_tag = current_ref.replace("refs/tags/", "") - # Find current tag in list and return the next one - if current_tag in tags: - current_index = tags.index(current_tag) - if current_index + 1 < len(tags): - return tags[current_index + 1] - - # If not on a tag or no previous tag found - return tags[0] if tags else None - except subprocess.CalledProcessError: - return None - - -def detect_change_type(current_version: str, previous_version: Optional[str]) -> str: - """Detect the type of change based on version comparison.""" - if not previous_version: - # First release or no previous version - return "major" - - try: - # Remove 'v' prefix if present - current_clean = current_version.lstrip("v") - previous_clean = previous_version.lstrip("v") - - curr_version = Version(current_clean) - prev_version = Version(previous_clean) - - # Compare versions using packaging.version - if curr_version <= prev_version: - # Same or lower version (shouldn't happen in normal flow) - return "none" - - # Extract major.minor.patch components for semantic comparison - curr_release = curr_version.release - prev_release = 
prev_version.release - - # Ensure we have at least 3 components (major, minor, patch) - curr_major, curr_minor, curr_patch = (curr_release + (0, 0, 0))[:3] - prev_major, prev_minor, prev_patch = (prev_release + (0, 0, 0))[:3] - - if curr_major > prev_major: - return "major" - elif curr_minor > prev_minor: - return "minor" - elif curr_patch > prev_patch: - return "patch" - else: - # This shouldn't happen if curr_version > prev_version - return "minor" - except Exception: - # If we can't parse versions, default to minor - return "minor" - - -def main() -> None: - """Main function to detect change type.""" - # Get current tag from GITHUB_REF - github_ref = os.environ.get("GITHUB_REF", "") - if not github_ref.startswith("refs/tags/"): - print("Not a tag push, no change type detection needed") - sys.exit(0) - - current_tag = github_ref.replace("refs/tags/", "") - previous_tag = get_previous_tag() - - change_type = detect_change_type(current_tag, previous_tag) - - print(f"Current tag: {current_tag}") - if previous_tag: - print(f"Previous tag: {previous_tag}") - else: - print("No previous tag found") - print(f"Change type: {change_type}") - - # Output for GitHub Actions - if github_output := os.environ.get("GITHUB_OUTPUT"): - with open(github_output, "a") as f: - f.write(f"change_type={change_type}\n") - - -if __name__ == "__main__": - main() diff --git a/scripts/update_index_html.py b/scripts/update_index_html.py deleted file mode 100755 index dc3da932..00000000 --- a/scripts/update_index_html.py +++ /dev/null @@ -1,90 +0,0 @@ -#!/usr/bin/env python3 -"""Update debugger asset URLs in index.html.""" - -import argparse -import re -import sys -from pathlib import Path -from typing import Optional - - -def update_index_html( - js_url: str, css_url: str, index_path: Optional[Path] = None -) -> None: - """Update the script and CSS URLs in index.html. 
- - Args: - js_url: New JavaScript file URL - css_url: New CSS file URL - index_path: Path to index.html (defaults to src/workflows/server/static/index.html) - - Raises: - FileNotFoundError: If index.html doesn't exist - ValueError: If required elements not found in HTML - """ - if index_path is None: - index_path = ( - Path(__file__).parent.parent - / "src" - / "workflows" - / "server" - / "static" - / "index.html" - ) - - if not index_path.exists(): - raise FileNotFoundError(f"index.html not found at {index_path}") - - # Read the file - content = index_path.read_text() - - # Update script src - script_pattern = ( - r']*>' - ) - new_script = f'' - - updated_content, script_count = re.subn(script_pattern, new_script, content) - if script_count == 0: - raise ValueError("Could not find script tag in index.html") - - # Update CSS href - css_pattern = r']*>' - new_css = f'' - - updated_content, css_count = re.subn(css_pattern, new_css, updated_content) - if css_count == 0: - raise ValueError("Could not find link tag in index.html") - - # Write back - index_path.write_text(updated_content) - - print("✅ Updated index.html:") - print(f" JavaScript: {js_url}") - print(f" CSS: {css_url}") - - -def main() -> None: - """Main function to update index.html from command line.""" - parser = argparse.ArgumentParser( - description="Update debugger asset URLs in index.html" - ) - parser.add_argument("--js-url", required=True, help="URL for the JavaScript file") - parser.add_argument("--css-url", required=True, help="URL for the CSS file") - parser.add_argument( - "--index-path", - type=Path, - help="Path to index.html (optional, defaults to src/workflows/server/static/index.html)", - ) - - args = parser.parse_args() - - try: - update_index_html(args.js_url, args.css_url, args.index_path) - except Exception as e: - print(f"Error: {e}", file=sys.stderr) - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/scripts/validate_version.py b/scripts/validate_version.py deleted file mode 100644 index 3203f23d..00000000 --- a/scripts/validate_version.py +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env python3 -"""Validate that release tag matches pyproject.toml version.""" - -import os -import re -import sys -from pathlib import Path - -from packaging.version import Version - - -def get_pyproject_version() -> str: - """Extract version from pyproject.toml.""" - pyproject_path = Path(__file__).parent.parent / "pyproject.toml" - with open(pyproject_path, "r") as f: - content = f.read() - # Look for version = "x.x.x" in the [project] section - # First find the [project] section - project_match = re.search(r"\[project\]", content) - if not project_match: - raise ValueError("Could not find [project] section in pyproject.toml") - - # Then find the version line after [project] - version_pattern = r'version\s*=\s*["\']([^"\']+)["\']' - version_match = re.search(version_pattern, content[project_match.start() :]) - if not version_match: - raise ValueError("Could not find version in pyproject.toml") - - return version_match.group(1) - - -def main() -> None: - # Get pyproject.toml version - try: - pyproject_version = get_pyproject_version() - except Exception as e: - print(f"Error: {e}") - sys.exit(1) - - # Get tag from GitHub ref - github_ref = os.environ.get("GITHUB_REF", "") - if not github_ref.startswith("refs/tags/"): - print("Error: Not a tag push") - sys.exit(1) - - tag = github_ref.replace("refs/tags/", "") - tag_version = tag[1:] if tag.startswith("v") else tag - - # Validate versions match using 
packaging.version for robust comparison - try: - pyproject_ver = Version(pyproject_version) - tag_ver = Version(tag_version) - - if pyproject_ver != tag_ver: - print( - f"Error: Tag {tag} (version {tag_version}) doesn't match pyproject.toml version {pyproject_version}" - ) - sys.exit(1) - - print(f"✅ Version validated: {pyproject_version} (tag: {tag})") - except Exception as e: - print(f"Error: Invalid version format - {e}") - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/src/llama_agents_workspace/__init__.py b/src/llama_agents_workspace/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_scripts.py b/tests/test_scripts.py deleted file mode 100644 index f8d97448..00000000 --- a/tests/test_scripts.py +++ /dev/null @@ -1,491 +0,0 @@ -"""Tests for scripts in the scripts/ directory.""" - -import os -import subprocess -import sys -import tempfile -from pathlib import Path - - -def test_validate_version_matching() -> None: - """Test the script succeeds when versions match.""" - with tempfile.TemporaryDirectory() as tmpdir: - # Create mock pyproject.toml - pyproject_path = Path(tmpdir) / "pyproject.toml" - pyproject_path.write_text(""" -[project] -name = "test-package" -version = "1.2.3" -description = "Test package" -""") - - # Create scripts directory and copy the script - scripts_dir = Path(tmpdir) / "scripts" - scripts_dir.mkdir() - script_path = scripts_dir / "validate_version.py" - original_script = Path("scripts/validate_version.py") - script_path.write_text(original_script.read_text()) - - env = os.environ.copy() - env["GITHUB_REF"] = "refs/tags/v1.2.3" # Matches the mock version - - result = subprocess.run( - [sys.executable, str(script_path)], - capture_output=True, - text=True, - env=env, - cwd=tmpdir, - ) - assert result.returncode == 0 - assert "Version validated: 1.2.3" in result.stdout - - -def test_validate_version_not_matching() -> None: - """Test the script fails when versions don't match.""" - with tempfile.TemporaryDirectory() as tmpdir: - # Create mock pyproject.toml - pyproject_path = Path(tmpdir) / "pyproject.toml" - pyproject_path.write_text(""" -[project] -name = "test-package" -version = "1.2.3" -description = "Test package" -""") - - # Create scripts directory and copy the script - scripts_dir = Path(tmpdir) / "scripts" - scripts_dir.mkdir() - script_path = scripts_dir / "validate_version.py" - original_script = Path("scripts/validate_version.py") - script_path.write_text(original_script.read_text()) - - env = os.environ.copy() - env["GITHUB_REF"] = "refs/tags/v9.9.9" # Doesn't match - - result = subprocess.run( - [sys.executable, str(script_path)], - capture_output=True, - text=True, - env=env, - cwd=tmpdir, - ) - assert result.returncode == 1 - assert "doesn't match pyproject.toml version" in result.stdout - - -def test_validate_version_not_a_tag() -> None: - """Test the script exits gracefully when not a tag push.""" - with tempfile.TemporaryDirectory() as tmpdir: - # Create mock pyproject.toml - pyproject_path = Path(tmpdir) / "pyproject.toml" - pyproject_path.write_text(""" -[project] -name = "test-package" -version = "1.2.3" -""") - - # Create scripts directory and copy the script - scripts_dir = Path(tmpdir) / "scripts" - scripts_dir.mkdir() - script_path = scripts_dir / "validate_version.py" - original_script = Path("scripts/validate_version.py") - script_path.write_text(original_script.read_text()) - - env = os.environ.copy() - env["GITHUB_REF"] = "refs/heads/main" - - result = subprocess.run( - 
[sys.executable, str(script_path)], - capture_output=True, - text=True, - env=env, - cwd=tmpdir, - ) - assert result.returncode == 1 - assert "Not a tag push" in result.stdout - - -def test_validate_version_no_version() -> None: - """Test the script handles missing version gracefully.""" - with tempfile.TemporaryDirectory() as tmpdir: - # Create mock pyproject.toml without version - pyproject_path = Path(tmpdir) / "pyproject.toml" - pyproject_path.write_text(""" -[project] -name = "test-package" -description = "Test package" -""") - - # Create scripts directory and copy the script - scripts_dir = Path(tmpdir) / "scripts" - scripts_dir.mkdir() - script_path = scripts_dir / "validate_version.py" - original_script = Path("scripts/validate_version.py") - script_path.write_text(original_script.read_text()) - - env = os.environ.copy() - env["GITHUB_REF"] = "refs/tags/v1.0.0" - - result = subprocess.run( - [sys.executable, str(script_path)], - capture_output=True, - text=True, - env=env, - cwd=tmpdir, - ) - assert result.returncode == 1 - # The error message will vary based on tomllib vs line parsing - - -def test_detect_change_type_patch() -> None: - """Test detecting patch version change.""" - with tempfile.TemporaryDirectory() as tmpdir: - # Initialize git repo with tags - subprocess.run(["git", "init"], cwd=tmpdir, check=True, capture_output=True) - subprocess.run( - ["git", "config", "user.email", "test@example.com"], cwd=tmpdir, check=True - ) - subprocess.run( - ["git", "config", "user.name", "Test User"], cwd=tmpdir, check=True - ) - - # Create a dummy file and make commits with tags - dummy_file = Path(tmpdir) / "dummy.txt" - dummy_file.write_text("v1.0.0") - subprocess.run(["git", "add", "."], cwd=tmpdir, check=True) - subprocess.run( - ["git", "commit", "-m", "v1.0.0"], - cwd=tmpdir, - check=True, - capture_output=True, - ) - subprocess.run(["git", "tag", "v1.0.0"], cwd=tmpdir, check=True) - - dummy_file.write_text("v1.0.1") - subprocess.run(["git", "add", "."], cwd=tmpdir, check=True) - subprocess.run( - ["git", "commit", "-m", "v1.0.1"], - cwd=tmpdir, - check=True, - capture_output=True, - ) - subprocess.run(["git", "tag", "v1.0.1"], cwd=tmpdir, check=True) - - # Copy the script - script_path = Path(tmpdir) / "detect_change_type.py" - original_script = Path("scripts/detect_change_type.py") - script_path.write_text(original_script.read_text()) - - env = os.environ.copy() - env["GITHUB_REF"] = "refs/tags/v1.0.1" - - result = subprocess.run( - [sys.executable, str(script_path)], - capture_output=True, - text=True, - env=env, - cwd=tmpdir, - ) - - assert result.returncode == 0 - assert "Current tag: v1.0.1" in result.stdout - assert "Previous tag: v1.0.0" in result.stdout - assert "Change type: patch" in result.stdout - - -def test_detect_change_type_minor() -> None: - """Test detecting minor version change.""" - with tempfile.TemporaryDirectory() as tmpdir: - # Initialize git repo with tags - subprocess.run(["git", "init"], cwd=tmpdir, check=True, capture_output=True) - subprocess.run( - ["git", "config", "user.email", "test@example.com"], cwd=tmpdir, check=True - ) - subprocess.run( - ["git", "config", "user.name", "Test User"], cwd=tmpdir, check=True - ) - - # Create tags - dummy_file = Path(tmpdir) / "dummy.txt" - dummy_file.write_text("v1.0.0") - subprocess.run(["git", "add", "."], cwd=tmpdir, check=True) - subprocess.run( - ["git", "commit", "-m", "v1.0.0"], - cwd=tmpdir, - check=True, - capture_output=True, - ) - subprocess.run(["git", "tag", "v1.0.0"], cwd=tmpdir, check=True) - - 
dummy_file.write_text("v1.1.0")
-        subprocess.run(["git", "add", "."], cwd=tmpdir, check=True)
-        subprocess.run(
-            ["git", "commit", "-m", "v1.1.0"],
-            cwd=tmpdir,
-            check=True,
-            capture_output=True,
-        )
-        subprocess.run(["git", "tag", "v1.1.0"], cwd=tmpdir, check=True)
-
-        # Copy the script
-        script_path = Path(tmpdir) / "detect_change_type.py"
-        original_script = Path("scripts/detect_change_type.py")
-        script_path.write_text(original_script.read_text())
-
-        env = os.environ.copy()
-        env["GITHUB_REF"] = "refs/tags/v1.1.0"
-
-        result = subprocess.run(
-            [sys.executable, str(script_path)],
-            capture_output=True,
-            text=True,
-            env=env,
-            cwd=tmpdir,
-        )
-
-        assert result.returncode == 0
-        assert "Change type: minor" in result.stdout
-
-
-def test_detect_change_type_major() -> None:
-    """Test detecting major version change."""
-    with tempfile.TemporaryDirectory() as tmpdir:
-        # Initialize git repo with tags
-        subprocess.run(["git", "init"], cwd=tmpdir, check=True, capture_output=True)
-        subprocess.run(
-            ["git", "config", "user.email", "test@example.com"], cwd=tmpdir, check=True
-        )
-        subprocess.run(
-            ["git", "config", "user.name", "Test User"], cwd=tmpdir, check=True
-        )
-
-        # Create tags
-        dummy_file = Path(tmpdir) / "dummy.txt"
-        dummy_file.write_text("v1.0.0")
-        subprocess.run(["git", "add", "."], cwd=tmpdir, check=True)
-        subprocess.run(
-            ["git", "commit", "-m", "v1.0.0"],
-            cwd=tmpdir,
-            check=True,
-            capture_output=True,
-        )
-        subprocess.run(["git", "tag", "v1.0.0"], cwd=tmpdir, check=True)
-
-        dummy_file.write_text("v2.0.0")
-        subprocess.run(["git", "add", "."], cwd=tmpdir, check=True)
-        subprocess.run(
-            ["git", "commit", "-m", "v2.0.0"],
-            cwd=tmpdir,
-            check=True,
-            capture_output=True,
-        )
-        subprocess.run(["git", "tag", "v2.0.0"], cwd=tmpdir, check=True)
-
-        # Copy the script
-        script_path = Path(tmpdir) / "detect_change_type.py"
-        original_script = Path("scripts/detect_change_type.py")
-        script_path.write_text(original_script.read_text())
-
-        env = os.environ.copy()
-        env["GITHUB_REF"] = "refs/tags/v2.0.0"
-
-        result = subprocess.run(
-            [sys.executable, str(script_path)],
-            capture_output=True,
-            text=True,
-            env=env,
-            cwd=tmpdir,
-        )
-
-        assert result.returncode == 0
-        assert "Change type: major" in result.stdout
-
-
-def test_detect_change_type_not_a_tag() -> None:
-    """Test the script handles non-tag refs gracefully."""
-    with tempfile.TemporaryDirectory() as tmpdir:
-        script_path = Path(tmpdir) / "detect_change_type.py"
-        original_script = Path("scripts/detect_change_type.py")
-        script_path.write_text(original_script.read_text())
-
-        env = os.environ.copy()
-        env["GITHUB_REF"] = "refs/heads/main"
-
-        result = subprocess.run(
-            [sys.executable, str(script_path)],
-            capture_output=True,
-            text=True,
-            env=env,
-            cwd=tmpdir,
-        )
-
-        assert result.returncode == 0
-        assert "Not a tag push" in result.stdout
-
-
-def test_detect_change_type_github_output() -> None:
-    """Test that the script writes to GITHUB_OUTPUT when set."""
-    with tempfile.TemporaryDirectory() as tmpdir:
-        # Initialize git repo with tags
-        subprocess.run(["git", "init"], cwd=tmpdir, check=True, capture_output=True)
-        subprocess.run(
-            ["git", "config", "user.email", "test@example.com"], cwd=tmpdir, check=True
-        )
-        subprocess.run(
-            ["git", "config", "user.name", "Test User"], cwd=tmpdir, check=True
-        )
-
-        dummy_file = Path(tmpdir) / "dummy.txt"
-        dummy_file.write_text("v1.0.0")
-        subprocess.run(["git", "add", "."], cwd=tmpdir, check=True)
-        subprocess.run(
-            ["git", "commit", "-m", "v1.0.0"],
-            cwd=tmpdir,
-            check=True,
-            capture_output=True,
-        )
-        subprocess.run(["git", "tag", "v1.0.0"], cwd=tmpdir, check=True)
-
-        dummy_file.write_text("v1.0.1")
-        subprocess.run(["git", "add", "."], cwd=tmpdir, check=True)
-        subprocess.run(
-            ["git", "commit", "-m", "v1.0.1"],
-            cwd=tmpdir,
-            check=True,
-            capture_output=True,
-        )
-        subprocess.run(["git", "tag", "v1.0.1"], cwd=tmpdir, check=True)
-
-        # Copy the script
-        script_path = Path(tmpdir) / "detect_change_type.py"
-        original_script = Path("scripts/detect_change_type.py")
-        script_path.write_text(original_script.read_text())
-
-        # Create output file
-        output_file = Path(tmpdir) / "github_output.txt"
-
-        env = os.environ.copy()
-        env["GITHUB_REF"] = "refs/tags/v1.0.1"
-        env["GITHUB_OUTPUT"] = str(output_file)
-
-        subprocess.run(
-            [sys.executable, str(script_path)],
-            capture_output=True,
-            text=True,
-            env=env,
-            cwd=tmpdir,
-        )
-
-        # Check output file
-        output_content = output_file.read_text()
-        assert "change_type=patch" in output_content
-
-
-def test_update_index_html_success() -> None:
-    """Test updating index.html with new URLs."""
-    with tempfile.TemporaryDirectory() as tmpdir:
-        # Create a mock index.html
-        index_path = Path(tmpdir) / "index.html"
-        index_path.write_text("""
-
-
-
-
-    workflow-debugger
-
-
-
-
-
-
-
-""")
-
-        # Copy the script
-        script_path = Path(tmpdir) / "update_index_html.py"
-        original_script = Path("scripts/update_index_html.py")
-        script_path.write_text(original_script.read_text())
-
-        result = subprocess.run(
-            [
-                sys.executable,
-                str(script_path),
-                "--js-url",
-                "https://example.com/new-app.js",
-                "--css-url",
-                "https://example.com/new-app.css",
-                "--index-path",
-                str(index_path),
-            ],
-            capture_output=True,
-            text=True,
-            cwd=tmpdir,
-        )
-
-        assert result.returncode == 0
-        assert "Updated index.html" in result.stdout
-
-        # Check that the file was updated
-        updated_content = index_path.read_text()
-        assert "https://example.com/new-app.js" in updated_content
-        assert "https://example.com/new-app.css" in updated_content
-        # Old URLs should not be present
-        assert "@llamaindex/workflow-debugger@latest" not in updated_content
-
-
-def test_update_index_html_missing_file() -> None:
-    """Test that script handles missing index.html gracefully."""
-    with tempfile.TemporaryDirectory() as tmpdir:
-        script_path = Path(tmpdir) / "update_index_html.py"
-        original_script = Path("scripts/update_index_html.py")
-        script_path.write_text(original_script.read_text())
-
-        result = subprocess.run(
-            [
-                sys.executable,
-                str(script_path),
-                "--js-url",
-                "https://example.com/app.js",
-                "--css-url",
-                "https://example.com/app.css",
-                "--index-path",
-                str(Path(tmpdir) / "nonexistent.html"),
-            ],
-            capture_output=True,
-            text=True,
-            cwd=tmpdir,
-        )
-
-        assert result.returncode == 1
-        assert "Error" in result.stderr
-
-
-def test_update_index_html_invalid_html() -> None:
-    """Test that script handles invalid HTML structure gracefully."""
-    with tempfile.TemporaryDirectory() as tmpdir:
-        # Create invalid HTML without required tags
-        index_path = Path(tmpdir) / "index.html"
-        index_path.write_text("Invalid")
-
-        script_path = Path(tmpdir) / "update_index_html.py"
-        original_script = Path("scripts/update_index_html.py")
-        script_path.write_text(original_script.read_text())
-
-        result = subprocess.run(
-            [
-                sys.executable,
-                str(script_path),
-                "--js-url",
-                "https://example.com/app.js",
-                "--css-url",
-                "https://example.com/app.css",
-                "--index-path",
-                str(index_path),
-            ],
-            capture_output=True,
-            text=True,
-            cwd=tmpdir,
-        )
-
-        assert result.returncode == 1
-        assert "Error" in result.stderr
diff --git a/uv.lock b/uv.lock
index 8549ae4c..6804a9ac 100644
--- a/uv.lock
+++ b/uv.lock
@@ -9,8 +9,10 @@ resolution-markers = [
 ]
 [manifest]
 members = [
+    "llama-agents-workspace",
     "llama-index-utils-workflow",
     "llama-index-workflows",
+    "workflows-dev",
 ]
 
 [[package]]
@@ -1469,6 +1471,17 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/d3/32/da7f44bcb1105d3e88a0b74ebdca50c59121d2ddf71c9e34ba47df7f3a56/keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd", size = 39085, upload-time = "2024-12-25T15:26:44.377Z" },
 ]
 
+[[package]]
+name = "llama-agents-workspace"
+version = "0.1.0"
+source = { editable = "." }
+dependencies = [
+    { name = "tomli" },
+]
+
+[package.metadata]
+requires-dist = [{ name = "tomli", specifier = ">=2.3.0" }]
+
 [[package]]
 name = "llama-index-core"
 version = "0.14.6"
@@ -1547,7 +1560,7 @@ dev = [
 [package.metadata]
 requires-dist = [
     { name = "llama-index-core", specifier = ">=0.14,<0.15.0" },
-    { name = "llama-index-workflows", editable = "." },
+    { name = "llama-index-workflows", editable = "packages/llama-index-workflows" },
     { name = "pyvis", specifier = ">=0.3.2" },
 ]
 
@@ -1561,7 +1574,7 @@ dev = [
 [[package]]
 name = "llama-index-workflows"
 version = "2.11.1"
-source = { editable = "." }
+source = { editable = "packages/llama-index-workflows" }
 dependencies = [
     { name = "eval-type-backport", marker = "python_full_version < '3.10'" },
     { name = "llama-index-instrumentation" },
@@ -1590,7 +1603,9 @@ dev = [
     { name = "pytest-timeout" },
     { name = "pytest-xdist" },
     { name = "pyyaml" },
+    { name = "starlette" },
     { name = "structlog" },
+    { name = "uvicorn" },
 ]
 
 [package.metadata]
@@ -1617,7 +1632,9 @@ dev = [
     { name = "pytest-timeout", specifier = ">=2.4.0" },
     { name = "pytest-xdist", specifier = ">=3.8.0" },
     { name = "pyyaml", specifier = ">=6.0.2" },
+    { name = "starlette", specifier = ">=0.39.0" },
     { name = "structlog", specifier = ">=25.5.0" },
+    { name = "uvicorn", specifier = ">=0.32.0" },
 ]
 
 [[package]]
@@ -3632,6 +3649,40 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" },
 ]
 
+[[package]]
+name = "workflows-dev"
+version = "0.1.0"
+source = { editable = "packages/workflows-dev" }
+dependencies = [
+    { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" },
+    { name = "click", version = "8.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" },
+    { name = "httpx" },
+    { name = "packaging" },
+    { name = "pydantic" },
+    { name = "tomlkit" },
+]
+
+[package.dev-dependencies]
+dev = [
+    { name = "pytest" },
+    { name = "pytest-cov" },
+]
+
+[package.metadata]
+requires-dist = [
+    { name = "click", specifier = ">=8.1.7" },
+    { name = "httpx", specifier = ">=0.28.1" },
+    { name = "packaging", specifier = ">=24.1" },
+    { name = "pydantic", specifier = ">=2.12.3" },
+    { name = "tomlkit", specifier = ">=0.13.3" },
+]
+
+[package.metadata.requires-dev]
+dev = [
+    { name = "pytest", specifier = ">=8.4.2" },
+    { name = "pytest-cov", specifier = ">=7.0.0" },
+]
+
 [[package]]
 name = "wrapt"
 version = "2.0.0"