Add skip_noisy_assignment to dataset.cluster #2382


Workflow file for this run

name: Backend
on:
  pull_request:
    types: [opened, reopened, synchronize]
    paths:
      - 'lilac/**'
      - '**/*.py'
      - 'poetry.lock'
      - 'poetry.toml'
      - 'pyproject.toml'
      - 'mypy.ini'
      - 'pytest.ini'
      - '.python-version'
      - '.env'
      - '.coveragerc'
      - '.github/workflows/python.yml'
      - 'scripts/**'
jobs:
  build:
    name: 'Python lint and test'
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ['3.9', '3.11']
    timeout-minutes: 15
    steps:
      - uses: actions/checkout@v3
      - name: Install poetry
        run: pipx install poetry
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          cache: 'poetry'
      - name: Install dependencies
        # Re-install pytorch for cpu to avoid errors that cuda was not found
        run: |
          ./scripts/setup_py.sh
          poetry run pip3 install --force-reinstall --pre torch==2.* --index-url https://download.pytorch.org/whl/nightly/cpu
          poetry run pip3 install typing_extensions==4.9.0 --force-reinstall
      - name: Test CLI
        run: |
          ./integration_tests/run_integration_tests.sh
      # We run testing before linting since testing, surprisingly, is much faster.
      - name: Test python
        run: |
          poetry run ./scripts/test_py.sh
      - name: Lint python
        run: |
          poetry run ./scripts/lint_py.sh
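
For context on the change this run validates: the PR adds a skip_noisy_assignment option to dataset.cluster in Lilac. Below is a minimal usage sketch, assuming the option is passed as a boolean keyword argument; the project directory, dataset namespace/name, and the 'text' field are illustrative placeholders, not taken from this PR.

import lilac as ll

ll.set_project_dir('./data')  # illustrative project directory

# Placeholder namespace/name; any existing Lilac dataset would do.
dataset = ll.get_dataset('local', 'my_dataset')

# Assumed keyword form of the new option. Presumably, when True, the extra
# pass that assigns noise points to their nearest cluster is skipped.
dataset.cluster('text', skip_noisy_assignment=True)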