diff --git a/.github/workflows/check_docs.yaml b/.github/workflows/check_docs.yaml
index 76437c23..b3b1a90a 100644
--- a/.github/workflows/check_docs.yaml
+++ b/.github/workflows/check_docs.yaml
@@ -1,11 +1,12 @@
 name: Check documentation status
 
-on: push
+on:
+  workflow_call:
 
 jobs:
   check_docs:
     name: Check whether documentation is up-to-date
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
 
     steps:
       - name: Checkout repository
diff --git a/.github/workflows/integration_tests.yaml b/.github/workflows/integration_tests.yaml
new file mode 100644
index 00000000..39282be2
--- /dev/null
+++ b/.github/workflows/integration_tests.yaml
@@ -0,0 +1,37 @@
+name: Integration tests
+
+on:
+  workflow_call:
+    secrets:
+      APIFY_TEST_USER_API_TOKEN:
+        description: API token of the Python SDK testing user on Apify
+        required: true
+
+concurrency: # This is to make sure that only one run of this workflow is running at the same time, to not overshoot the test user limits
+  group: integration_tests
+
+jobs:
+  integration_tests:
+    name: Run integration tests
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: ["3.8", "3.9", "3.10", "3.11"]
+      max-parallel: 1 # no concurrency on this level, to not overshoot the test user limits
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Install dependencies
+        run: make install-dev
+
+      - name: Run integration tests
+        run: make INTEGRATION_TESTS_CONCURRENCY=8 integration-tests
+        env:
+          APIFY_TEST_USER_API_TOKEN: ${{ secrets.APIFY_TEST_USER_API_TOKEN }}
diff --git a/.github/workflows/lint_and_test.yaml b/.github/workflows/lint_and_test.yaml
index 19fa01ad..704c50e4 100644
--- a/.github/workflows/lint_and_test.yaml
+++ b/.github/workflows/lint_and_test.yaml
@@ -1,11 +1,12 @@
 name: Lint and test
 
-on: push
+on:
+  workflow_call:
 
 jobs:
   lint_and_test:
     name: Lint, check types and run unit tests
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-latest
     strategy:
       matrix:
         python-version: ["3.8", "3.9", "3.10", "3.11"]
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 284646d7..6107e9e2 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -5,6 +5,8 @@ on:
   push:
     branches:
       - master
+    tags-ignore:
+      - '**' # A release via GitHub releases will publish a stable version
 
   release:
     types: [published]
@@ -23,56 +25,22 @@ jobs:
   lint_and_test:
-    name: Lint and run unit tests
-    runs-on: ubuntu-20.04
-    strategy:
-      matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11"]
+    name: Run lint and unit tests
+    uses: ./.github/workflows/lint_and_test.yaml
 
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v3
-
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
-        with:
-          python-version: ${{ matrix.python-version }}
-
-      - name: Install dependencies
-        run: make install-dev
-
-      - name: Lint
-        run: make lint
-
-      - name: Type check
-        run: make type-check
-
-      - name: Unit tests
-        run: make unit-tests
+  integration_tests:
+    name: Run integration tests
+    uses: ./.github/workflows/integration_tests.yaml
+    secrets: inherit
 
   check_docs:
     name: Check whether the documentation is up to date
-    runs-on: ubuntu-20.04
-
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v3
-
-      - name: Set up Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: 3.8
-
-      - name: Install dependencies
-        run: make install-dev
-
-      - name: Check whether docs are built from the latest code
-        run: make check-docs
+    uses: ./.github/workflows/check_docs.yaml
 
   deploy:
     name: Publish to PyPI
-    needs: [lint_and_test, check_docs]
-    runs-on: ubuntu-20.04
+    needs: [lint_and_test, integration_tests, check_docs]
+    runs-on: ubuntu-latest
 
     steps:
       - name: Checkout repository
diff --git a/.github/workflows/run_checks.yaml b/.github/workflows/run_checks.yaml
new file mode 100644
index 00000000..a1ad38c3
--- /dev/null
+++ b/.github/workflows/run_checks.yaml
@@ -0,0 +1,19 @@
+name: Code quality checks
+
+on:
+  pull_request:
+
+jobs:
+  lint_and_test:
+    name: Run lint and unit tests
+    uses: ./.github/workflows/lint_and_test.yaml
+
+  check_docs:
+    name: Check whether the documentation is up to date
+    uses: ./.github/workflows/check_docs.yaml
+
+  integration_tests:
+    name: Run integration tests
+    needs: [lint_and_test, check_docs]
+    uses: ./.github/workflows/integration_tests.yaml
+    secrets: inherit
diff --git a/Makefile b/Makefile
index ae870940..198dc9ae 100644
--- a/Makefile
+++ b/Makefile
@@ -1,5 +1,8 @@
 .PHONY: clean install-dev lint test type-check check-code format docs check-docs
 
+# This is default for local testing, but GitHub workflows override it to a higher value in CI
+INTEGRATION_TESTS_CONCURRENCY = 1
+
 clean:
 	rm -rf build dist .mypy_cache .pytest_cache src/*.egg-info __pycache__
 
@@ -16,7 +19,7 @@ unit-tests:
 	python3 -m pytest -n auto -ra tests/unit
 
 integration-tests:
-	python3 -m pytest -ra tests/integration
+	python3 -m pytest -n $(INTEGRATION_TESTS_CONCURRENCY) -ra tests/integration
 
 type-check:
 	python3 -m mypy
diff --git a/setup.py b/setup.py
index 692df04b..fc425344 100644
--- a/setup.py
+++ b/setup.py
@@ -63,6 +63,7 @@
     extras_require={
         'dev': [
            'autopep8 ~= 2.0.0',
+           'filelock ~= 3.9.0',
            'flake8 ~= 5.0.4',
            'flake8-bugbear ~= 22.10.27',
            'flake8-commas ~= 2.1.0',
diff --git a/tests/integration/actor_source_base/Dockerfile b/tests/integration/actor_source_base/Dockerfile
index d1672bd8..ac7c6243 100644
--- a/tests/integration/actor_source_base/Dockerfile
+++ b/tests/integration/actor_source_base/Dockerfile
@@ -1,6 +1,5 @@
-# TODO: make the Python version a parameter
-# so we can run integration tests in GitHub with a matrix of Python versions
-FROM apify/actor-python:3.9
+# The test fixture will put the right Python version here
+FROM apify/actor-python:BASE_IMAGE_VERSION_PLACEHOLDER
 
 COPY . ./
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index 9a752df3..d7e5c936 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -2,11 +2,13 @@
 import inspect
 import os
 import subprocess
+import sys
 import textwrap
 from pathlib import Path
 from typing import AsyncIterator, Awaitable, Callable, Dict, List, Mapping, Optional, Protocol, Union
 
 import pytest
+from filelock import FileLock
 
 from apify_client import ApifyClientAsync
 from apify_client.clients.resource_clients import ActorClientAsync
@@ -16,6 +18,7 @@
 
 TOKEN_ENV_VAR = 'APIFY_TEST_USER_API_TOKEN'
 API_URL_ENV_VAR = 'APIFY_INTEGRATION_TESTS_API_URL'
+SDK_ROOT_PATH = Path(__file__).parent.parent.parent.resolve()
 
 
 # This fixture can't be session-scoped,
@@ -34,8 +37,40 @@ def apify_client_async() -> ApifyClientAsync:
     return ApifyClientAsync(api_token, api_url=api_url)
 
 
+# Build the package wheel if it hasn't been built yet, and return the path to the wheel
 @pytest.fixture(scope='session')
-def actor_base_source_files() -> Dict[str, Union[str, bytes]]:
+def sdk_wheel_path(tmp_path_factory: pytest.TempPathFactory, testrun_uid: str) -> Path:
+    # Make sure the wheel is not being built concurrently across all the pytest-xdist runners,
+    # through locking the building process with a temp file
+    with FileLock(tmp_path_factory.getbasetemp().parent / 'sdk_wheel_build.lock'):
+        # Make sure the wheel is built exactly once across all the pytest-xdist runners,
+        # through an indicator file saying that the wheel was already built
+        was_wheel_built_this_test_run_file = tmp_path_factory.getbasetemp() / f'wheel_was_built_in_run_{testrun_uid}'
+        if not was_wheel_built_this_test_run_file.exists():
+            subprocess.run('python setup.py bdist_wheel', cwd=SDK_ROOT_PATH, shell=True, check=True, capture_output=True)
+            was_wheel_built_this_test_run_file.touch()
+
+    # Read the current package version, necessary for getting the right wheel filename
+    version_file = (SDK_ROOT_PATH / 'src/apify/_version.py').read_text(encoding='utf-8')
+    sdk_version = None
+    for line in version_file.splitlines():
+        if line.startswith('__version__'):
+            delim = '"' if '"' in line else "'"
+            sdk_version = line.split(delim)[1]
+            break
+    else:
+        raise RuntimeError('Unable to find version string.')
+
+    wheel_path = SDK_ROOT_PATH / 'dist' / f'apify-{sdk_version}-py3-none-any.whl'
+
+    # Just to be sure
+    assert wheel_path.exists()
+
+    return wheel_path
+
+
+@pytest.fixture(scope='session')
+def actor_base_source_files(sdk_wheel_path: Path) -> Dict[str, Union[str, bytes]]:
     """Create a dictionary of the base source files for a testing actor.
 
     It takes the files from `tests/integration/actor_source_base`,
@@ -57,24 +92,14 @@ def actor_base_source_files() -> Dict[str, Union[str, bytes]]:
         except ValueError:
             source_files[relative_path] = path.read_bytes()
 
-    # Then build the SDK and the wheel to the source files
-    subprocess.run('python setup.py bdist_wheel', cwd=sdk_root_path, shell=True, check=True, capture_output=True)
-
-    version_file = (sdk_root_path / 'src/apify/_version.py').read_text(encoding='utf-8')
-    sdk_version = None
-    for line in version_file.splitlines():
-        if line.startswith('__version__'):
-            delim = '"' if '"' in line else "'"
-            sdk_version = line.split(delim)[1]
-            break
-    else:
-        raise RuntimeError('Unable to find version string.')
+    sdk_wheel_file_name = sdk_wheel_path.name
+    source_files[sdk_wheel_file_name] = sdk_wheel_path.read_bytes()
 
-    wheel_file_name = f'apify-{sdk_version}-py3-none-any.whl'
-    wheel_path = sdk_root_path / 'dist' / wheel_file_name
+    source_files['requirements.txt'] = str(source_files['requirements.txt']).replace('APIFY_SDK_WHEEL_PLACEHOLDER', f'./{sdk_wheel_file_name}')
 
-    source_files[wheel_file_name] = wheel_path.read_bytes()
-    source_files['requirements.txt'] = str(source_files['requirements.txt']).replace('APIFY_SDK_WHEEL_PLACEHOLDER', f'./{wheel_file_name}')
+    current_major_minor_python_version = '.'.join([str(x) for x in sys.version_info[:2]])
+    integration_tests_python_version = os.getenv('INTEGRATION_TESTS_PYTHON_VERSION') or current_major_minor_python_version
+    source_files['Dockerfile'] = str(source_files['Dockerfile']).replace('BASE_IMAGE_VERSION_PLACEHOLDER', integration_tests_python_version)
 
     return source_files
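For readers skimming the diff: the core of the new `sdk_wheel_path` fixture is a lock-plus-marker pattern that lets multiple pytest-xdist workers share a single wheel build. The sketch below is a minimal standalone illustration of that pattern, assuming only the `filelock` dependency added in `setup.py`; the function and file names here are illustrative, not part of the SDK.

```python
from pathlib import Path
from typing import Callable

from filelock import FileLock  # same third-party lock the fixture uses


def run_once_per_test_run(shared_dir: Path, run_id: str, build: Callable[[], None]) -> None:
    """Run `build` exactly once, even when many worker processes call this concurrently.

    `shared_dir` must be visible to all workers (the fixture uses pytest's session
    temp dir); `run_id` identifies the current test run (the fixture uses
    pytest-xdist's `testrun_uid`). Both names are placeholders for this sketch.
    """
    # Serialize the check-and-build critical section across processes with a lock file
    with FileLock(str(shared_dir / 'build.lock')):
        # A marker file records that the work already happened in this run
        marker = shared_dir / f'built_in_run_{run_id}'
        if not marker.exists():
            build()          # e.g. `python setup.py bdist_wheel` in the fixture
            marker.touch()   # later workers see the marker and skip the build
```

The lock keeps two workers from building at the same time, while the per-run marker file makes the work idempotent across the whole run, which is why the fixture keys its indicator file on `testrun_uid`.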