From d2aa0af7f3e36385831868363fefc710de49b323 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Arturo=20Filast=C3=B2?= Date: Tue, 27 Feb 2024 09:35:48 +0100 Subject: [PATCH] Address feedback from spec review: https://github.com/ooni/spec/pull/292 * Add support for expiration date * Compute is_expired from expiration date at eval time * Add support for color * Drop creator_account_id from return value * Drop v string * Small bugfixing * Add alembic db migration * Fix tests and some bugs related to expiration time * Add more tests for expiration * Add more tests for OONI Run v2 * Reach 100% code coverage * Bump minor version number * Interpolate the OONI_PG_PASSWORD * Convert OONI Run link ID column to string * Handle the _intl fields being set to None * Bump API version to 0.4.1 * Add tests for filtering by only_latest * Add click to dev deps * Fix types of oonirun_link_id * Add a smoketest for oonirun v2 * Add a smoketest stage before pushing the docker tags * Rename test_new_api to test_legacy_ooniapi * Move mypy tests into test_legacy_ooniapi * Run alembic migration before starting pg host * bump dataapi version --- .github/workflows/build_dataapi.yml | 77 ++- .github/workflows/mypy.yml | 30 - .github/workflows/test_dataapi.yml | 1 + ...est_new_api.yml => test_legacy_ooniapi.py} | 25 +- api/fastapi/oonidataapi/alembic.ini | 2 +- api/fastapi/oonidataapi/alembic/Readme.md | 2 +- api/fastapi/oonidataapi/alembic/env.py | 15 +- ...841cb9549_make_oonirun_link_id_a_string.py | 37 + ...add_expiration_date_color_columns_drop_.py | 32 + api/fastapi/oonidataapi/models.py | 19 +- api/fastapi/oonidataapi/routers/oonirun.py | 165 +++-- api/fastapi/oonidataapi/tests/conftest.py | 10 +- .../oonidataapi/tests/run_smoketest.py | 51 ++ api/fastapi/oonidataapi/tests/test_oonirun.py | 635 ++++++++++++------ api/fastapi/poetry.lock | 2 +- api/fastapi/pyproject.toml | 3 +- 16 files changed, 768 insertions(+), 338 deletions(-) delete mode 100644 .github/workflows/mypy.yml rename .github/workflows/{test_new_api.yml => test_legacy_ooniapi.py} (70%) create mode 100644 api/fastapi/oonidataapi/alembic/versions/7d5841cb9549_make_oonirun_link_id_a_string.py create mode 100644 api/fastapi/oonidataapi/alembic/versions/836b3451a168_add_expiration_date_color_columns_drop_.py create mode 100644 api/fastapi/oonidataapi/tests/run_smoketest.py diff --git a/.github/workflows/build_dataapi.yml b/.github/workflows/build_dataapi.yml index d34dee95..848d8032 100644 --- a/.github/workflows/build_dataapi.yml +++ b/.github/workflows/build_dataapi.yml @@ -14,8 +14,28 @@ env: IMAGE_NAME: ooni/dataapi jobs: + test: + uses: ./.github/workflows/test_dataapi.yml + build_and_push: + name: Build and push + needs: [test] runs-on: ubuntu-latest + services: + postgres: + image: postgres + env: + POSTGRES_USER: oonipg + POSTGRES_PASSWORD: oonipg + POSTGRES_DB: oonipg + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + steps: - name: Checkout Repository uses: actions/checkout@v2 @@ -37,6 +57,7 @@ jobs: echo "version_number=$VERSION_NUMBER" >> "$GITHUB_OUTPUT" - name: Build and Push Docker Image + id: dockerbuild env: DOCKERFILE_PATH: ${{ env.oonidataapi_dir }} run: | @@ -45,6 +66,11 @@ jobs: TAG_BUILD_LABEL=$IMAGE_NAME:${{ steps.version.outputs.build_label }} TAG_VERSION=$IMAGE_NAME:v${{ steps.version.outputs.version_number }} + echo "tag_latest=$TAG_LATEST" >> $GITHUB_OUTPUT + echo "tag_environment=$TAG_ENVIRONMENT" >> $GITHUB_OUTPUT + echo "tag_build_label=$TAG_BUILD_LABEL" 
>> $GITHUB_OUTPUT + echo "tag_version=$TAG_VERSION" >> $GITHUB_OUTPUT + # Build Docker image with multiple tags docker build --build-arg BUILD_LABEL=${{ steps.version.outputs.build_label }} \ -t $TAG_BUILD_LABEL \ @@ -52,12 +78,55 @@ jobs: -t $TAG_LATEST \ -t $TAG_VERSION \ $DOCKERFILE_PATH + # Setup python + - name: Set up Python 3.11 + uses: actions/setup-python@v4 + with: + python-version: 3.11 + - name: Install poetry + run: | + curl -fsS https://install.python-poetry.org | python - --preview -y + + - name: Add poetry to PATH + run: echo "$HOME/.local/bin" >> $GITHUB_PATH + + - name: Set up poetry cache + uses: actions/cache@v3 + with: + path: "$HOME/.cache/pypoetry/virtualenvs" + key: venv-${{ runner.os }}-${{ hashFiles('**/api/fastapi/poetry.lock') }} + + - name: Install dependencies + run: poetry install + working-directory: ./api/fastapi/ + + # Configure database and docker + - name: Run alembic migrations + env: + OONI_PG_PASSWORD: oonipg + OONI_PG_HOST: localhost + run: poetry run alembic upgrade head + working-directory: ./api/fastapi/oonidataapi/ + + - name: Start Docker container with PostgreSQL + run: | + docker run -d --name oonidataapi -p 8000:80 \ + -e POSTGRESQL_URL="postgresql://oonipg:oonipg@localhost/oonipg" \ + ${{ steps.dockerbuild.outputs.tag_version }} + + # Run smoke test + #- name: Run smoketest against the built docker image + # run: poetry run python oonidataapi/tests/run_smoketest.py --backend-base-url=http://localhost:8000/ + # working-directory: ./api/fastapi/ + + - name: Push docker tags + run: | # Push all tags - docker push $TAG_BUILD_LABEL - docker push $TAG_ENVIRONMENT - docker push $TAG_LATEST - docker push $TAG_VERSION + docker push ${{ steps.dockerbuild.outputs.tag_latest }} + docker push ${{ steps.dockerbuild.outputs.tag_environment }} + docker push ${{ steps.dockerbuild.outputs.tag_build_label }} + docker push ${{ steps.dockerbuild.outputs.tag_version }} #- name: Checkout ooni/devops # uses: actions/checkout@v2 diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml deleted file mode 100644 index e851f5a9..00000000 --- a/.github/workflows/mypy.yml +++ /dev/null @@ -1,30 +0,0 @@ -name: Run mypy on API -on: - pull_request: - paths: - - 'api/**' - -jobs: - test: - runs-on: ubuntu-latest - container: debian:11 - - steps: - - name: Check out repository code - uses: actions/checkout@v2 - - - name: Setup APT - run: | - apt-get update - apt-get install --no-install-recommends -y ca-certificates gnupg - echo "deb http://deb-ci.ooni.org unstable main" >> /etc/apt/sources.list - apt-key adv --verbose --keyserver hkp://keyserver.ubuntu.com --recv-keys "B5A08F01796E7F521861B449372D1FF271F2DD50" - - - name: Install dependencies - run: | - apt-get update - apt-get install --no-install-recommends -qy mypy - - - name: Run tests - # see the mypy.ini file - run: cd api && mypy **/*.py diff --git a/.github/workflows/test_dataapi.yml b/.github/workflows/test_dataapi.yml index dc830fd3..ea95c5a7 100644 --- a/.github/workflows/test_dataapi.yml +++ b/.github/workflows/test_dataapi.yml @@ -6,6 +6,7 @@ on: pull_request: branches: - "*" + workflow_call: jobs: run_tests: runs-on: ubuntu-latest diff --git a/.github/workflows/test_new_api.yml b/.github/workflows/test_legacy_ooniapi.py similarity index 70% rename from .github/workflows/test_new_api.yml rename to .github/workflows/test_legacy_ooniapi.py index 69757324..a355a0b6 100644 --- a/.github/workflows/test_new_api.yml +++ b/.github/workflows/test_legacy_ooniapi.py @@ -1,4 +1,4 @@ -name: Test API +name: Test 
Legacy API on: pull_request: workflow_dispatch: @@ -9,6 +9,29 @@ default: false jobs: + mypy: + runs-on: ubuntu-latest + container: debian:11 + steps: + - name: Check out repository code + uses: actions/checkout@v2 + + - name: Setup APT + run: | + apt-get update + apt-get install --no-install-recommends -y ca-certificates gnupg + echo "deb http://deb-ci.ooni.org unstable main" >> /etc/apt/sources.list + apt-key adv --verbose --keyserver hkp://keyserver.ubuntu.com --recv-keys "B5A08F01796E7F521861B449372D1FF271F2DD50" + + - name: Install dependencies + run: | + apt-get update + apt-get install --no-install-recommends -qy mypy + + - name: Run tests + # see the mypy.ini file + run: cd api && mypy **/*.py + integration_test: runs-on: ubuntu-latest steps: diff --git a/api/fastapi/oonidataapi/alembic.ini b/api/fastapi/oonidataapi/alembic.ini index 2fc20c26..ce134c2f 100644 --- a/api/fastapi/oonidataapi/alembic.ini +++ b/api/fastapi/oonidataapi/alembic.ini @@ -60,7 +60,7 @@ version_path_separator = os # Use os.pathsep. Default configuration used for ne # are written from script.py.mako # output_encoding = utf-8 -sqlalchemy.url = postgresql://oonipg@postgres.tier0.prod.ooni.nu/oonipg +sqlalchemy.url = postgresql://oonipg:%(OONI_PG_PASSWORD)s@%(OONI_PG_HOST)s/oonipg [post_write_hooks] diff --git a/api/fastapi/oonidataapi/alembic/Readme.md b/api/fastapi/oonidataapi/alembic/Readme.md index 0ea06a45..ad30644d 100644 --- a/api/fastapi/oonidataapi/alembic/Readme.md +++ b/api/fastapi/oonidataapi/alembic/Readme.md @@ -11,7 +11,7 @@ poetry run alembic revision -m "name of the revision" 2. Edit the newly created python file and fill out the `upgrade()` and `downgrade()` function with the relevant code bits 3. You can now run the migration like so: ``` -poetry run alembic upgrade head +OONI_PG_PASSWORD=XXXX poetry run alembic upgrade head ``` diff --git a/api/fastapi/oonidataapi/alembic/env.py b/api/fastapi/oonidataapi/alembic/env.py index 279d1dcd..a99ece64 100644 --- a/api/fastapi/oonidataapi/alembic/env.py +++ b/api/fastapi/oonidataapi/alembic/env.py @@ -1,3 +1,5 @@ +import os + from logging.config import fileConfig from sqlalchemy import engine_from_config @@ -19,8 +21,17 @@ # from myapp import mymodel # target_metadata = mymodel.Base.metadata from oonidataapi import models + target_metadata = models.Base.metadata +section = config.config_ini_section +config.set_section_option( + section, "OONI_PG_PASSWORD", os.environ.get("OONI_PG_PASSWORD", "") +) +config.set_section_option( + section, "OONI_PG_HOST", os.environ.get("OONI_PG_HOST", "postgres.tier0.prod.ooni.nu") +) + # other values from the config, defined by the needs of env.py, # can be acquired: # my_important_option = config.get_main_option("my_important_option") @@ -65,9 +76,7 @@ def run_migrations_online() -> None: ) with connectable.connect() as connection: - context.configure( - connection=connection, target_metadata=target_metadata - ) + context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() diff --git a/api/fastapi/oonidataapi/alembic/versions/7d5841cb9549_make_oonirun_link_id_a_string.py b/api/fastapi/oonidataapi/alembic/versions/7d5841cb9549_make_oonirun_link_id_a_string.py new file mode 100644 index 00000000..f7dccf63 --- /dev/null +++ b/api/fastapi/oonidataapi/alembic/versions/7d5841cb9549_make_oonirun_link_id_a_string.py @@ -0,0 +1,37 @@ +"""make oonirun link id a string + +Revision ID: 7d5841cb9549 +Revises: 836b3451a168 +Create Date: 2024-02-28 
15:41:53.811746 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = "7d5841cb9549" +down_revision: Union[str, None] = "836b3451a168" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.execute( + """ + ALTER TABLE oonirun + ALTER COLUMN oonirun_link_id TYPE TEXT USING oonirun_link_id::TEXT + """ + ) + + +def downgrade() -> None: + op.execute( + """ + ALTER TABLE oonirun + ALTER COLUMN oonirun_link_id TYPE INTEGER USING oonirun_link_id::INTEGER + """ + ) diff --git a/api/fastapi/oonidataapi/alembic/versions/836b3451a168_add_expiration_date_color_columns_drop_.py b/api/fastapi/oonidataapi/alembic/versions/836b3451a168_add_expiration_date_color_columns_drop_.py new file mode 100644 index 00000000..2ac09b5c --- /dev/null +++ b/api/fastapi/oonidataapi/alembic/versions/836b3451a168_add_expiration_date_color_columns_drop_.py @@ -0,0 +1,32 @@ +"""Add expiration_date, color columns. Drop is_archived column. + +Revision ID: 836b3451a168 +Revises: f96cf47f2791 +Create Date: 2024-02-27 09:44:26.833238 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = "836b3451a168" +down_revision: Union[str, None] = "f96cf47f2791" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + op.add_column( + "oonirun", sa.Column("expiration_date", sa.DateTime(), nullable=False) + ) + op.add_column("oonirun", sa.Column("color", sa.String(), nullable=True)) + op.drop_column("oonirun", "is_archived") + + +def downgrade() -> None: + op.drop_column("oonirun", "expiration_date") + op.drop_column("oonirun", "color") diff --git a/api/fastapi/oonidataapi/models.py b/api/fastapi/oonidataapi/models.py index f78cec97..b8ef1ddc 100644 --- a/api/fastapi/oonidataapi/models.py +++ b/api/fastapi/oonidataapi/models.py @@ -1,3 +1,4 @@ +from datetime import timezone from sqlalchemy import Boolean, Column, Integer, String, DateTime, JSON from .postgresql import Base @@ -6,12 +7,26 @@ class OONIRunLink(Base): __tablename__ = "oonirun" - oonirun_link_id = Column(Integer, primary_key=True) + oonirun_link_id = Column(String, primary_key=True) revision = Column(Integer, default=1, primary_key=True) date_updated = Column(DateTime) date_created = Column(DateTime) creator_account_id = Column(String) + expiration_date = Column(DateTime, nullable=False) + + # Timezones are kind of tricky. We assume everything is always in UTC, + # but more recent versions of Python rightfully complain if the timezone + # is not encoded in the object itself. + # To avoid making this a DB-specific change, we don't introduce a + # timezone-aware TIMESTAMP column (which would let us retrieve + # timezone-aware objects) and instead cast to the timezone-aware + # equivalent in the code.
+ # See: https://stackoverflow.com/questions/414952/sqlalchemy-datetime-timezone + @property + def expiration_date_dt_native(self): + return self.expiration_date.replace(tzinfo=timezone.utc) + name = Column(String) name_intl = Column(JSON, nullable=True) short_description = Column(String) @@ -20,5 +35,5 @@ class OONIRunLink(Base): description_intl = Column(JSON, nullable=True) author = Column(String) icon = Column(String) + color = Column(String) nettests = Column(JSON) - is_archived = Column(Boolean, default=False) diff --git a/api/fastapi/oonidataapi/routers/oonirun.py b/api/fastapi/oonidataapi/routers/oonirun.py index eea80b51..ea213c1c 100644 --- a/api/fastapi/oonidataapi/routers/oonirun.py +++ b/api/fastapi/oonidataapi/routers/oonirun.py @@ -4,7 +4,7 @@ https://github.com/ooni/spec/blob/master/backends/bk-005-ooni-run-v2.md """ -from datetime import datetime +from datetime import datetime, timedelta, timezone from os import urandom from sys import byteorder from typing import Dict, Any, List, Optional @@ -12,7 +12,7 @@ import logging from fastapi import APIRouter, Depends, Query, HTTPException, Header -from pydantic import constr, Field, validator +from pydantic import computed_field, constr, Field, validator from pydantic import BaseModel as PydandicBaseModel from typing_extensions import Annotated @@ -45,7 +45,9 @@ class Config: class OONIRunLinkBase(BaseModel): - name: str = Field(default="", title="name of the ooni run link", min_length=2) + name: str = Field( + default="", title="name of the ooni run link", min_length=2, max_length=50 + ) short_description: str = Field( default="", title="short description of the ooni run link", @@ -58,7 +60,7 @@ class OONIRunLinkBase(BaseModel): ) author: str = Field( default="", - title="public author name of ooni run link", + title="public email address of the author name of the ooni run link", min_length=2, max_length=100, ) @@ -80,37 +82,52 @@ class OONIRunLinkBase(BaseModel): @validator("name_intl", "short_description_intl", "description_intl") def validate_intl(cls, v): + # None is also a valid type + if v is None: + return v for value in v.values(): if len(value) < 2: raise ValueError("must be at least 2 characters") return v - icon: Optional[str] = "" + icon: Optional[str] = Field( + default=None, + description="icon to use for the ooni run link", + ) + color: Optional[str] = Field( + default=None, + description="color to use for the ooni run link as a hex value prefixed with #", + pattern="^#(?:[0-9a-fA-F]{6})$", + ) + expiration_date: datetime = Field( + default_factory=lambda: datetime.now(timezone.utc) + timedelta(days=30 * 6), + description="future date after which the ooni run link will be considered expired and no longer editable or usable (defaults to 6 months from now)", + ) class OONIRunLink(OONIRunLinkBase): - is_archived: Optional[bool] = False oonirun_link_id: int date_created: datetime date_updated: datetime - creator_account_id: str revision: int is_mine: Optional[bool] = False - v: int = 1 + @computed_field + @property + def is_expired(self) -> bool: + # See docstring of models.OONIRunLink.expiration_date_dt_native + return self.expiration_date.replace(tzinfo=timezone.utc) < datetime.now( + timezone.utc + ) class Config: orm_mode = True -class OONIRunLinkCreate(OONIRunLinkBase): +class OONIRunLinkCreateEdit(OONIRunLinkBase): pass -class OONIRunLinkEdit(OONIRunLinkBase): - is_archived: Optional[bool] = False - - def generate_random_intuid() -> int: collector_id = 0 randint = int.from_bytes(urandom(4), byteorder) @@ -119,12 
+136,12 @@ def generate_random_intuid() -> int: @router.post( "/v2/oonirun", - tags=["oonirunv2"], + tags=["oonirun"], dependencies=[Depends(role_required(["admin", "user"]))], response_model=OONIRunLink, ) def create_oonirun_link( - create_request: OONIRunLinkCreate, + create_request: OONIRunLinkCreateEdit, authorization: str = Header("authorization"), db=Depends(get_postgresql_session), ): @@ -133,7 +150,7 @@ def create_oonirun_link( account_id = get_account_id_or_raise(authorization) assert create_request - now = datetime.utcnow().replace(microsecond=0) + now = datetime.now(timezone.utc).replace(microsecond=0) oonirun_link = models.OONIRunLink( oonirun_link_id=generate_random_intuid(), @@ -147,7 +164,8 @@ def create_oonirun_link( author=create_request.author, nettests=create_request.nettests, icon=create_request.icon, - is_archived=False, + color=create_request.color, + expiration_date=create_request.expiration_date, date_created=now, date_updated=now, ) @@ -166,8 +184,8 @@ def create_oonirun_link( response_model=OONIRunLink, ) def edit_oonirun_link( - oonirun_link_id: int, - edit_request: OONIRunLinkEdit, + oonirun_link_id: str, + edit_request: OONIRunLinkCreateEdit, authorization: str = Header("authorization"), db=Depends(get_postgresql_session), ): @@ -175,7 +193,7 @@ def edit_oonirun_link( log.debug(f"edit oonirun {oonirun_link_id}") account_id = get_account_id_or_raise(authorization) - now = datetime.utcnow().replace(microsecond=0) + now = datetime.now(timezone.utc).replace(microsecond=0) q = db.query(models.OONIRunLink).filter( models.OONIRunLink.oonirun_link_id == oonirun_link_id @@ -186,6 +204,24 @@ def edit_oonirun_link( if not oonirun_link: raise HTTPException(status_code=404, detail="OONI Run link not found") + if oonirun_link.expiration_date_dt_native < now: + raise HTTPException( + status_code=403, + detail="OONI Run link has expired and cannot be edited", + ) + + if edit_request.expiration_date is not None: + q = db.query(models.OONIRunLink).filter( + models.OONIRunLink.oonirun_link_id == oonirun_link_id, + # Timezones in python are a mess... 
+ models.OONIRunLink.expiration_date > now.replace(tzinfo=None), + ) + if get_client_role(authorization) != "admin": + q = q.filter(models.OONIRunLink.creator_account_id == account_id) + + q.update({"expiration_date": edit_request.expiration_date}) + db.commit() + current_nettests = oonirun_link.nettests if current_nettests != edit_request.nettests: new_oonirun_link = models.OONIRunLink( @@ -200,7 +236,8 @@ def edit_oonirun_link( author=edit_request.author, nettests=edit_request.nettests, icon=edit_request.icon, - is_archived=edit_request.is_archived, + color=edit_request.color, + expiration_date=edit_request.expiration_date, revision=int(oonirun_link.revision + 1), date_created=now, date_updated=now, @@ -218,18 +255,19 @@ def edit_oonirun_link( oonirun_link.author = edit_request.author oonirun_link.nettests = edit_request.nettests oonirun_link.icon = edit_request.icon - oonirun_link.is_archived = edit_request.is_archived + oonirun_link.color = edit_request.color + oonirun_link.expiration_date = edit_request.expiration_date oonirun_link.date_updated = now db.commit() return oonirun_link -@metrics.timer("fetch_oonirun_descriptor") +@metrics.timer("fetch_oonirun_link") @router.get( "/v2/oonirun/{oonirun_link_id}", tags=["oonirun"], response_model=OONIRunLink ) -def fetch_oonirun_descriptor( - oonirun_link_id: int, +def fetch_oonirun_link( + oonirun_link_id: str, revision: Annotated[ Optional[int], Query( @@ -258,17 +296,16 @@ def fetch_oonirun_descriptor( return oonirun_link -class OONIRunDescriptorList(BaseModel): - descriptors: List[OONIRunLink] - v: int = 1 +class OONIRunLinkList(BaseModel): + links: List[OONIRunLink] class Config: orm_mode = True -@router.get("/v2/oonirun/", tags=["oonirun"]) -def list_oonirun_descriptors( - ooni_run_link_id: Annotated[ +@router.get("/v2/oonirun_links", tags=["oonirun"]) +def list_oonirun_links( + oonirun_link_id: Annotated[ Optional[str], Query(description="OONI Run descriptors comma separated"), ] = None, @@ -280,53 +317,47 @@ def list_oonirun_descriptors( Optional[bool], Query(description="List only the my descriptors"), ] = None, - include_archived: Annotated[ + include_expired: Annotated[ Optional[bool], - Query(description="List also archived descriptors"), + Query(description="List also expired descriptors"), ] = None, authorization: str = Header("authorization"), db=Depends(get_postgresql_session), -) -> OONIRunDescriptorList: +) -> OONIRunLinkList: """List OONIRun descriptors""" log.debug("list oonirun") account_id = get_account_id_or_none(authorization) q = db.query(models.OONIRunLink) - try: - if only_latest: - subquery = ( - db.query( - models.OONIRunLink.oonirun_link_id, - sqlalchemy.func.max(models.OONIRunLink.revision).label("revision"), - ) - .group_by(models.OONIRunLink.oonirun_link_id) - .subquery("latest_link") - ) - q = q.filter( - sqlalchemy.tuple_( - models.OONIRunLink.oonirun_link_id, - models.OONIRunLink.revision, - ).in_(subquery) - ) - if not include_archived: - q = q.filter(models.OONIRunLink.is_archived == False) - if only_mine: - q = q.filter(models.OONIRunLink.creator_account_id == account_id) - - if ooni_run_link_id: - q = q.filter( - models.OONIRunLink.oonirun_link_id.in_(commasplit(ooni_run_link_id)) + if only_latest: + subquery = ( + db.query( + models.OONIRunLink.oonirun_link_id, + sqlalchemy.func.max(models.OONIRunLink.revision).label("revision"), ) + .group_by(models.OONIRunLink.oonirun_link_id) + .subquery("latest_link") + ) + q = q.filter( + sqlalchemy.tuple_( + models.OONIRunLink.oonirun_link_id, + 
models.OONIRunLink.revision, + ).in_(subquery) + ) + if not include_expired: + q = q.filter(models.OONIRunLink.expiration_date > datetime.now(timezone.utc)) + if only_mine: + q = q.filter(models.OONIRunLink.creator_account_id == account_id) - except Exception as e: - log.debug(f"list_oonirun_descriptors: invalid parameter. {e}") - raise HTTPException(status_code=400, detail="Incorrect parameter used") + if oonirun_link_id: + q = q.filter( + models.OONIRunLink.oonirun_link_id.in_(commasplit(oonirun_link_id)) + ) - descriptors = [] + links = [] for row in q.all(): oonirun_link = OONIRunLink( oonirun_link_id=row.oonirun_link_id, - creator_account_id=row.creator_account_id, name=row.name, name_intl=row.name_intl, short_description=row.short_description, @@ -336,12 +367,12 @@ def list_oonirun_descriptors( author=row.author, nettests=row.nettests, icon=row.icon, - is_archived=row.is_archived, + expiration_date=row.expiration_date, revision=row.revision, date_created=row.date_created, date_updated=row.date_updated, is_mine=account_id == row.creator_account_id, ) - descriptors.append(oonirun_link) - log.debug(f"Returning {len(descriptors)} descriptor[s]") - return OONIRunDescriptorList(v=1, descriptors=descriptors) + links.append(oonirun_link) + log.debug(f"Returning {len(links)} ooni run links") + return OONIRunLinkList(links=links) diff --git a/api/fastapi/oonidataapi/tests/conftest.py b/api/fastapi/oonidataapi/tests/conftest.py index 15f2d068..4f01cabb 100644 --- a/api/fastapi/oonidataapi/tests/conftest.py +++ b/api/fastapi/oonidataapi/tests/conftest.py @@ -15,7 +15,7 @@ from sqlalchemy.orm import sessionmaker -def setup_db(db_url): +def setup_db_alembic(db_url): from alembic import command from alembic.config import Config @@ -24,13 +24,17 @@ def setup_db(db_url): alembic_cfg = Config() alembic_cfg.set_main_option("script_location", str(migrations_path)) alembic_cfg.set_main_option("sqlalchemy.url", db_url) - print(migrations_path) - print(db_url) ret = command.upgrade(alembic_cfg, "head") print(ret) +def setup_db(db_url): + engine = create_engine(db_url, connect_args={"check_same_thread": False}) + metadata = models.OONIRunLink.metadata + metadata.create_all(engine) + + def override_pg(db_url): def f(): engine = create_engine(db_url, connect_args={"check_same_thread": False}) diff --git a/api/fastapi/oonidataapi/tests/run_smoketest.py b/api/fastapi/oonidataapi/tests/run_smoketest.py new file mode 100644 index 00000000..95e0bc40 --- /dev/null +++ b/api/fastapi/oonidataapi/tests/run_smoketest.py @@ -0,0 +1,51 @@ +import httpx +import time +import click +import random + + +def test_oonirun(client): + r = client.get("/api/v2/oonirun_links") + j = r.json() + assert r.status_code == 200, j + desc = j["links"] + assert isinstance(desc, list) + if len(desc) > 0: + for _ in range(5): + d = random.choice(desc) + client.get(f'/api/v2/oonirun/{d["oonirun_link_id"]}').raise_for_status() + + +def wait_for_backend(backend_base_url, timeout=10): + start_time = time.time() + + while True: + try: + with httpx.Client(base_url=backend_base_url) as client: + r = client.get("/version") + if r.status_code == 200: + print("Service ready") + break + except Exception as e: + print(f"Connection failed: {e}") + + if time.time() - start_time > timeout: + raise TimeoutError("Service did not become available in time") + + time.sleep(1) + +@click.command() +@click.option( + "--backend-base-url", + default="http://localhost:8000", + help="Base URL of the backend", +) +def smoketest(backend_base_url): + """Run a smoke test 
against a running backend""" + wait_for_backend(backend_base_url) + + with httpx.Client(base_url=backend_base_url) as client: + test_oonirun(client) + +if __name__ == "__main__": + smoketest() diff --git a/api/fastapi/oonidataapi/tests/test_oonirun.py b/api/fastapi/oonidataapi/tests/test_oonirun.py index b573397f..45a68e74 100644 --- a/api/fastapi/oonidataapi/tests/test_oonirun.py +++ b/api/fastapi/oonidataapi/tests/test_oonirun.py @@ -2,223 +2,258 @@ Integration test for OONIRn API """ +from copy import deepcopy +from datetime import datetime, timedelta, timezone +import time -def test_oonirun_create_and_fetch( - client, client_with_user_role, client_with_admin_role -): - say = print - say("Reject empty name") - z = { - "name": "", - "name_intl": { - "it": "", - }, - "description": "integ-test description in English", - "description_intl": { - "es": "integ-test descripción en español", - }, - "short_description": "integ-test short description in English", - "short_description_intl": { - "it": "integ-test descrizione breve in italiano", - }, - "icon": "myicon", - "author": "integ-test author", - "nettests": [ - { - "inputs": ["https://example.com/", "https://ooni.org/"], - "options": { - "HTTP3Enabled": True, - }, - "test_name": "web_connectivity", +SAMPLE_OONIRUN = { + "name": "", + "name_intl": {}, + "description": "integ-test description in English", + "description_intl": { + "es": "integ-test descripción en español", + }, + "short_description": "integ-test short description in English", + "short_description_intl": { + "it": "integ-test descrizione breve in italiano", + }, + "icon": "myicon", + "author": "integ-test author", + "nettests": [ + { + "inputs": ["https://example.com/", "https://ooni.org/"], + "options": { + "HTTP3Enabled": True, }, - {"test_name": "dnscheck"}, - ], - } - say("Empty name") + "test_name": "web_connectivity", + }, + {"test_name": "dnscheck"}, + ], +} + +EXPECTED_OONIRUN_LINK_PUBLIC_KEYS = [ + "oonirun_link_id", + "date_created", + "date_updated", + "revision", + "is_mine", + "is_expired", + "name", + "short_description", + "description", + "author", + "nettests", + "name_intl", + "short_description_intl", + "description_intl", + "icon", + "color", + "expiration_date", +] + + +def test_oonirun_validation(client, client_with_user_role, client_with_admin_role): + z = deepcopy(SAMPLE_OONIRUN) + r = client_with_user_role.post("/api/v2/oonirun", json=z) + assert r.status_code == 422, "empty name should be rejected" + + z["name"] = "integ-test name in English" + z["name_intl"] = {"it": ""} r = client_with_user_role.post("/api/v2/oonirun", json=z) - assert r.status_code == 422, r.json() + assert r.status_code == 422, "empty name_intl should be rejected" + + z = deepcopy(SAMPLE_OONIRUN) + r = client_with_user_role.post("/api/v2/oonirun", json=z) + assert r.status_code == 422, "empty name should be rejected" - say("Empty name_intl->it") z["name"] = "integ-test name in English" + z["name_intl"] = None r = client_with_user_role.post("/api/v2/oonirun", json=z) - assert r.status_code == 422, r.json() + assert r.status_code == 200, "name_intl can be None" + +def test_oonirun_not_found(client, client_with_user_role, client_with_admin_role): + z = deepcopy(SAMPLE_OONIRUN) ### Create descriptor as user + z["name"] = "integ-test name in English" z["name_intl"]["it"] = "integ-test nome in italiano" r = client_with_user_role.post("/api/v2/oonirun", json=z) - print(r.json()) assert r.status_code == 200, r.json() - assert r.json()["v"] == 1, r.json() - assert 
str(r.json()["oonirun_link_id"]).endswith("00") - ooni_run_link_id = int(r.json()["oonirun_link_id"]) - - say("fetch latest") - r = client_with_user_role.get(f"/api/v2/oonirun/{ooni_run_link_id}") - assert r.status_code == 200, r.json() - assert r.json()["v"] == 1, r.json() - exp_fetch_fields = [ - "nettests", - "date_created", - "date_updated", - "is_archived", - "is_mine", - "name", - "v", - ] - missing_keys = set(exp_fetch_fields) - set(r.json().keys()) - assert len(missing_keys) == 0 - exp = { - "name": "integ-test name in English", - "name_intl": { - "it": "integ-test nome in italiano", - }, - "description": "integ-test description in English", - "description_intl": { - "es": "integ-test descripción en español", - }, - "short_description": "integ-test short description in English", - "short_description_intl": { - "it": "integ-test descrizione breve in italiano", - }, - "icon": "myicon", - "is_archived": False, - "author": "integ-test author", - "nettests": [ - { - "inputs": ["https://example.com/", "https://ooni.org/"], - "options": { - "HTTP3Enabled": True, - }, - "test_name": "web_connectivity", - }, - {"test_name": "dnscheck"}, - ], - } j = r.json() - for k, v in exp.items(): - assert j[k] == v, f"{k} {j[k]}!= {v}" + assert str(j["oonirun_link_id"]).endswith("00") + oonirun_link_id = int(r.json()["oonirun_link_id"]) + + j["expiration_date"] = ( + datetime.now(timezone.utc) + timedelta(minutes=-1) + ).strftime("%Y-%m-%dT%H:%M:%S.%fZ") + r = client_with_user_role.put(f"/api/v2/oonirun/{oonirun_link_id}", json=j) + assert r.status_code == 200, r.json() + + not_existing_link_id = "1234676871672836187" + r = client_with_user_role.put(f"/api/v2/oonirun/{not_existing_link_id}", json=j) + assert r.status_code == 404, r.json() + + r = client.get(f"/api/v2/oonirun/{not_existing_link_id}") + assert r.status_code == 404, r.json() - creation_time = r.json()["date_created"] - revision = r.json()["revision"] - assert creation_time.endswith("Z") + r = client_with_user_role.put(f"/api/v2/oonirun/{oonirun_link_id}", json=j) + assert r.status_code == 403, "expired link cannot be edited" - say = print - say("fetch by creation_time") r = client_with_user_role.get( - f"/api/v2/oonirun/{ooni_run_link_id}?revision={revision}" + f"/api/v2/oonirun_links?oonirun_link_id={oonirun_link_id}" ) + j = r.json() + assert r.status_code == 200, r.json() + assert j["links"] == [] + + +def test_oonirun_full_workflow(client, client_with_user_role, client_with_admin_role): + z = deepcopy(SAMPLE_OONIRUN) + ### Create 2 descriptors as user + z["name"] = "integ-test name in English" + z["name_intl"]["it"] = "integ-test nome in italiano" + r = client_with_user_role.post("/api/v2/oonirun", json=z) + assert r.status_code == 200, r.json() + assert str(r.json()["oonirun_link_id"]).endswith("00") + oonirun_link_id = int(r.json()["oonirun_link_id"]) + + z["name"] = "second descriptor in English" + z["name_intl"]["it"] = "second integ-test nome in italiano" + r = client_with_user_role.post("/api/v2/oonirun", json=z) + assert r.status_code == 200, r.json() + assert str(r.json()["oonirun_link_id"]).endswith("00") + oonirun_link_id = int(r.json()["oonirun_link_id"]) + + r = client_with_user_role.get(f"/api/v2/oonirun/{oonirun_link_id}") + assert r.status_code == 200, r.json() + + j = r.json() + assert j["name"] == z["name"] + assert j["name_intl"] == z["name_intl"] + assert j["description"] == z["description"] + assert j["nettests"] == z["nettests"] + date_created = datetime.strptime( + j["date_created"], "%Y-%m-%dT%H:%M:%S.%fZ" + 
).replace(tzinfo=timezone.utc) + assert date_created < datetime.now(timezone.utc) + assert date_created > datetime.now(timezone.utc) + timedelta(hours=-1) + + date_updated = datetime.strptime( + j["date_updated"], "%Y-%m-%dT%H:%M:%S.%fZ" + ).replace(tzinfo=timezone.utc) + assert date_updated < datetime.now(timezone.utc) + assert date_updated > datetime.now(timezone.utc) + timedelta(hours=-1) + + assert j["is_mine"] == True + assert j["revision"] == 1 + + ## Fetch by revision + r = client_with_user_role.get(f"/api/v2/oonirun/{oonirun_link_id}?revision=1") + assert r.status_code == 200, r.json() + + j = r.json() + assert j["name"] == z["name"] + assert j["name_intl"] == z["name_intl"] + assert j["author"] == z["author"] + assert j["description"] == z["description"] + assert j["nettests"] == z["nettests"] + + date_created = datetime.strptime( + j["date_created"], "%Y-%m-%dT%H:%M:%S.%fZ" + ).replace(tzinfo=timezone.utc) + assert date_created < datetime.now(timezone.utc) + assert date_created > datetime.now(timezone.utc) + timedelta(hours=-1) + + date_updated = datetime.strptime( + j["date_updated"], "%Y-%m-%dT%H:%M:%S.%fZ" + ).replace(tzinfo=timezone.utc) + assert date_updated < datetime.now(timezone.utc) + assert date_updated > datetime.now(timezone.utc) + timedelta(hours=-1) + + assert j["is_mine"] == True + assert j["revision"] == 1 + + r = client_with_user_role.get("/api/v2/oonirun_links") + assert r.status_code == 200, r.json() + + j = r.json() + assert len(j["links"]) > 0 + + found = False + for d in j["links"]: + if d["oonirun_link_id"] == oonirun_link_id: + found = True + assert sorted(d.keys()) == sorted(EXPECTED_OONIRUN_LINK_PUBLIC_KEYS) + assert found == True + + ## list all items as admin + r = client_with_admin_role.get("/api/v2/oonirun_links") assert r.status_code == 200, r.json() - assert r.json()["v"] == 1, r.json() - missing_keys = set(exp_fetch_fields) - set(r.json().keys()) - assert len(missing_keys) == 0 + j = r.json() - for k, v in exp.items(): - assert j[k] == v, f"{k} {j[k]}!= {v}" - assert creation_time == r.json()["date_created"] - assert revision == r.json()["revision"] - - say("list my items") - exp_list_fields = [ - "author", - "date_updated", - "icon", - "is_archived", - "is_mine", - "name", - "oonirun_link_id", - "short_description", - "date_created", - ] - r = client_with_user_role.get("/api/v2/oonirun/") - assert r.status_code == 200, r.json() - assert r.json()["v"] == 1, r.json() - assert sorted(r.json()) == ["descriptors", "v"] - assert len(r.json()["descriptors"]) > 0 - missing_keys = set(exp_list_fields) - set(r.json()["descriptors"][0].keys()) - assert len(missing_keys) == 0 - found = [ - d for d in r.json()["descriptors"] if d["oonirun_link_id"] == ooni_run_link_id - ] - assert len(found) == 1 - - say("list all items as admin") - r = client_with_admin_role.get("/api/v2/oonirun/") - assert r.status_code == 200, r.json() - assert r.json()["v"] == 1, r.json() - assert sorted(r.json()) == ["descriptors", "v"] - assert len(r.json()["descriptors"]) > 0 - missing_keys = set(exp_list_fields) - set(r.json()["descriptors"][0].keys()) - assert len(missing_keys) == 0 - found = [ - d for d in r.json()["descriptors"] if d["oonirun_link_id"] == ooni_run_link_id - ] - assert len(found) == 1 + assert len(j["links"]) > 0 + + found = False + for d in j["links"]: + if d["oonirun_link_id"] == oonirun_link_id: + found = True + assert sorted(d.keys()) == sorted(EXPECTED_OONIRUN_LINK_PUBLIC_KEYS) + assert found == True ## find the item created by client_with_user_role above # 
fixme # assert desc[0]["name_intl"] == "integ-test" - say("list all items as anonymous") - r = client.get("/api/v2/oonirun/") - assert r.status_code == 200, r.json() - assert r.json()["v"] == 1, r.json() - assert sorted(r.json()) == ["descriptors", "v"] - assert len(r.json()["descriptors"]) > 0 - missing_keys = set(exp_list_fields) - set(r.json()["descriptors"][0].keys()) - assert len(missing_keys) == 0 - say("find the item created by client_with_user_role above") - desc = [ - d for d in r.json()["descriptors"] if d["oonirun_link_id"] == ooni_run_link_id - ][0] - exp_desc = { - "author": "integ-test author", - "date_created": creation_time, - "icon": "myicon", - "oonirun_link_id": ooni_run_link_id, - "is_archived": False, - "is_mine": False, - "name": "integ-test name in English", - "short_description": "integ-test short description in English", - } - for k, v in exp_desc.items(): - assert desc[k] == v, f"{k} {j[k]}!= {v}" + ## list all items as anonymous + r = client.get("/api/v2/oonirun_links") + assert r.status_code == 200, r.json() + + j = r.json() + assert len(j["links"]) > 0 + + found = False + for d in j["links"]: + if d["oonirun_link_id"] == oonirun_link_id: + found = True + assert d["is_mine"] == False + assert d["is_expired"] == False + + assert sorted(d.keys()) == sorted(EXPECTED_OONIRUN_LINK_PUBLIC_KEYS) + assert found == True ### "update" the oonirun by creating a new version, changing the inputs z["nettests"][0]["inputs"].append("https://foo.net/") - exp["nettests"][0]["inputs"].append("https://foo.net/") - r = client_with_user_role.put(f"/api/v2/oonirun/{ooni_run_link_id}", json=z) + r = client_with_user_role.put(f"/api/v2/oonirun/{oonirun_link_id}", json=z) assert r.status_code == 200, r.json() - assert r.json()["v"] == 1, r.json() - assert r.json()["oonirun_link_id"] == ooni_run_link_id + assert r.json()["oonirun_link_id"] == oonirun_link_id - say("Fetch it back") - r = client_with_user_role.get(f"/api/v2/oonirun/{ooni_run_link_id}") + ## Fetch it back + r = client_with_user_role.get(f"/api/v2/oonirun/{oonirun_link_id}") assert r.status_code == 200, r.json() - assert r.json()["v"] == 1, r.json() - assert r.json()["is_mine"] is True, r.json() - assert r.json()["is_archived"] is False, r.json() - say("revision has changed") - print(r.json()) - assert revision < r.json()["revision"] - creation_time = r.json()["date_created"] + j = r.json() + assert j["is_mine"] is True, r.json() + assert j["is_expired"] is False, r.json() + assert j["revision"] > 1, r.json() - say("List descriptors as admin and find we have 2 versions now") - r = client_with_admin_role.get(f"/api/v2/oonirun/?ids={ooni_run_link_id}") + ## List descriptors as admin and find we have 2 versions now + r = client_with_admin_role.get( + f"/api/v2/oonirun_links?oonirun_link_id={oonirun_link_id}" + ) assert r.status_code == 200, r.json() - descs = r.json()["descriptors"] + descs = r.json()["links"] assert len(descs) == 2, r.json() - say("List descriptors using more params") + ## List descriptors using more params r = client_with_user_role.get( - f"/api/v2/oonirun/?ids={ooni_run_link_id}&only_mine=True" + f"/api/v2/oonirun_links?oonirun_link_id={oonirun_link_id}&only_mine=True" ) assert r.status_code == 200, r.json() - descs = r.json()["descriptors"] + descs = r.json()["links"] assert len(descs) == 2, r.json() for d in descs: assert d["is_mine"] is True - assert d["is_archived"] is False + assert d["is_expired"] is False # XXX this is wrong. Admin can do everything. 
# TODO(art): add test for trying to edit from a non-admin account @@ -227,56 +262,208 @@ def test_oonirun_create_and_fetch( # assert r.status_code == 400, r.json() # assert r.json() == {"error": "OONIRun descriptor not found"} - say("# Update translations without changing descriptor_creation_time") + # Update translations without changing descriptor_creation_time + + # We need to pause 1 second for the update time to be different + time.sleep(1) z["description_intl"]["it"] = "integ-test *nuova* descrizione in italiano" - r = client_with_user_role.put(f"/api/v2/oonirun/{ooni_run_link_id}", json=z) + r = client_with_user_role.put(f"/api/v2/oonirun/{oonirun_link_id}", json=z) assert r.status_code == 200, r.json() - say("previous id and descriptor_creation_time, not changed") - assert r.json()["oonirun_link_id"] == ooni_run_link_id + + ## previous id and descriptor_creation_time, not changed + assert r.json()["oonirun_link_id"] == oonirun_link_id # assert creation_time == r.json()["descriptor_creation_time"] - say("Fetch latest and find descriptor_creation_time has not changed") - r = client_with_user_role.get(f"/api/v2/oonirun/{ooni_run_link_id}") + ## Fetch latest and find descriptor_creation_time has not changed + r = client_with_user_role.get(f"/api/v2/oonirun/{oonirun_link_id}") assert r.status_code == 200, r.json() - assert r.json()["v"] == 1, r.json() - missing_keys = set(exp_fetch_fields) - set(r.json().keys()) - assert len(missing_keys) == 0 - say("Only the translation_creation_time increased") - assert creation_time == r.json()["date_updated"] - exp["description_intl"]["it"] = "integ-test *nuova* descrizione in italiano" + j = r.json() - for k, v in exp.items(): - assert j[k] == v, f"{k} {j[k]}!= {v}" - assert r.json()["is_mine"] is True, r.json() - assert r.json()["is_archived"] is False, r.json() - - # TODO(art): this test needs to be more correct - # say("Archive it") - # r = client_with_user_role.put(f"/api/v2/oonirun/{ooni_run_link_id}") - # assert r.status_code == 200, r.json() - # assert r.json()["v"] == 1, r.json() - - # say("List descriptors") - # r = client_with_user_role.get( - # f"/api/v2/oonirun/?ids={ooni_run_link_id}&include_archived=True" - # ) - # assert r.status_code == 200, r.json() - # descs = r.json()["descriptors"] - # assert len(descs) == 2, r.json() - - # say("List descriptors") - # r = client_with_user_role.get(f"/api/v2/oonirun/?ids={ooni_run_link_id}") - # assert r.status_code == 200, r.json() - # descs = r.json()["descriptors"] - # assert len(descs) == 0, r.json() - - # say("Fetch latest and find that it's archived") - # r = client_with_user_role.get(f"/api/v2/oonirun/{ooni_run_link_id}") - # assert r.status_code == 200, r.json() - # assert r.json()["is_archived"] == True, r.json() - - -def test_fetch_not_found(client_with_user_role): - r = client_with_user_role.get("/api/_/ooni_run/fetch/999999999999999") - assert r.status_code == 404, r.json() - assert "not found" in r.json()["detail"].lower() + + assert sorted(j.keys()) == sorted(EXPECTED_OONIRUN_LINK_PUBLIC_KEYS) + + date_created = datetime.strptime( + j["date_created"], "%Y-%m-%dT%H:%M:%S.%fZ" + ).replace(tzinfo=timezone.utc) + assert date_created < datetime.now(timezone.utc) + assert date_created > datetime.now(timezone.utc) + timedelta(hours=-1) + + date_updated = datetime.strptime( + j["date_updated"], "%Y-%m-%dT%H:%M:%S.%fZ" + ).replace(tzinfo=timezone.utc) + assert date_updated < datetime.now(timezone.utc) + assert date_updated > datetime.now(timezone.utc) + timedelta(hours=-1) + + assert 
date_updated > date_created + + assert j["description_intl"]["it"] == "integ-test *nuova* descrizione in italiano" + assert j["is_mine"] == True + + # Archive it + edit_req = deepcopy(j) + edit_req["expiration_date"] = ( + datetime.now(timezone.utc) + timedelta(minutes=-1) + ).strftime("%Y-%m-%dT%H:%M:%S.%fZ") + r = client_with_user_role.put(f"/api/v2/oonirun/{oonirun_link_id}", json=edit_req) + j = r.json() + assert r.status_code == 200, r.json() + assert j["is_expired"] == True + + ## List descriptors after expiration filtering by ID + r = client_with_user_role.get( + f"/api/v2/oonirun_links?oonirun_link_id={oonirun_link_id}&include_expired=True" + ) + j = r.json() + assert r.status_code == 200, r.json() + descs = j["links"] + assert len(descs) == 2, r.json() + + ## List descriptors after expiration NOT filtering by ID + r = client_with_user_role.get(f"/api/v2/oonirun_links?include_expired=True") + j = r.json() + assert r.status_code == 200, r.json() + descs = j["links"] + assert len(descs) == 3, r.json() + + ## List descriptors filtered by ID + r = client_with_user_role.get( + f"/api/v2/oonirun_links?oonirun_link_id={oonirun_link_id}" + ) + assert r.status_code == 200, r.json() + descs = r.json()["links"] + assert len(descs) == 0, r.json() + + ## List descriptors unfiltered by ID + r = client_with_user_role.get(f"/api/v2/oonirun_links") + assert r.status_code == 200, r.json() + descs = r.json()["links"] + assert len(descs) == 1, r.json() + + ## Fetch latest and find that it's archived + r = client_with_user_role.get(f"/api/v2/oonirun/{oonirun_link_id}") + assert r.status_code == 200, r.json() + assert r.json()["is_expired"] == True, r.json() + + +def test_oonirun_expiration(client, client_with_user_role): + z = deepcopy(SAMPLE_OONIRUN) + ### Create descriptor as user + z["name"] = "integ-test name in English" + z["name_intl"]["it"] = "integ-test nome in italiano" + r = client_with_user_role.post("/api/v2/oonirun", json=z) + assert r.status_code == 200, r.json() + assert str(r.json()["oonirun_link_id"]).endswith("00") + oonirun_link_id = int(r.json()["oonirun_link_id"]) + + ## Fetch anonymously and check it's not expired + r = client.get(f"/api/v2/oonirun/{oonirun_link_id}") + j = r.json() + assert r.status_code == 200, r.json() + assert j["is_expired"] == False, r.json() + + ## Create new revision + j["nettests"][0]["inputs"].append("https://foo.net/") + r = client_with_user_role.put(f"/api/v2/oonirun/{oonirun_link_id}", json=j) + assert r.status_code == 200, r.json() + + ## Fetch anonymously and check it's got the new revision + r = client.get(f"/api/v2/oonirun/{oonirun_link_id}") + j = r.json() + assert j["revision"] == 2, "revision did not change" + + ## Update expiry time + j["expiration_date"] = ( + datetime.now(timezone.utc) + timedelta(minutes=-1) + ).strftime("%Y-%m-%dT%H:%M:%S.%fZ") + r = client_with_user_role.put(f"/api/v2/oonirun/{oonirun_link_id}", json=j) + assert r.status_code == 200, r.json() + assert r.json()["is_expired"] == True, r.json() + + ## Fetch anonymously and check it's expired + r = client.get(f"/api/v2/oonirun/{oonirun_link_id}") + assert r.status_code == 200, r.json() + assert r.json()["is_expired"] == True, r.json() + + ## List descriptors after expiration + r = client_with_user_role.get( + f"/api/v2/oonirun_links?oonirun_link_id={oonirun_link_id}" + ) + j = r.json() + assert r.status_code == 200, r.json() + descs = j["links"] + assert len(descs) == 0, r.json() + + ## List descriptors after expiration + r = client_with_user_role.get( + 
f"/api/v2/oonirun_links?oonirun_link_id={oonirun_link_id}&include_expired=True" + ) + j = r.json() + assert r.status_code == 200, r.json() + descs = j["links"] + assert len(descs) == 2, r.json() + for d in descs: + assert d["is_expired"] == True, "is_expired should be True" + + r = client_with_user_role.get( + f"/api/v2/oonirun_links?oonirun_link_id={oonirun_link_id}&include_expired=True&only_latest=True" + ) + j = r.json() + assert r.status_code == 200, r.json() + descs = j["links"] + assert len(descs) == 1, r.json() + for d in descs: + assert d["is_expired"] == True, "is_expired should be True" + + +def test_oonirun_revisions(client, client_with_user_role): + z = deepcopy(SAMPLE_OONIRUN) + ### Create descriptor as user + z["name"] = "first descriptor" + r = client_with_user_role.post("/api/v2/oonirun", json=z) + assert r.status_code == 200, r.json() + j = r.json() + oonirun_link_id_one = int(j["oonirun_link_id"]) + + ## Create two new revisions + j["nettests"][0]["inputs"].append("https://foo.net/") + r = client_with_user_role.put(f"/api/v2/oonirun/{oonirun_link_id_one}", json=j) + assert r.status_code == 200, r.json() + j = r.json() + j["nettests"][0]["inputs"].append("https://foo2.net/") + r = client_with_user_role.put(f"/api/v2/oonirun/{oonirun_link_id_one}", json=j) + assert r.status_code == 200, r.json() + j = r.json() + + ### Create another descriptor as user + z["name"] = "second descriptor" + r = client_with_user_role.post("/api/v2/oonirun", json=z) + assert r.status_code == 200, r.json() + j = r.json() + oonirun_link_id_two = int(j["oonirun_link_id"]) + + ## Create new revision + j["nettests"][0]["inputs"].append("https://foo.net/") + r = client_with_user_role.put(f"/api/v2/oonirun/{oonirun_link_id_two}", json=j) + assert r.status_code == 200, r.json() + + ## Fetch anonymously and check it's got the new revision + r = client.get(f"/api/v2/oonirun/{oonirun_link_id_one}") + j = r.json() + assert j["revision"] == 3, "revision is 3" + + r = client_with_user_role.get(f"/api/v2/oonirun_links") + j = r.json() + assert r.status_code == 200, r.json() + descs = j["links"] + assert len(descs) == 5, r.json() + + r = client_with_user_role.get(f"/api/v2/oonirun_links?only_latest=True") + j = r.json() + assert r.status_code == 200, r.json() + descs = j["links"] + assert len(descs) == 2, r.json() + for d in descs: + if d["oonirun_link_id"] == oonirun_link_id_one: + assert d["revision"] == 3, "revision is 3" + if d["oonirun_link_id"] == oonirun_link_id_two: + assert d["revision"] == 2, "revision is 2" diff --git a/api/fastapi/poetry.lock b/api/fastapi/poetry.lock index 533c1634..850c5f74 100644 --- a/api/fastapi/poetry.lock +++ b/api/fastapi/poetry.lock @@ -1267,4 +1267,4 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "bda5c79bd0d59559ec22d56465bf4118e4e2205e6b7a1030062979451b17f9c2" +content-hash = "83e123671a74e164919c325c59ddaeafb0f720216c713e851d343694264af300" diff --git a/api/fastapi/pyproject.toml b/api/fastapi/pyproject.toml index 7243b648..10bbf7b3 100644 --- a/api/fastapi/pyproject.toml +++ b/api/fastapi/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "oonidataapi" -version = "0.3.0.dev1" +version = "0.4.2.dev1" description = "" authors = ["OONI "] readme = "Readme.md" @@ -26,6 +26,7 @@ alembic = "^1.13.1" pytest = "^7.4.4" docker = "^7.0.0" pytest-cov = "^4.1.0" +click = "^8.1.7" [build-system] requires = ["poetry-core"]