Skip to content

Commit

Permalink
First migration for archived meetings (#901)
Browse files Browse the repository at this point in the history
Added tests for migrations
  • Loading branch information
FinnStutzenstein committed Aug 17, 2021
1 parent 6218992 commit 9fe6aad
Show file tree
Hide file tree
Showing 28 changed files with 531 additions and 41 deletions.
1 change: 1 addition & 0 deletions .github/startup_test/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ services:
- DATASTORE_WRITER_PORT=9011
depends_on:
- writer
- reader
reader:
build:
context: "https://github.com/OpenSlides/openslides-datastore-service.git"
Expand Down
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Development and testing inside docker container or without docker (only unit and integration tests)

paths = openslides_backend/ tests/ cli/ migrations/
paths = openslides_backend/ tests/ cli/

all: black autoflake isort flake8 mypy

Expand Down
2 changes: 1 addition & 1 deletion dev/entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ export DATASTORE_DATABASE_PASSWORD=${DATASTORE_DATABASE_PASSWORD:-openslides}
./wait.sh $DATASTORE_WRITER_HOST $DATASTORE_WRITER_PORT

printf "\nMigrations:\n"
python migrations/migrate.py migrate
python migrations/migrate.py finalize
printf "\n"

exec "$@"
3 changes: 3 additions & 0 deletions migrations/.coveragerc
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
[report]
fail_under=100
include=migrations/*
2 changes: 1 addition & 1 deletion migrations/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
FROM python:3.8.5-slim-buster

RUN apt-get update && apt-get install --yes bash-completion vim postgresql-client redis-tools wait-for-it gcc libc-dev libpq-dev
RUN apt-get update && apt-get install --yes bash-completion vim postgresql-client redis-tools wait-for-it gcc libc-dev libpq-dev curl

WORKDIR /app
RUN mkdir /datastore-service
Expand Down
19 changes: 12 additions & 7 deletions migrations/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,16 +12,21 @@ This is done within the dockersetup in this folder. Note that the datastore need
- You can exit from it with `exit` and shut down the docker setup with `make stop-dev`
- You can write migrations in the backend and also adjust the Datastore at the same time since both are mounted into the container.

Scripts for initial data. Only the current dataset is exported, so after the export, only one position exists in the datastore. Also note that importing clears the old content
The following scripts can be used to make snapshots and to try out new migrations.

- export-data-only.sh [to:export.json]
- import-data-only.sh [from:export.json]
#### Scripts for setting initial data
Only the current dataset is exported, so after a (re-)import, only one position exists in the datastore. Also note that importing clears the old content.

Scripts for the full backup (Does a DB dump)
- export-events.sh [to:export.sql]
- import-events.sh [from:export.sql]
- `export-data-only.sh` [to:export.json]
- `import-data-only.sh` [from:export.json]

#### Scripts for the full backup (Does a DB dump)
- `export-events.sh` [to:export.sql]
- `import-events.sh` [from:export.sql]

#### Downloading example data
- `fetch-example-data.sh` [to:example-data.json]

These scripts can be used to make snapshots and to try out new migrations.

## 2) Migrations in dev mode

Expand Down
6 changes: 6 additions & 0 deletions migrations/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from datastore.shared.di import injector
from datastore.shared.postgresql_backend import ConnectionHandler
from datastore.shared.services import ReadDatabase

from .migrate import load_migrations
Expand All @@ -13,6 +14,11 @@ class MisconfiguredMigrations(Exception):


def assert_migration_index() -> None:
connection = injector.get(ConnectionHandler)
with connection.get_connection_context():
if connection.query_single_value("select count(*) from positions", []) == 0:
return # Datastore is empty; nothing to check.

migration_classes = load_migrations()

backend_migration_index = 1
Expand Down
10 changes: 10 additions & 0 deletions migrations/cleanup.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
#!/bin/bash

# Run the auto-formatters and linters over the migrations code base
# (black, isort, flake8, mypy — the same tool chain the Makefile uses
# for the backend). Each tool operates on the current directory.

printf "Black:\n"
black .
printf "\nIsort:\n"
isort .
printf "\nFlake8:\n"
flake8 .
printf "\nmypy:\n"
mypy .
3 changes: 3 additions & 0 deletions migrations/coverage.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
#!/bin/bash

# Run the migration tests with coverage measurement (configured via
# .coveragerc, which enforces 100% coverage of migrations/*) and
# produce an HTML coverage report.
pytest --cov --cov-report html
1 change: 1 addition & 0 deletions migrations/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ services:
- DATASTORE_WRITER_HOST=writer
- DATASTORE_ENABLE_DEV_ENVIRONMENT=1
- PYTHONPATH=/app:/datastore-service
- MYPYPATH=/app:/datastore-service
depends_on:
- postgresql
- redis
Expand Down
6 changes: 6 additions & 0 deletions migrations/fetch-example-data.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
#!/bin/bash

# Download the OpenSlides 4 example data set.
# Usage: fetch-example-data.sh [target-file]   (default: example-data.json)

set -e

file=${1:-example-data.json}
# --fail makes curl exit non-zero on an HTTP error (so `set -e` aborts)
# instead of silently saving the error page as the data file.
# "$file" is quoted so target paths containing spaces work.
curl --fail https://raw.githubusercontent.com/OpenSlides/OpenSlides/openslides4-dev/docs/example-data.json --output "$file"
19 changes: 0 additions & 19 deletions migrations/migrations/0001_first_migration.example

This file was deleted.

14 changes: 14 additions & 0 deletions migrations/migrations/0001_limit_of_meetings.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
from datastore.migrations import AddFieldMigration


class Migration(AddFieldMigration):
    """
    This migration adds `organization/limit_of_meetings` with a
    default of 0 (no limit) to each organization.

    All behavior comes from `AddFieldMigration`; this class only
    declares the collection, field and default value to add.
    """

    # Raises the datastore's migration index from 1 to 2.
    target_migration_index = 2

    collection = "organization"
    field = "limit_of_meetings"
    default = 0
96 changes: 96 additions & 0 deletions migrations/migrations/0002_archive_meetings.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
from typing import Any, List, Optional, Set

from datastore.migrations import (
BaseEvent,
BaseMigration,
CreateEvent,
DeleteEvent,
ListUpdateEvent,
RestoreEvent,
)
from datastore.shared.util import collection_and_id_from_fqid


class Migration(BaseMigration):
    """
    This migration adds the 1:N relation `organization/active_meeting_ids` <->
    `meeting/is_active_in_organization_id`. This relation must be set for every
    meeting and link them to the one organization.

    Note that the field `organization/active_meeting_ids` is modified at the end
    of the migration. Why? Remember the single assertion about consistency within
    the datastore: The data is consistent *after* each position. When migrating
    single events, the consistency is not (necessarily) provided.
    One seemingly simpler but wrong migration would be:
    ```
    if isinstance(event, CreateEvent):
        event.data["is_active_in_organization_id"] = 1
        update_event = ListUpdateEvent("organization/1", {"add": {"active_meeting_ids": [id]}})
        new_event = [event, update_event]
    elif isinstance(event, DeleteEvent):
        update_event = ListUpdateEvent("organization/1", {"remove": {"active_meeting_ids": [id]}})
        new_event = [event, update_event]
    elif isinstance(event, RestoreEvent):
        update_event = ListUpdateEvent("organization/1", {"add": {"active_meeting_ids": [id]}})
        new_event = [event, update_event]
    ```
    The problem is that within the events of the one position it is not clear
    whether the organization exists! The organization itself might be created with
    the last event, so doing a ListUpdateEvent before the creation of the
    organization will fail. This is the reason `get_additional_events` is used.
    After migrating all events we can be sure that the content is consistent (with
    the exception of `meeting/is_active_in_organization_id` since we added just
    half of the relation). So when appending to the events we can be sure that the
    organization exists.

    Also note that the relation is not checked on the organization side since
    deleting (and restoring, too) is not yet supported, and since the two added
    fields are new, they cannot be affected by any update event.
    """

    target_migration_index = 3

    def position_init(self) -> None:
        # Capture all meeting ids to add to / remove from
        # `organization/active_meeting_ids` in this position.
        self.meeting_ids_to_add: Set[int] = set()
        self.meeting_ids_to_remove: Set[int] = set()

    def migrate_event(
        self,
        event: BaseEvent,
    ) -> Optional[List[BaseEvent]]:
        collection, id = collection_and_id_from_fqid(event.fqid)

        if collection != "meeting":
            return None  # leave non-meeting events untouched

        if isinstance(event, CreateEvent):
            event.data["is_active_in_organization_id"] = 1
            self.meeting_ids_to_add.add(id)
            return [event]
        elif isinstance(event, DeleteEvent):
            if id in self.meeting_ids_to_add:
                # Created and deleted within the same position: the meeting never
                # makes it into `active_meeting_ids`, so just forget it.
                self.meeting_ids_to_add.remove(id)
            else:
                self.meeting_ids_to_remove.add(id)
        elif isinstance(event, RestoreEvent):
            if id in self.meeting_ids_to_remove:
                # Deleted and restored within the same position: net no-op.
                self.meeting_ids_to_remove.remove(id)
            else:
                self.meeting_ids_to_add.add(id)
        return None

    def get_additional_events(self) -> Optional[List[BaseEvent]]:
        if not self.meeting_ids_to_add and not self.meeting_ids_to_remove:
            return None

        # Sort the ids so the generated event is deterministic (set iteration
        # order is not) and migration output is reproducible across runs.
        payload: Any = {}
        if self.meeting_ids_to_add:
            payload["add"] = {"active_meeting_ids": sorted(self.meeting_ids_to_add)}
        if self.meeting_ids_to_remove:
            payload["remove"] = {
                "active_meeting_ids": sorted(self.meeting_ids_to_remove)
            }

        return [
            ListUpdateEvent(
                "organization/1",
                payload,
            )
        ]
2 changes: 2 additions & 0 deletions migrations/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -13,3 +13,5 @@ black
isort
flake8
mypy
pytest
pytest-cov
28 changes: 28 additions & 0 deletions migrations/setup.cfg
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
[isort]
include_trailing_comma = true
multi_line_output = 3
force_grid_wrap = 0
use_parentheses = True
line_length = 88

[flake8]
extend-ignore = E501

[mypy]
disallow_untyped_defs = true
exclude = tests

[mypy-datastore.*]
disallow_untyped_defs = false
check_untyped_defs = true
ignore_missing_imports = true
strict_optional = true

[mypy-redis]
ignore_missing_imports = true

[mypy-psycopg2.*]
ignore_missing_imports = true

[mypy-fastjsonschema]
ignore_missing_imports = true
Empty file added migrations/tests/__init__.py
Empty file.
96 changes: 96 additions & 0 deletions migrations/tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
from importlib import import_module
from typing import Any, Callable, Dict, Iterator, Optional

import pytest
from datastore.migrations import MigrationHandler
from datastore.migrations.core.setup import register_services
from datastore.reader.core import GetRequest, Reader
from datastore.shared.di import injector
from datastore.shared.postgresql_backend import ConnectionHandler
from datastore.shared.services import ReadDatabase
from datastore.shared.services.environment_service import (
    DATASTORE_DEV_MODE_ENVIRONMENT_VAR,
    EnvironmentService,
)
from datastore.shared.util import DeletedModelsBehaviour
from datastore.writer.core import Writer
from datastore.writer.flask_frontend.json_handlers import WriteHandler


@pytest.fixture(autouse=True)
def setup() -> None:
    """
    Register all datastore services and switch the environment to dev
    mode so the tests run against the development configuration.
    """
    register_services()
    env_service: EnvironmentService = injector.get(EnvironmentService)
    env_service.set(DATASTORE_DEV_MODE_ENVIRONMENT_VAR, "1")


@pytest.fixture(autouse=True)
def clear_datastore(setup) -> None:
    """Truncate the datastore before every test so each test starts empty."""
    writer: Writer = injector.get(Writer)
    writer.truncate_db()


@pytest.fixture()
def write(clear_datastore) -> Iterator[Callable[..., None]]:
    """
    Yield a function that writes the given events to the datastore as a
    single write request (i.e. one position), issued as user 1.
    """

    def _write(*events: Dict[str, Any]) -> None:
        payload = {
            "user_id": 1,
            "information": {},
            "locked_fields": {},
            "events": events,
        }
        write_handler = WriteHandler()
        write_handler.write(payload)

    yield _write


@pytest.fixture()
def finalize(clear_datastore) -> Iterator[Callable[[str], None]]:
    """
    Yield a function that runs a single migration module to completion
    ("finalize") against the current datastore content.
    """

    def _finalize(migration_module_name: str) -> None:
        migration_module = import_module(f"migrations.{migration_module_name}")

        # Subclass the migration under test and force its target index to 2 so
        # it can be tested in isolation, regardless of its real index.
        class Migration(migration_module.Migration):
            target_migration_index = 2

        # Reset all positions to migration index 1 so exactly this one
        # migration (1 -> 2) is pending.
        connection = injector.get(ConnectionHandler)
        with connection.get_connection_context():
            connection.execute("update positions set migration_index=%s", [1])

        migration_handler = injector.get(MigrationHandler)
        migration_handler.register_migrations(Migration)
        migration_handler.finalize()

    yield _finalize


@pytest.fixture()
def read_model(clear_datastore) -> Iterator[Callable[..., Any]]:
    """
    Yield a function that reads a model by fqid from the datastore,
    including deleted models, optionally at a specific position.
    """

    def _read_model(fqid: str, position: Optional[int] = None) -> Any:
        reader: Reader = injector.get(Reader)
        with reader.get_database_context():
            request = GetRequest(
                fqid=fqid,
                position=position,
                get_deleted_models=DeletedModelsBehaviour.ALL_MODELS,
            )
            return reader.get(request)

    yield _read_model


@pytest.fixture()
def assert_model(read_model) -> Iterator[Callable[..., None]]:
    """
    Yield a function asserting that the model with the given fqid equals
    `expected`. If no position is given, the current state is checked and
    the check is repeated with the model built at the maximum position;
    otherwise only the given position is checked.
    """

    def _assert_model(
        fqid: str, expected: Dict[str, Any], position: Optional[int] = None
    ) -> None:
        if position is None:
            assert read_model(fqid) == expected

            # No explicit position requested: additionally rebuild the model at
            # the maximum position and compare. (The lookup lives inside this
            # branch — otherwise a caller-supplied position would be silently
            # overwritten and ignored.)
            read_database: ReadDatabase = injector.get(ReadDatabase)
            with read_database.get_context():
                position = read_database.get_max_position()

        # build model at the chosen position and check
        assert read_model(fqid, position=position) == expected

    yield _assert_model
Loading

0 comments on commit 9fe6aad

Please sign in to comment.