test cleanup & speedup
aaxelb committed Dec 20, 2022
1 parent d960226 commit 21cd19c
Showing 13 changed files with 637 additions and 552 deletions.
6 changes: 3 additions & 3 deletions tests/api/test_feeds.py
@@ -16,11 +16,11 @@
# TODO add tests for RSS


@pytest.mark.django_db
@pytest.mark.usefixtures('nested_django_db')
class TestFeed:

@pytest.fixture
def fake_items(self, settings, index_records):
@pytest.fixture(scope='class')
def fake_items(self, index_records, class_scoped_django_db):
records = [
f.CreativeWork(
identifiers=[f.WorkIdentifier()],
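The fixture change above is the pattern this commit applies across the test suite: the per-test `django_db` mark is swapped for the new `nested_django_db` fixture, and `fake_items` becomes class-scoped so its records are built and indexed once per class rather than once per test. A condensed sketch of the resulting shape (not the actual file contents — the import of the factories module as `f` and the single-record list are assumptions for illustration, and the conftest fixtures from this commit are assumed to be available):

import pytest

from tests.share.normalize import factories as f  # import path assumed for this sketch


@pytest.mark.usefixtures('nested_django_db')   # replaces @pytest.mark.django_db
class TestFeed:

    @pytest.fixture(scope='class')
    def fake_items(self, index_records, class_scoped_django_db):
        # runs once per class; the indexed records are reused by every test in the class
        return index_records([
            f.CreativeWork(identifiers=[f.WorkIdentifier()]),
        ])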
3 changes: 1 addition & 2 deletions tests/api/test_sources_endpoint.py
@@ -1,6 +1,5 @@
import json
import pytest
import time

import httpretty

@@ -18,7 +17,7 @@


def exceptionCallback(request, uri, headers):
time.sleep(6)
# time.sleep(6)
return (400, headers, uri)


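For context on `exceptionCallback`: httpretty accepts a callable as the `body` of a registered URI and calls it for each matching request, so the six-second sleep delayed every response this callback produced — commenting it out is where this file's speedup comes from. A minimal, self-contained illustration of how such a callback is wired up (the URL, HTTP method, and assertion are made up for this sketch, not taken from test_sources_endpoint.py):

import httpretty
import requests


def exceptionCallback(request, uri, headers):
    # mirrors the callback above: answer immediately with a 400
    return (400, headers, uri)


@httpretty.activate
def test_error_response():
    httpretty.register_uri(httpretty.POST, 'http://example.invalid/sources/', body=exceptionCallback)
    response = requests.post('http://example.invalid/sources/')
    assert response.status_code == 400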
103 changes: 74 additions & 29 deletions tests/conftest.py
@@ -1,16 +1,19 @@
import datetime
import json
import logging
import random
import string

import pytest

from django.db import transaction
from django.utils import timezone

from oauth2_provider.models import AccessToken, Application
from urllib3.connection import ConnectionError
from elasticsearch.exceptions import ConnectionError as ElasticConnectionError

from project.settings import ELASTICSEARCH
from share.models import NormalizedData, RawDatum
from share.models import ShareUser
from share.models import SourceUniqueIdentifier
@@ -22,6 +25,9 @@
from tests.share.normalize.factories import GraphBuilder


logger = logging.getLogger(__name__)


@pytest.fixture
def client():
from django.test.client import Client
@@ -137,46 +143,47 @@ def expected_graph(*args, **kwargs):
return expected_graph


@pytest.fixture
@pytest.fixture(scope='session')
def elastic_test_index_name():
return 'test_share'


@pytest.fixture
def elastic_test_manager(settings, elastic_test_index_name):
@pytest.fixture(scope='class')
def elastic_test_manager(elastic_test_index_name):
# ideally these settings changes would be encapsulated by ElasticManager, but there's
# still code that uses the settings directly, so using the pytest-django fixture for now
settings.ELASTICSEARCH = {
**settings.ELASTICSEARCH,
'TIMEOUT': 5,
'PRIMARY_INDEX': elastic_test_index_name,
'LEGACY_INDEX': elastic_test_index_name,
'BACKCOMPAT_INDEX': elastic_test_index_name,
'ACTIVE_INDEXES': [elastic_test_index_name],
'INDEXES': {
elastic_test_index_name: {
'DEFAULT_QUEUE': f'{elastic_test_index_name}_queue',
'URGENT_QUEUE': f'{elastic_test_index_name}_queue.urgent',
'INDEX_SETUP': 'postrend_backcompat',
# still code that uses the settings directly, so using pytest.MonkeyPatch for now
with pytest.MonkeyPatch.context() as mp:
mp.setattr('share.search.elastic_manager.settings.ELASTICSEARCH', {
**ELASTICSEARCH,
'TIMEOUT': 5,
'PRIMARY_INDEX': elastic_test_index_name,
'LEGACY_INDEX': elastic_test_index_name,
'BACKCOMPAT_INDEX': elastic_test_index_name,
'ACTIVE_INDEXES': [elastic_test_index_name],
'INDEXES': {
elastic_test_index_name: {
'DEFAULT_QUEUE': f'{elastic_test_index_name}_queue',
'URGENT_QUEUE': f'{elastic_test_index_name}_queue.urgent',
'INDEX_SETUP': 'postrend_backcompat',
},
},
},
}
elastic_manager = ElasticManager()
try:
elastic_manager.delete_index(elastic_test_index_name)
elastic_manager.create_index(elastic_test_index_name)

})
elastic_manager = ElasticManager()
try:
yield elastic_manager
finally:
elastic_manager.delete_index(elastic_test_index_name)
elastic_manager.create_index(elastic_test_index_name)

except (ConnectionError, ElasticConnectionError):
raise pytest.skip('Elasticsearch unavailable')
try:
yield elastic_manager
finally:
elastic_manager.delete_index(elastic_test_index_name)

except (ConnectionError, ElasticConnectionError):
raise pytest.skip('Elasticsearch unavailable')

@pytest.fixture
def index_records(elastic_test_manager):

@pytest.fixture(scope='class')
def index_records(elastic_test_manager, class_scoped_django_db):

def _index_records(normalized_graphs):
normalized_datums = [
@@ -200,3 +207,41 @@ def _index_records(normalized_graphs):
return normalized_datums

return _index_records


def rolledback_transaction(loglabel):
class ExpectedRollback(Exception):
pass
try:
with transaction.atomic():
print(f'{loglabel}: started transaction')
yield
raise ExpectedRollback('this is an expected rollback; all is well')
except ExpectedRollback:
print(f'{loglabel}: rolled back transaction (as planned)')
else:
raise ExpectedRollback('expected a rollback but did not get one; something is wrong')


@pytest.fixture(scope='class')
def class_scoped_django_db(django_db_setup, django_db_blocker, request):
"""a class-scoped version of the `django_db` mark
(so we can use class-scoped fixtures to set up data
for use across several tests)
recommend using via the `nested_django_db` fixture,
or use directly in another class-scoped fixture.
"""
with django_db_blocker.unblock():
yield from rolledback_transaction(f'class_scoped_django_db({request.node})')


@pytest.fixture(scope='function')
def nested_django_db(class_scoped_django_db, request):
"""wrap each function and the entire class in transactions
(so fixtures can have scope='class' for reuse across tests,
but what happens in each test stays in that test)
recommend applying via `@pytest.mark.usefixtures('nested_django_db')` on the test class
"""
yield from rolledback_transaction(f'nested_django_db({request.node})')
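
Taken together, the two new fixtures nest Django transactions: `class_scoped_django_db` unblocks the test database and opens one transaction for the whole test class, and `nested_django_db` wraps each test in an inner `transaction.atomic()` block (a savepoint, since Django nests atomic blocks via savepoints) that is always rolled back. Data created by class-scoped fixtures is therefore visible to every test in the class, while each test's own writes disappear when it finishes. A sketch of the intended usage, mirroring this commit's changes to tests/api/test_feeds.py and tests/share/ingest/test_scheduler.py (the test class and its assertions are invented for illustration, and the second test assumes pytest's default definition-order execution):

import pytest

from share.models import RawDatum
from tests import factories


@pytest.mark.usefixtures('nested_django_db')       # per-test savepoint, rolled back after each test
class TestExample:

    @pytest.fixture(scope='class')
    def shared_suid(self, class_scoped_django_db):
        # created once, inside the class-wide transaction
        return factories.SourceUniqueIdentifierFactory()

    def test_writes_are_isolated(self, shared_suid):
        factories.RawDatumFactory(suid=shared_suid)  # rolled back when this test ends
        assert RawDatum.objects.filter(suid=shared_suid).count() == 1

    def test_class_data_persists(self, shared_suid):
        # the class-scoped suid is still present; the previous test's RawDatum is not
        assert RawDatum.objects.filter(suid=shared_suid).count() == 0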
19 changes: 11 additions & 8 deletions tests/share/ingest/test_scheduler.py
@@ -9,7 +9,7 @@
from tests import factories


@pytest.mark.django_db
@pytest.mark.usefixtures('nested_django_db')
class TestIngestScheduler:

@pytest.fixture
@@ -22,6 +22,10 @@ def mock_ingest(self):
with mock.patch('share.ingest.scheduler.ingest') as mock_ingest:
yield mock_ingest

@pytest.fixture(scope='class')
def suid(self, class_scoped_django_db):
return factories.SourceUniqueIdentifierFactory()

@pytest.mark.parametrize('raw_ages, selected_raw', [
([0, 1, 2], 0),
([5, 4, 2, 3], 2),
@@ -40,8 +44,7 @@ def mock_ingest(self):
('succeeded', True, 'created'),
('succeeded', False, 'succeeded'),
])
def test_schedule(self, raw_ages, selected_raw, claim, prior_status, superfluous, expected_status):
suid = factories.SourceUniqueIdentifierFactory()
def test_schedule(self, suid, raw_ages, selected_raw, claim, prior_status, superfluous, expected_status):
raws = [
factories.RawDatumFactory(
suid=suid,
Expand All @@ -67,14 +70,14 @@ def test_schedule(self, raw_ages, selected_raw, claim, prior_status, superfluous
assert job.status == getattr(IngestJob.STATUS, expected_status)
assert job.claimed == claim

def test_reingest(self, mock_consume):
raw = factories.RawDatumFactory()
def test_reingest(self, suid, mock_consume):
raw = factories.RawDatumFactory(suid=suid)
job = IngestScheduler().reingest(raw.suid)
assert job.claimed
mock_consume.assert_called_once_with(job_id=job.id, exhaust=False, superfluous=True)

def test_reingest_async(self, mock_ingest):
raw = factories.RawDatumFactory()
def test_reingest_async(self, suid, mock_ingest):
raw = factories.RawDatumFactory(suid=suid)
job = IngestScheduler().reingest_async(raw.suid)
assert job.claimed
mock_ingest.delay.assert_called_once_with(job_id=job.id, exhaust=False, superfluous=True)
@@ -109,7 +112,7 @@ def test_bulk_schedule(self, claim, superfluous):
suids.add(suid)

actual_jobs = IngestScheduler().bulk_schedule(
SourceUniqueIdentifier.objects.all(),
SourceUniqueIdentifier.objects.filter(id__in=[suid.id for suid in suids]),
claim=claim,
superfluous=superfluous,
)
