Skip to content

Commit

Permalink
Merge pull request #566 from naspeh-sf/master
Browse files Browse the repository at this point in the history
Keep only one base TestCase
  • Loading branch information
naspeh committed Sep 8, 2016
2 parents 04357a0 + 2da9406 commit 55c63ff
Show file tree
Hide file tree
Showing 58 changed files with 366 additions and 424 deletions.
4 changes: 2 additions & 2 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,6 @@ install:

script:
- flake8
- nosetests -v --with-timer
- behave --format progress3 --logging-level ERROR
- time nosetests -v --with-timer
- time behave --format progress3 --logging-level ERROR

5 changes: 2 additions & 3 deletions apps/apps_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,12 @@
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license

from test_factory import SuperdeskTestCase
from apps.preferences import PreferencesService
from superdesk.tests import TestCase


class Preference_Tests(SuperdeskTestCase):
class Preference_Tests(TestCase):
def setUp(self):
super().setUp()
self._default_user_settings = {
"archive:view": {
"default": "mgrid",
Expand Down
33 changes: 16 additions & 17 deletions apps/archive/archive_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,23 +10,26 @@


import unittest
from datetime import timedelta, datetime
from unittest.mock import MagicMock

from bson import ObjectId
from unittest.mock import MagicMock
from superdesk import get_resource_service
from test_factory import SuperdeskTestCase
from superdesk.utc import get_expiry_date, utcnow
from apps.archive.archive import SOURCE as ARCHIVE
from superdesk.errors import SuperdeskApiError
from datetime import timedelta, datetime
from pytz import timezone
from apps.archive.common import validate_schedule, remove_media_files, \
    format_dateline_to_locmmmddsrc, convert_task_attributes_to_objectId, \
    is_genre, BROADCAST_GENRE, get_default_source, set_default_source, \
    get_utc_schedule, get_dateline_city
from apps.archive.archive import SOURCE as ARCHIVE
from apps.archive.common import (
validate_schedule, remove_media_files,
format_dateline_to_locmmmddsrc, convert_task_attributes_to_objectId,
is_genre, BROADCAST_GENRE, get_default_source, set_default_source,
get_utc_schedule, get_dateline_city
)
from superdesk import get_resource_service
from superdesk.errors import SuperdeskApiError
from superdesk.tests import TestCase
from superdesk.utc import get_expiry_date, utcnow


class RemoveSpikedContentTestCase(SuperdeskTestCase):
class RemoveSpikedContentTestCase(TestCase):

articles = [{'guid': 'tag:localhost:2015:69b961ab-2816-4b8a-a584-a7b402fed4f9',
'_id': '1',
Expand Down Expand Up @@ -170,9 +173,6 @@ class RemoveSpikedContentTestCase(SuperdeskTestCase):
}
}

def setUp(self):
super().setUp()

def test_query_getting_expired_content(self):
self.app.data.insert(ARCHIVE, [{'expiry': get_expiry_date(-10), 'state': 'spiked'}])
self.app.data.insert(ARCHIVE, [{'expiry': get_expiry_date(0), 'state': 'spiked'}])
Expand Down Expand Up @@ -212,7 +212,7 @@ def test_delete_by_ids(self):
self.assertEqual(len(self.articles), archive_service.on_delete.call_count)


class ArchiveTestCase(SuperdeskTestCase):
class ArchiveTestCase(TestCase):
def test_validate_schedule(self):
validate_schedule(utcnow() + timedelta(hours=2))

Expand Down Expand Up @@ -395,10 +395,9 @@ def test_get_utc_schedule(self):
self.assertEqual(utc_schedule, embargo_date)


class ExpiredArchiveContentTestCase(SuperdeskTestCase):
class ExpiredArchiveContentTestCase(TestCase):

def setUp(self):
super().setUp()
try:
from apps.archive.commands import RemoveExpiredContent
except ImportError:
Expand Down
6 changes: 3 additions & 3 deletions apps/auth/db/tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,12 @@
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license

from test_factory import SuperdeskTestCase
from superdesk import get_resource_service
from .commands import CreateUserCommand
from superdesk import get_resource_service
from superdesk.tests import TestCase


class UsersTestCase(SuperdeskTestCase):
class UsersTestCase(TestCase):

def test_create_user_command(self):
if not self.app.config.get('LDAP_SERVER'):
Expand Down
50 changes: 25 additions & 25 deletions apps/content_filters/content_filter/tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,25 +8,25 @@
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license

from test_factory import SuperdeskTestCase
from apps.content_filters.content_filter.content_filter_service import ContentFilterService
from superdesk.publish import SubscribersService
from eve.utils import ParsedRequest
import json
import os
import superdesk

from apps.content_filters.content_filter.content_filter_service import ContentFilterService
from superdesk import get_backend, get_resource_service
from superdesk.errors import SuperdeskApiError
from superdesk.publish import SubscribersService
from superdesk.tests import TestCase
from superdesk.vocabularies.command import VocabulariesPopulateCommand


class ContentFilterTests(SuperdeskTestCase):
class ContentFilterTests(TestCase):

def setUp(self):
super().setUp()
self.req = ParsedRequest()
with self.app.test_request_context(self.app.config.get('URL_PREFIX')):
self.f = ContentFilterService(datasource='content_filters', backend=superdesk.get_backend())
self.s = SubscribersService(datasource='subscribers', backend=superdesk.get_backend())
self.f = ContentFilterService(datasource='content_filters', backend=get_backend())
self.s = SubscribersService(datasource='subscribers', backend=get_backend())

self.articles = [{'_id': '1', 'urgency': 1, 'headline': 'story', 'state': 'fetched'},
{'_id': '2', 'headline': 'prtorque', 'state': 'fetched'},
Expand Down Expand Up @@ -134,7 +134,7 @@ def test_build_mongo_query_using_like_filter_single_fc(self):
doc = {'content_filter': [{"expression": {"fc": [1]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
docs = get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(3, docs.count())
Expand All @@ -146,7 +146,7 @@ def test_build_mongo_query_using_like_filter_single_pf(self):
doc = {'content_filter': [{"expression": {"pf": [1]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
docs = get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(3, docs.count())
Expand All @@ -158,7 +158,7 @@ def test_build_mongo_query_using_like_filter_multi_filter_condition(self):
doc = {'content_filter': [{"expression": {"fc": [1]}}, {"expression": {"fc": [2]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
docs = get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(4, docs.count())
Expand All @@ -170,7 +170,7 @@ def test_build_mongo_query_using_like_filter_multi_pf(self):
doc = {'content_filter': [{"expression": {"pf": [1]}}, {"expression": {"fc": [2]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
docs = get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(4, docs.count())
Expand All @@ -182,7 +182,7 @@ def test_build_mongo_query_using_like_filter_multi_filter_condition2(self):
doc = {'content_filter': [{"expression": {"fc": [3, 4]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
docs = get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
Expand All @@ -192,7 +192,7 @@ def test_build_mongo_query_using_like_filter_multi_pf2(self):
doc = {'content_filter': [{"expression": {"pf": [2]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
docs = get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
Expand All @@ -202,7 +202,7 @@ def test_build_mongo_query_using_like_filter_multi_condition3(self):
doc = {'content_filter': [{"expression": {"fc": [3, 4]}}, {"expression": {"fc": [1, 2]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
docs = get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
Expand All @@ -212,7 +212,7 @@ def test_build_mongo_query_using_like_filter_multi_pf3(self):
doc = {'content_filter': [{"expression": {"pf": [2]}}, {"expression": {"pf": [1], "fc": [2]}}], 'name': 'pf-1'}
with self.app.app_context():
query = self.f.build_mongo_query(doc)
docs = superdesk.get_resource_service('archive').\
docs = get_resource_service('archive').\
get_from_mongo(req=self.req, lookup=query)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
Expand All @@ -223,7 +223,7 @@ def test_build_elastic_query_using_like_filter_single_filter_condition(self):
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
docs = get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(3, docs.count())
self.assertTrue('1' in doc_ids)
Expand All @@ -235,7 +235,7 @@ def test_build_elastic_query_using_like_filter_single_content_filter(self):
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
docs = get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(3, docs.count())
self.assertTrue('1' in doc_ids)
Expand All @@ -247,7 +247,7 @@ def test_build_elastic_query_using_like_filter_multi_filter_condition(self):
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
docs = get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(4, docs.count())
self.assertTrue('1' in doc_ids)
Expand All @@ -260,7 +260,7 @@ def test_build_mongo_query_using_like_filter_multi_content_filter(self):
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
docs = get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(4, docs.count())
self.assertTrue('1' in doc_ids)
Expand All @@ -273,7 +273,7 @@ def test_build_elastic_query_using_like_filter_multi_filter_condition2(self):
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
docs = get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
self.assertTrue('3' in doc_ids)
Expand All @@ -284,7 +284,7 @@ def test_build_elastic_query_using_like_filter_multi_content_filter2(self):
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
docs = get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
self.assertTrue('3' in doc_ids)
Expand All @@ -294,7 +294,7 @@ def test_build_elastic_query_using_like_filter_multi_content_filter3(self):
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
docs = get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
self.assertTrue('3' in doc_ids)
Expand All @@ -304,7 +304,7 @@ def test_build_elastic_query_using_like_filter_multi_content_filter4(self):
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
docs = get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
self.assertTrue('3' in doc_ids)
Expand All @@ -314,7 +314,7 @@ def test_build_elastic_query_using_like_filter_multi_content_filter4(self):
with self.app.app_context():
query = {'query': {'filtered': {'query': self.f._get_elastic_query(doc)}}}
self.req.args = {'source': json.dumps(query)}
docs = superdesk.get_resource_service('archive').get(req=self.req, lookup=None)
docs = get_resource_service('archive').get(req=self.req, lookup=None)
doc_ids = [d['_id'] for d in docs]
self.assertEqual(1, docs.count())
self.assertTrue('3' in doc_ids)
Expand Down

0 comments on commit 55c63ff

Please sign in to comment.