diff --git a/securedrop_client/api_jobs/downloads.py b/securedrop_client/api_jobs/downloads.py index de656dbff..b988501a0 100644 --- a/securedrop_client/api_jobs/downloads.py +++ b/securedrop_client/api_jobs/downloads.py @@ -17,7 +17,8 @@ from securedrop_client.crypto import GpgHelper, CryptoError from securedrop_client.db import File, Message, Reply from securedrop_client.storage import mark_as_decrypted, mark_as_downloaded, \ - set_message_or_reply_content, get_remote_data, update_local_storage + set_message_or_reply_content + logger = logging.getLogger(__name__) @@ -31,62 +32,6 @@ def __init__(self, message: str, self.uuid = uuid -class MetadataSyncJob(ApiJob): - ''' - Update source metadata such that new download jobs can be added to the queue. - ''' - - NUMBER_OF_TIMES_TO_RETRY_AN_API_CALL = 15 - - def __init__(self, data_dir: str, gpg: GpgHelper) -> None: - super().__init__(remaining_attempts=self.NUMBER_OF_TIMES_TO_RETRY_AN_API_CALL) - self.data_dir = data_dir - self.gpg = gpg - - def call_api(self, api_client: API, session: Session) -> Any: - ''' - Override ApiJob. - - Download new metadata, update the local database, import new keys, and - then the success signal will let the controller know to add any new download - jobs. - ''' - - # TODO: Once https://github.com/freedomofpress/securedrop-client/issues/648, we will want to - # pass the default request timeout to api calls instead of setting it on the api object - # directly. - api_client.default_request_timeout = 40 - remote_sources, remote_submissions, remote_replies = \ - get_remote_data(api_client) - - update_local_storage(session, - remote_sources, - remote_submissions, - remote_replies, - self.data_dir) - - fingerprints = self.gpg.fingerprints() - for source in remote_sources: - if source.key and source.key.get('type', None) == 'PGP': - pub_key = source.key.get('public', None) - fingerprint = source.key.get('fingerprint', None) - if not pub_key or not fingerprint: - # The below line needs to be excluded from the coverage computation - # as it will show as uncovered due to a cpython compiler optimziation. - # See: https://bugs.python.org/issue2506 - continue # pragma: no cover - - if fingerprint in fingerprints: - logger.debug("Skipping import of key with fingerprint {}".format(fingerprint)) - continue - - try: - logger.debug("Importing key with fingerprint {}".format(fingerprint)) - self.gpg.import_key(source.uuid, pub_key, fingerprint) - except CryptoError: - logger.warning('Failed to import key for source {}'.format(source.uuid)) - - class DownloadJob(ApiJob): ''' Download and decrypt a file that contains either a message, reply, or file submission. diff --git a/securedrop_client/api_jobs/sync.py b/securedrop_client/api_jobs/sync.py new file mode 100644 index 000000000..ca222c5f9 --- /dev/null +++ b/securedrop_client/api_jobs/sync.py @@ -0,0 +1,67 @@ +from typing import Any +import logging + +from sdclientapi import API +from sqlalchemy.orm.session import Session + +from securedrop_client.api_jobs.base import ApiJob +from securedrop_client.crypto import GpgHelper, CryptoError +from securedrop_client.storage import get_remote_data, update_local_storage + + +logger = logging.getLogger(__name__) + + +class MetadataSyncJob(ApiJob): + ''' + Update source metadata such that new download jobs can be added to the queue. 
+    '''
+
+    NUMBER_OF_TIMES_TO_RETRY_AN_API_CALL = 2
+
+    def __init__(self, data_dir: str, gpg: GpgHelper) -> None:
+        super().__init__(remaining_attempts=self.NUMBER_OF_TIMES_TO_RETRY_AN_API_CALL)
+        self.data_dir = data_dir
+        self.gpg = gpg
+
+    def call_api(self, api_client: API, session: Session) -> Any:
+        '''
+        Override ApiJob.
+
+        Download new metadata, update the local database, import new keys, and
+        then the success signal will let the controller know to add any new download
+        jobs.
+        '''
+
+        # TODO: Once https://github.com/freedomofpress/securedrop-client/issues/648 is resolved,
+        # we will want to pass the default request timeout to api calls instead of setting it on
+        # the api object directly.
+        api_client.default_request_timeout = 40
+        remote_sources, remote_submissions, remote_replies = get_remote_data(api_client)
+
+        update_local_storage(session,
+                             remote_sources,
+                             remote_submissions,
+                             remote_replies,
+                             self.data_dir)
+
+        fingerprints = self.gpg.fingerprints()
+        for source in remote_sources:
+            if source.key and source.key.get('type', None) == 'PGP':
+                pub_key = source.key.get('public', None)
+                fingerprint = source.key.get('fingerprint', None)
+                if not pub_key or not fingerprint:
+                    # The below line needs to be excluded from the coverage computation
+                    # as it will show as uncovered due to a cpython compiler optimization.
+                    # See: https://bugs.python.org/issue2506
+                    continue  # pragma: no cover
+
+                if fingerprint in fingerprints:
+                    logger.debug("Skipping import of key with fingerprint {}".format(fingerprint))
+                    continue
+
+                try:
+                    logger.debug("Importing key with fingerprint {}".format(fingerprint))
+                    self.gpg.import_key(source.uuid, pub_key, fingerprint)
+                except CryptoError:
+                    logger.warning('Failed to import key for source {}'.format(source.uuid))
diff --git a/securedrop_client/logic.py b/securedrop_client/logic.py
index 6ef70eca9..abbc3d8ec 100644
--- a/securedrop_client/logic.py
+++ b/securedrop_client/logic.py
@@ -34,7 +34,7 @@
 from securedrop_client import db
 from securedrop_client.api_jobs.base import ApiInaccessibleError
 from securedrop_client.api_jobs.downloads import FileDownloadJob, MessageDownloadJob, \
-    ReplyDownloadJob, DownloadChecksumMismatchException, MetadataSyncJob
+    ReplyDownloadJob, DownloadChecksumMismatchException
 from securedrop_client.api_jobs.sources import DeleteSourceJob
 from securedrop_client.api_jobs.uploads import SendReplyJob, SendReplyJobError, \
     SendReplyJobTimeoutError
@@ -42,6 +42,7 @@
 from securedrop_client.crypto import GpgHelper
 from securedrop_client.export import Export
 from securedrop_client.queue import ApiJobQueue
+from securedrop_client.sync import ApiSync
 from securedrop_client.utils import check_dir_permissions

 logger = logging.getLogger(__name__)
@@ -194,6 +195,12 @@ def __init__(self, hostname: str, gui, session_maker: sessionmaker,
         # File data.
         self.data_dir = os.path.join(self.home, 'data')

+        # Background sync to keep client up-to-date with server changes
+        self.api_sync = ApiSync(self.api, self.session_maker, self.gpg, self.data_dir)
+        self.api_sync.sync_started.connect(self.on_sync_started, type=Qt.QueuedConnection)
+        self.api_sync.sync_success.connect(self.on_sync_success, type=Qt.QueuedConnection)
+        self.api_sync.sync_failure.connect(self.on_sync_failure, type=Qt.QueuedConnection)
+
     @property
     def is_authenticated(self) -> bool:
         return self.__is_authenticated
@@ -226,11 +233,6 @@ def setup(self):
         self.sync_timer.timeout.connect(self.update_sync)
         self.sync_timer.start(30000)

-        # Automagically sync with the API every minute.
- self.sync_update = QTimer() - self.sync_update.timeout.connect(self.sync_api) - self.sync_update.start(1000 * 60) # every minute. - # Run export object in a separate thread context (a reference to the # thread is kept on self such that it does not get garbage collected # after this method returns) - we want to keep our export thread around for @@ -280,20 +282,6 @@ def call_api(self, new_api_thread.start() def on_queue_paused(self) -> None: - # TODO: remove if block once https://github.com/freedomofpress/securedrop-client/pull/739 - # is merged and rely on continuous metadata sync to encounter same auth error from the - # server which will log the user out in the on_sync_failure handler - if ( - not self.api or - not self.api_job_queue.main_queue.api_client or - not self.api_job_queue.download_file_queue.api_client or - not self.api_job_queue.metadata_queue.api_client - ): - self.invalidate_token() - self.logout() - self.gui.show_login(error=_('Your session expired. Please log in again.')) - return - self.gui.update_error_status( _('The SecureDrop server cannot be reached.'), duration=0, @@ -350,7 +338,7 @@ def on_authenticate_success(self, result): self.gui.show_main_window(user) self.update_sources() self.api_job_queue.login(self.api) - self.sync_api() + self.api_sync.start(self.api) self.is_authenticated = True self.resume_queues() @@ -359,6 +347,7 @@ def on_authenticate_failure(self, result: Exception) -> None: self.invalidate_token() error = _('There was a problem signing in. Please verify your credentials and try again.') self.gui.show_login_error(error=error) + self.api_sync.stop() def login_offline_mode(self): """ @@ -384,24 +373,6 @@ def authenticated(self): """ return bool(self.api and self.api.token is not None) - def sync_api(self): - """ - Grab data from the remote SecureDrop API in a non-blocking manner. - """ - logger.debug("In sync_api on thread {}".format(self.thread().currentThreadId())) - if self.authenticated(): - self.sync_events.emit('syncing') - logger.debug("You are authenticated, going to make your call") - - job = MetadataSyncJob(self.data_dir, self.gpg) - job.success_signal.connect(self.on_sync_success, type=Qt.QueuedConnection) - job.failure_signal.connect(self.on_sync_failure, type=Qt.QueuedConnection) - - self.api_job_queue.enqueue(job) - - logger.debug("In sync_api, after call to submit job to queue, on " - "thread {}".format(self.thread().currentThreadId())) - def last_sync(self): """ Returns the time of last synchronisation with the remote SD server. @@ -412,6 +383,9 @@ def last_sync(self): except Exception: return None + def on_sync_started(self) -> None: + self.sync_events.emit('syncing') + def on_sync_success(self) -> None: """ Called when syncronisation of data via the API queue succeeds. 
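The logic.py hunks above replace the old timer-driven `sync_api` call with an `ApiSync` object whose `sync_started`, `sync_success`, and `sync_failure` signals are connected back to the Controller with `Qt.QueuedConnection`, so the handlers run on the Controller's thread even though the sync work happens on a background `QThread`. A minimal, self-contained sketch of that wiring pattern (the `Worker`/`Owner` names are illustrative, not SecureDrop code):

```python
# Minimal sketch of the worker-on-a-QThread pattern used by ApiSync.
import sys

from PyQt5.QtCore import QCoreApplication, QObject, QThread, Qt, pyqtSignal


class Worker(QObject):
    started = pyqtSignal()
    succeeded = pyqtSignal()

    def run(self) -> None:
        # Executes on the worker thread once the thread starts.
        self.started.emit()
        self.succeeded.emit()


class Owner(QObject):
    def __init__(self) -> None:
        super().__init__()
        self.sync_thread = QThread()
        self.worker = Worker()
        self.worker.moveToThread(self.sync_thread)
        self.sync_thread.started.connect(self.worker.run)
        # QueuedConnection delivers each signal on the Owner's thread,
        # not on the worker thread that emitted it.
        self.worker.started.connect(self.on_started, type=Qt.QueuedConnection)
        self.worker.succeeded.connect(self.on_succeeded, type=Qt.QueuedConnection)

    def on_started(self) -> None:
        print('sync started')

    def on_succeeded(self) -> None:
        print('sync succeeded')
        self.sync_thread.quit()


app = QCoreApplication(sys.argv)
owner = Owner()
owner.worker.succeeded.connect(app.quit, type=Qt.QueuedConnection)
owner.sync_thread.start()
rc = app.exec_()
owner.sync_thread.wait()
sys.exit(rc)
```

A direct connection would instead invoke the slots on the emitting (worker) thread, which is why the patch passes `type=Qt.QueuedConnection` explicitly.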
@@ -506,6 +480,7 @@ def logout(self): for failed_reply in failed_replies: self.reply_failed.emit(failed_reply.uuid) + self.api_sync.stop() self.api_job_queue.logout() self.gui.logout() diff --git a/securedrop_client/queue.py b/securedrop_client/queue.py index 67295b0ff..8cc1a8aeb 100644 --- a/securedrop_client/queue.py +++ b/securedrop_client/queue.py @@ -10,7 +10,7 @@ from securedrop_client.api_jobs.base import ApiJob, ApiInaccessibleError, DEFAULT_NUM_ATTEMPTS, \ PauseQueueJob from securedrop_client.api_jobs.downloads import (FileDownloadJob, MessageDownloadJob, - ReplyDownloadJob, MetadataSyncJob) + ReplyDownloadJob) from securedrop_client.api_jobs.sources import DeleteSourceJob from securedrop_client.api_jobs.uploads import SendReplyJob from securedrop_client.api_jobs.updatestar import UpdateStarJob @@ -46,9 +46,8 @@ class RunnableQueue(QObject): DeleteSourceJob: 14, SendReplyJob: 15, UpdateStarJob: 16, - MetadataSyncJob: 17, - MessageDownloadJob: 18, - ReplyDownloadJob: 18, + MessageDownloadJob: 17, + ReplyDownloadJob: 17, } ''' @@ -61,11 +60,6 @@ class RunnableQueue(QObject): ''' resume = pyqtSignal() - """ - Signal emitted when the queue successfully. - """ - pinged = pyqtSignal() - def __init__(self, api_client: API, session_maker: scoped_session, size: int = 0) -> None: """ A size of zero means there's no upper bound to the queue size. @@ -92,9 +86,7 @@ def add_job(self, job: ApiJob) -> None: try: self.queue.put_nowait((priority, job)) except Full: - # Pass silently if the queue is full. For use with MetadataSyncJob. - # See #652. - pass + pass # Pass silently if the queue is full def re_add_job(self, job: ApiJob) -> None: ''' @@ -106,9 +98,7 @@ def re_add_job(self, job: ApiJob) -> None: try: self.queue.put_nowait((priority, job)) except Full: - # Pass silently if the queue is full. For use with MetadataSyncJob. - # See #652. - pass + pass # Pass silently if the queue is full @pyqtSlot() def process(self) -> None: @@ -118,10 +108,16 @@ def process(self) -> None: If the job is a PauseQueueJob, emit the paused signal and return from the processing loop so that no more jobs are processed until the queue resumes. - If the job raises RequestTimeoutError or ApiInaccessibleError, then: + If the job raises RequestTimeoutError, then: (1) Add a PauseQueuejob to the queue (2) Add the job back to the queue so that it can be reprocessed once the queue is resumed. + If the job raises ApiInaccessibleError, then: + (1) Set the token to None so that the queue manager will stop enqueuing jobs since we are + no longer able to make api requests. + (2) Return from the processing loop since a valid token will be needed in order to process + jobs. + Note: Generic exceptions are handled in _do_call_api. 
''' while True: @@ -135,11 +131,10 @@ def process(self) -> None: try: session = self.session_maker() job._do_call_api(self.api_client, session) - self.pinged.emit() except ApiInaccessibleError as e: logger.debug('Job {} raised an exception: {}: {}'.format(self, type(e).__name__, e)) self.api_client = None - self.add_job(PauseQueueJob()) + return except RequestTimeoutError as e: logger.debug('Job {} raised an exception: {}: {}'.format(self, type(e).__name__, e)) self.add_job(PauseQueueJob()) @@ -162,25 +157,18 @@ def __init__(self, api_client: API, session_maker: scoped_session) -> None: self.main_thread = QThread() self.download_file_thread = QThread() - self.metadata_thread = QThread() self.main_queue = RunnableQueue(api_client, session_maker) self.download_file_queue = RunnableQueue(api_client, session_maker) - self.metadata_queue = RunnableQueue(api_client, session_maker, size=1) self.main_queue.moveToThread(self.main_thread) self.download_file_queue.moveToThread(self.download_file_thread) - self.metadata_queue.moveToThread(self.metadata_thread) self.main_thread.started.connect(self.main_queue.process) self.download_file_thread.started.connect(self.download_file_queue.process) - self.metadata_thread.started.connect(self.metadata_queue.process) self.main_queue.paused.connect(self.on_queue_paused) self.download_file_queue.paused.connect(self.on_queue_paused) - self.metadata_queue.paused.connect(self.on_queue_paused) - - self.metadata_queue.pinged.connect(self.resume_queues) def logout(self) -> None: if self.main_thread.isRunning(): @@ -191,15 +179,10 @@ def logout(self) -> None: logger.debug('Stopping download queue thread') self.download_file_thread.quit() - if self.metadata_thread.isRunning(): - logger.debug('Stopping metadata queue thread') - self.metadata_thread.quit() - def login(self, api_client: API) -> None: logger.debug('Passing API token to queues') self.main_queue.api_client = api_client self.download_file_queue.api_client = api_client - self.metadata_queue.api_client = api_client self.start_queues() def start_queues(self) -> None: @@ -211,31 +194,20 @@ def start_queues(self) -> None: logger.debug('Starting download thread') self.download_file_thread.start() - if not self.metadata_thread.isRunning(): - logger.debug("Starting metadata thread") - self.metadata_thread.start() - def on_queue_paused(self) -> None: self.paused.emit() def resume_queues(self) -> None: logger.info("Resuming queues") - main_paused = not self.main_thread.isRunning() - download_paused = not self.download_file_thread.isRunning() - metadata_paused = not self.metadata_thread.isRunning() self.start_queues() - if main_paused: + if not self.main_thread.isRunning(): self.main_queue.resume.emit() - if download_paused: + if not self.download_file_thread.isRunning(): self.download_file_queue.resume.emit() - if metadata_paused: - self.metadata_queue.resume.emit() def enqueue(self, job: ApiJob) -> None: # Prevent api jobs being added to the queue when not logged in. 
-        if (not self.main_queue.api_client or
-                not self.download_file_queue.api_client or
-                not self.metadata_queue.api_client):
+        if (not self.main_queue.api_client or not self.download_file_queue.api_client):
             logger.info('Not adding job, we are not logged in')
             return

@@ -245,9 +217,6 @@ def enqueue(self, job: ApiJob) -> None:
         if isinstance(job, FileDownloadJob):
             logger.debug('Adding job to download queue')
             self.download_file_queue.add_job(job)
-        elif isinstance(job, MetadataSyncJob):
-            logger.debug("Adding job to metadata queue")
-            self.metadata_queue.add_job(job)
         else:
             logger.debug('Adding job to main queue')
             self.main_queue.add_job(job)
diff --git a/securedrop_client/sync.py b/securedrop_client/sync.py
new file mode 100644
index 000000000..cac434b69
--- /dev/null
+++ b/securedrop_client/sync.py
@@ -0,0 +1,124 @@
+import logging
+
+from PyQt5.QtCore import pyqtSignal, QObject, QThread, QTimer, Qt
+from sqlalchemy.orm import scoped_session
+from sdclientapi import API, RequestTimeoutError
+
+from securedrop_client.api_jobs.base import ApiInaccessibleError
+from securedrop_client.api_jobs.sync import MetadataSyncJob
+from securedrop_client.crypto import GpgHelper
+
+
+logger = logging.getLogger(__name__)
+
+
+class ApiSync(QObject):
+    '''
+    ApiSync continuously syncs, waiting 15 seconds between successive syncs.
+    '''
+
+    sync_started = pyqtSignal()
+    sync_success = pyqtSignal()
+    sync_failure = pyqtSignal(Exception)
+
+    TIME_BETWEEN_SYNCS_MS = 1000 * 15  # fifteen seconds between syncs
+
+    def __init__(
+        self, api_client: API, session_maker: scoped_session, gpg: GpgHelper, data_dir: str
+    ):
+        super().__init__()
+        self.api_client = api_client
+
+        self.sync_thread = QThread()
+        self.api_sync_bg_task = ApiSyncBackgroundTask(
+            api_client,
+            session_maker,
+            gpg,
+            data_dir,
+            self.sync_started,
+            self.on_sync_success,
+            self.on_sync_failure)
+        self.api_sync_bg_task.moveToThread(self.sync_thread)
+
+        self.sync_thread.started.connect(self.api_sync_bg_task.sync)
+
+    def start(self, api_client: API) -> None:
+        '''
+        Start metadata syncs.
+        '''
+        self.api_client = api_client
+
+        if not self.sync_thread.isRunning():
+            logger.debug('Starting sync thread')
+            self.api_sync_bg_task.api_client = self.api_client
+            self.sync_thread.start()
+
+    def stop(self) -> None:
+        '''
+        Stop metadata syncs.
+        '''
+        self.api_client = None
+
+        if self.sync_thread.isRunning():
+            logger.debug('Stopping sync thread')
+            self.sync_thread.quit()
+
+    def on_sync_success(self) -> None:
+        '''
+        Start another sync on success.
+        '''
+        self.sync_success.emit()
+        QTimer.singleShot(self.TIME_BETWEEN_SYNCS_MS, self.api_sync_bg_task.sync)
+
+    def on_sync_failure(self, result: Exception) -> None:
+        '''
+        Only start another sync on failure if the reason was a request timeout.
+        '''
+        self.sync_failure.emit(result)
+        if isinstance(result, RequestTimeoutError):
+            QTimer.singleShot(self.TIME_BETWEEN_SYNCS_MS, self.api_sync_bg_task.sync)
+
+
+class ApiSyncBackgroundTask(QObject):
+    '''
+    ApiSyncBackgroundTask provides a sync method that executes a MetadataSyncJob.
+ ''' + + def __init__( + self, + api_client: API, + session_maker: scoped_session, + gpg: GpgHelper, + data_dir: str, + sync_started: pyqtSignal, + on_sync_success, + on_sync_failure + ): + super().__init__() + + self.api_client = api_client + self.session_maker = session_maker + self.gpg = gpg + self.data_dir = data_dir + self.sync_started = sync_started + self.on_sync_success = on_sync_success + self.on_sync_failure = on_sync_failure + + self.job = MetadataSyncJob(self.data_dir, self.gpg) + self.job.success_signal.connect(self.on_sync_success, type=Qt.QueuedConnection) + self.job.failure_signal.connect(self.on_sync_failure, type=Qt.QueuedConnection) + + def sync(self) -> None: + ''' + Create and run a new MetadataSyncJob. + ''' + try: + self.sync_started.emit() + session = self.session_maker() + self.job._do_call_api(self.api_client, session) + except ApiInaccessibleError as e: + self.job.failure_signal.emit(e) # the job's failure signal is not emitted in base + except Exception: + pass # the job's failure signal is emitted for everything else in base + finally: + session.close() diff --git a/tests/api_jobs/test_downloads.py b/tests/api_jobs/test_downloads.py index e8b0c5772..18dc1df9a 100644 --- a/tests/api_jobs/test_downloads.py +++ b/tests/api_jobs/test_downloads.py @@ -1,13 +1,12 @@ import os import pytest from typing import Tuple -from uuid import UUID from sdclientapi import BaseError from sdclientapi import Submission as SdkSubmission from securedrop_client.api_jobs.downloads import DownloadJob, FileDownloadJob, MessageDownloadJob, \ - ReplyDownloadJob, DownloadChecksumMismatchException, MetadataSyncJob + ReplyDownloadJob, DownloadChecksumMismatchException from securedrop_client.crypto import GpgHelper, CryptoError from tests import factory @@ -22,159 +21,6 @@ def patch_decrypt(mocker, homedir, gpghelper, filename): return mock_decrypt -def test_MetadataSyncJob_success(mocker, homedir, session, session_maker): - gpg = GpgHelper(homedir, session_maker, is_qubes=False) - job = MetadataSyncJob(homedir, gpg) - - mock_source = mocker.MagicMock() - mock_source.uuid = 'bar' - mock_source.key = { - 'type': 'PGP', - 'public': PUB_KEY, - 'fingerprint': '123456ABC', - } - - mock_key_import = mocker.patch.object(job.gpg, 'import_key') - mock_get_remote_data = mocker.patch( - 'securedrop_client.api_jobs.downloads.get_remote_data', - return_value=([mock_source], 'submissions', 'replies')) - - api_client = mocker.MagicMock() - api_client.default_request_timeout = mocker.MagicMock() - - mocker.patch( - 'securedrop_client.api_jobs.downloads.update_local_storage', - return_value=([mock_source], 'submissions', 'replies')) - - job.call_api(api_client, session) - - assert mock_key_import.call_args[0][0] == mock_source.uuid - assert mock_key_import.call_args[0][1] == mock_source.key['public'] - assert mock_key_import.call_args[0][2] == mock_source.key['fingerprint'] - assert mock_get_remote_data.call_count == 1 - - -def test_MetadataSyncJob_only_imports_new_source_keys(mocker, homedir, session, session_maker): - """ - Verify that we only import source keys we don't already have. 
- """ - class LimitedImportGpgHelper(GpgHelper): - def import_key(self, source_uuid: UUID, key_data: str, fingerprint: str) -> None: - self._import(key_data) - - gpg = LimitedImportGpgHelper(homedir, session_maker, is_qubes=False) - job = MetadataSyncJob(homedir, gpg) - - mock_source = mocker.MagicMock() - mock_source.uuid = 'bar' - mock_source.key = { - 'type': 'PGP', - 'public': PUB_KEY, - 'fingerprint': 'B2FF7FB28EED8CABEBC5FB6C6179D97BCFA52E5F', - } - - mock_get_remote_data = mocker.patch( - 'securedrop_client.api_jobs.downloads.get_remote_data', - return_value=([mock_source], [], [])) - - api_client = mocker.MagicMock() - api_client.default_request_timeout = mocker.MagicMock() - - mocker.patch( - 'securedrop_client.api_jobs.downloads.update_local_storage', - return_value=([mock_source], [], [])) - - mock_logger = mocker.patch('securedrop_client.api_jobs.downloads.logger') - - job.call_api(api_client, session) - - assert mock_get_remote_data.call_count == 1 - assert len(gpg.fingerprints()) == 2 - - log_msg = mock_logger.debug.call_args_list[0][0][0] - assert log_msg.startswith( - 'Importing key with fingerprint {}'.format(mock_source.key['fingerprint']) - ) - - job.call_api(api_client, session) - - assert mock_get_remote_data.call_count == 2 - - log_msg = mock_logger.debug.call_args_list[1][0][0] - assert log_msg.startswith( - 'Skipping import of key with fingerprint {}'.format(mock_source.key['fingerprint']) - ) - - -def test_MetadataSyncJob_success_with_key_import_fail(mocker, homedir, session, session_maker): - """ - Check that we can gracefully handle a key import failure. - """ - gpg = GpgHelper(homedir, session_maker, is_qubes=False) - job = MetadataSyncJob(homedir, gpg) - - mock_source = mocker.MagicMock() - mock_source.uuid = 'bar' - mock_source.key = { - 'type': 'PGP', - 'public': PUB_KEY, - 'fingerprint': '123456ABC', - } - - mock_key_import = mocker.patch.object(job.gpg, 'import_key', - side_effect=CryptoError) - mock_get_remote_data = mocker.patch( - 'securedrop_client.api_jobs.downloads.get_remote_data', - return_value=([mock_source], 'submissions', 'replies')) - - api_client = mocker.MagicMock() - api_client.default_request_timeout = mocker.MagicMock() - - mocker.patch( - 'securedrop_client.api_jobs.downloads.update_local_storage', - return_value=([mock_source], 'submissions', 'replies')) - - job.call_api(api_client, session) - - assert mock_key_import.call_args[0][0] == mock_source.uuid - assert mock_key_import.call_args[0][1] == mock_source.key['public'] - assert mock_key_import.call_args[0][2] == mock_source.key['fingerprint'] - assert mock_get_remote_data.call_count == 1 - - -def test_MetadataSyncJob_success_with_missing_key(mocker, homedir, session, session_maker): - """ - Check that we can gracefully handle missing source keys. 
-    """
-    gpg = GpgHelper(homedir, session_maker, is_qubes=False)
-    job = MetadataSyncJob(homedir, gpg)
-
-    mock_source = mocker.MagicMock()
-    mock_source.uuid = 'bar'
-    mock_source.key = {
-        'type': 'PGP',
-        'pub_key': '',
-        'fingerprint': ''
-    }
-
-    mock_key_import = mocker.patch.object(job.gpg, 'import_key')
-    mock_get_remote_data = mocker.patch(
-        'securedrop_client.api_jobs.downloads.get_remote_data',
-        return_value=([mock_source], 'submissions', 'replies'))
-
-    api_client = mocker.MagicMock()
-    api_client.default_request_timeout = mocker.MagicMock()
-
-    mocker.patch(
-        'securedrop_client.api_jobs.downloads.update_local_storage',
-        return_value=([mock_source], 'submissions', 'replies'))
-
-    job.call_api(api_client, session)
-
-    assert mock_key_import.call_count == 0
-    assert mock_get_remote_data.call_count == 1
-
-
 def test_MessageDownloadJob_raises_NotImplementedError(mocker):
     job = DownloadJob('mock')
diff --git a/tests/api_jobs/test_sync.py b/tests/api_jobs/test_sync.py
new file mode 100644
index 000000000..570d16d8f
--- /dev/null
+++ b/tests/api_jobs/test_sync.py
@@ -0,0 +1,161 @@
+import os
+from uuid import UUID
+
+from securedrop_client.api_jobs.sync import MetadataSyncJob
+from securedrop_client.crypto import GpgHelper, CryptoError
+
+with open(os.path.join(os.path.dirname(__file__), '..', 'files', 'test-key.gpg.pub.asc')) as f:
+    PUB_KEY = f.read()
+
+
+def test_MetadataSyncJob_success(mocker, homedir, session, session_maker):
+    gpg = GpgHelper(homedir, session_maker, is_qubes=False)
+    job = MetadataSyncJob(homedir, gpg)
+
+    mock_source = mocker.MagicMock()
+    mock_source.uuid = 'bar'
+    mock_source.key = {
+        'type': 'PGP',
+        'public': PUB_KEY,
+        'fingerprint': '123456ABC',
+    }
+
+    mock_key_import = mocker.patch.object(job.gpg, 'import_key')
+    mock_get_remote_data = mocker.patch(
+        'securedrop_client.api_jobs.sync.get_remote_data',
+        return_value=([mock_source], 'submissions', 'replies'))
+
+    api_client = mocker.MagicMock()
+    api_client.default_request_timeout = mocker.MagicMock()
+
+    mocker.patch(
+        'securedrop_client.api_jobs.sync.update_local_storage',
+        return_value=([mock_source], 'submissions', 'replies'))
+
+    job.call_api(api_client, session)
+
+    assert mock_key_import.call_args[0][0] == mock_source.uuid
+    assert mock_key_import.call_args[0][1] == mock_source.key['public']
+    assert mock_key_import.call_args[0][2] == mock_source.key['fingerprint']
+    assert mock_get_remote_data.call_count == 1
+
+
+def test_MetadataSyncJob_success_with_key_import_fail(mocker, homedir, session, session_maker):
+    """
+    Check that we can gracefully handle a key import failure.
+ """ + gpg = GpgHelper(homedir, session_maker, is_qubes=False) + job = MetadataSyncJob(homedir, gpg) + + mock_source = mocker.MagicMock() + mock_source.uuid = 'bar' + mock_source.key = { + 'type': 'PGP', + 'public': PUB_KEY, + 'fingerprint': '123456ABC', + } + + mock_key_import = mocker.patch.object(job.gpg, 'import_key', + side_effect=CryptoError) + mock_get_remote_data = mocker.patch( + 'securedrop_client.api_jobs.sync.get_remote_data', + return_value=([mock_source], 'submissions', 'replies')) + + api_client = mocker.MagicMock() + api_client.default_request_timeout = mocker.MagicMock() + + mocker.patch( + 'securedrop_client.api_jobs.sync.update_local_storage', + return_value=([mock_source], 'submissions', 'replies')) + + job.call_api(api_client, session) + + assert mock_key_import.call_args[0][0] == mock_source.uuid + assert mock_key_import.call_args[0][1] == mock_source.key['public'] + assert mock_key_import.call_args[0][2] == mock_source.key['fingerprint'] + assert mock_get_remote_data.call_count == 1 + + +def test_MetadataSyncJob_success_with_missing_key(mocker, homedir, session, session_maker): + """ + Check that we can gracefully handle missing source keys. + """ + gpg = GpgHelper(homedir, session_maker, is_qubes=False) + job = MetadataSyncJob(homedir, gpg) + + mock_source = mocker.MagicMock() + mock_source.uuid = 'bar' + mock_source.key = { + 'type': 'PGP', + 'pub_key': '', + 'fingerprint': '' + } + + mock_key_import = mocker.patch.object(job.gpg, 'import_key') + mock_get_remote_data = mocker.patch( + 'securedrop_client.api_jobs.sync.get_remote_data', + return_value=([mock_source], 'submissions', 'replies')) + + api_client = mocker.MagicMock() + api_client.default_request_timeout = mocker.MagicMock() + + mocker.patch( + 'securedrop_client.api_jobs.sync.update_local_storage', + return_value=([mock_source], 'submissions', 'replies')) + + job.call_api(api_client, session) + + assert mock_key_import.call_count == 0 + assert mock_get_remote_data.call_count == 1 + + +def test_MetadataSyncJob_only_import_new_source_keys(mocker, homedir, session, session_maker): + """ + Verify that we only import source keys we don't already have. 
+    """
+    class LimitedImportGpgHelper(GpgHelper):
+        def import_key(self, source_uuid: UUID, key_data: str, fingerprint: str) -> None:
+            self._import(key_data)
+
+    gpg = LimitedImportGpgHelper(homedir, session_maker, is_qubes=False)
+    job = MetadataSyncJob(homedir, gpg)
+
+    mock_source = mocker.MagicMock()
+    mock_source.uuid = 'bar'
+    mock_source.key = {
+        'type': 'PGP',
+        'public': PUB_KEY,
+        'fingerprint': 'B2FF7FB28EED8CABEBC5FB6C6179D97BCFA52E5F',
+    }
+
+    mock_get_remote_data = mocker.patch(
+        'securedrop_client.api_jobs.sync.get_remote_data',
+        return_value=([mock_source], [], []))
+
+    api_client = mocker.MagicMock()
+    api_client.default_request_timeout = mocker.MagicMock()
+
+    mocker.patch(
+        'securedrop_client.api_jobs.sync.update_local_storage',
+        return_value=([mock_source], [], []))
+
+    mock_logger = mocker.patch('securedrop_client.api_jobs.sync.logger')
+
+    job.call_api(api_client, session)
+
+    assert mock_get_remote_data.call_count == 1
+    assert len(gpg.fingerprints()) == 2
+
+    log_msg = mock_logger.debug.call_args_list[0][0][0]
+    assert log_msg.startswith(
+        'Importing key with fingerprint {}'.format(mock_source.key['fingerprint'])
+    )
+
+    job.call_api(api_client, session)
+
+    assert mock_get_remote_data.call_count == 2
+
+    log_msg = mock_logger.debug.call_args_list[1][0][0]
+    assert log_msg.startswith(
+        'Skipping import of key with fingerprint {}'.format(mock_source.key['fingerprint'])
+    )
diff --git a/tests/test_logic.py b/tests/test_logic.py
index a3bc3f7a8..45bbe1247 100644
--- a/tests/test_logic.py
+++ b/tests/test_logic.py
@@ -176,10 +176,12 @@ def test_Controller_on_authenticate_failure(homedir, config, mocker, session_mak
     mock_gui = mocker.MagicMock()

     co = Controller('http://localhost', mock_gui, session_maker, homedir)
+    co.api_sync.stop = mocker.MagicMock()
     result_data = Exception('oh no')

     co.on_authenticate_failure(result_data)

+    co.api_sync.stop.assert_called_once_with()
     mock_gui.show_login_error.\
         assert_called_once_with(error='There was a problem signing in. Please '
                                 'verify your credentials and try again.')
@@ -195,7 +197,7 @@ def test_Controller_on_authenticate_success(homedir, config, mocker, session_mak
     mock_gui = mocker.MagicMock()
     mock_api_job_queue = mocker.patch("securedrop_client.logic.ApiJobQueue")
     co = Controller('http://localhost', mock_gui, session_maker, homedir)
-    co.sync_api = mocker.MagicMock()
+    co.api_sync.start = mocker.MagicMock()
     co.update_sources = mocker.MagicMock()
     co.session.add(user)
     co.session.commit()
@@ -209,8 +211,7 @@

     co.on_authenticate_success(True)

-    co.sync_api.assert_called_once_with()
-    co.update_sources.assert_called_once_with()
+    co.api_sync.start.assert_called_once_with(co.api)
     assert co.is_authenticated
     assert mock_api_job_queue.called
     login.assert_called_with(co.api)
@@ -342,41 +343,6 @@ def test_Controller_authenticated_no_api(homedir, config, mocker, session_maker)
     assert co.authenticated() is False


-def test_Controller_sync_api_not_authenticated(homedir, config, mocker, session_maker):
-    """
-    If the API isn't authenticated, don't sync.
-    Using the `config` fixture to ensure the config is written to disk.
- """ - mock_gui = mocker.MagicMock() - - co = Controller('http://localhost', mock_gui, session_maker, homedir) - co.authenticated = mocker.MagicMock(return_value=False) - co.api_job_queue = mocker.MagicMock() - co.api_job_queue.enqueue = mocker.MagicMock() - - co.sync_api() - - co.api_job_queue.enqueue.call_count == 0 - - -def test_Controller_sync_api(homedir, config, mocker, session_maker): - """ - Sync the API is authenticated. - Using the `config` fixture to ensure the config is written to disk. - """ - mock_gui = mocker.MagicMock() - - co = Controller('http://localhost', mock_gui, session_maker, homedir) - - co.authenticated = mocker.MagicMock(return_value=True) - co.api_job_queue = mocker.MagicMock() - co.api_job_queue.enqueue = mocker.MagicMock() - - co.sync_api() - - co.api_job_queue.enqueue.call_count == 1 - - def test_Controller_last_sync_with_file(homedir, config, mocker, session_maker): """ The flag indicating the time of the last sync with the API is stored in a @@ -410,6 +377,18 @@ def test_Controller_last_sync_no_file(homedir, config, mocker, session_maker): assert co.last_sync() is None +def test_Controller_on_sync_started(mocker, homedir): + co = Controller('http://localhost', mocker.MagicMock(), mocker.MagicMock(), homedir) + + co.on_sync_started() + + sync_events = mocker.patch.object(co, 'sync_events') + + co.on_sync_started() + + sync_events.emit.assert_called_once_with('syncing') + + def test_Controller_on_sync_failure(homedir, config, mocker, session_maker): """ If there's no result to syncing, then don't attempt to update local storage @@ -532,7 +511,6 @@ def test_Controller_on_update_star_success(homedir, config, mocker, session_make co = Controller('http://localhost', mock_gui, session_maker, homedir) result = True co.call_reset = mocker.MagicMock() - co.sync_api = mocker.MagicMock() co.on_update_star_success(result) assert mock_gui.clear_error_status.called @@ -547,9 +525,7 @@ def test_Controller_on_update_star_failed(homedir, config, mocker, session_maker co = Controller('http://localhost', mock_gui, session_maker, homedir) result = Exception('boom') co.call_reset = mocker.MagicMock() - co.sync_api = mocker.MagicMock() co.on_update_star_failure(result) - co.sync_api.assert_not_called() mock_gui.update_error_status.assert_called_once_with('Failed to update star.') @@ -985,20 +961,17 @@ def test_Controller_on_file_open_file_missing(mocker, homedir, session_maker, se When file does not exist, test that we log and send status update to user. """ co = Controller('http://localhost', mocker.MagicMock(), session_maker, homedir) - co.sync_api = mocker.MagicMock() file = factory.File(source=source['source']) session.add(file) session.commit() mocker.patch('securedrop_client.logic.Controller.get_file', return_value=file) debug_logger = mocker.patch('securedrop_client.logic.logger.debug') - co.sync_api = mocker.MagicMock() co.on_file_open(file) log_msg = 'Cannot find {} in the data directory. 
File does not exist.'.format( file.filename) debug_logger.assert_called_once_with(log_msg) - co.sync_api.assert_not_called() def test_Controller_on_file_open_file_missing_not_qubes( @@ -1009,20 +982,17 @@ def test_Controller_on_file_open_file_missing_not_qubes( """ co = Controller('http://localhost', mocker.MagicMock(), session_maker, homedir) co.qubes = False - co.sync_api = mocker.MagicMock() file = factory.File(source=source['source']) session.add(file) session.commit() mocker.patch('securedrop_client.logic.Controller.get_file', return_value=file) debug_logger = mocker.patch('securedrop_client.logic.logger.debug') - co.sync_api = mocker.MagicMock() co.on_file_open(file) log_msg = 'Cannot find {} in the data directory. File does not exist.'.format( file.filename) debug_logger.assert_called_once_with(log_msg) - co.sync_api.assert_not_called() def test_Controller_download_new_replies_with_new_reply(mocker, session, session_maker, homedir): @@ -1250,7 +1220,6 @@ def test_Controller_on_delete_source_failure(homedir, config, mocker, session_ma ''' mock_gui = mocker.MagicMock() co = Controller('http://localhost', mock_gui, session_maker, homedir) - co.sync_api = mocker.MagicMock() co.on_delete_source_failure(Exception()) co.gui.update_error_status.assert_called_with('Failed to delete source at server') @@ -1349,8 +1318,6 @@ def test_Controller_on_reply_success(homedir, mocker, session_maker, session): Check that when the method is called, the client emits the correct signal. ''' co = Controller('http://localhost', mocker.MagicMock(), session_maker, homedir) - co.session = mocker.MagicMock() - mocker.patch.object(co, 'sync_api') reply_succeeded = mocker.patch.object(co, 'reply_succeeded') reply_failed = mocker.patch.object(co, 'reply_failed') reply = factory.Reply(source=factory.Source()) @@ -1368,7 +1335,6 @@ def test_Controller_on_reply_success(homedir, mocker, session_maker, session): assert debug_logger.call_args_list[0][0][0] == '{} sent successfully'.format(reply.uuid) reply_succeeded.emit.assert_called_once_with("a_uuid", reply.uuid, "a message") reply_failed.emit.assert_not_called() - co.sync_api.assert_not_called() def test_Controller_on_reply_failure(homedir, mocker, session_maker): @@ -1464,24 +1430,6 @@ def test_Controller_on_queue_paused(homedir, config, mocker, session_maker): 'The SecureDrop server cannot be reached.', duration=0, retry=True) -def test_Controller_on_queue_paused_due_to_invalid_token(homedir, config, mocker, session_maker): - """ - If the api is inaccessible then ensure user is logged out and shown the login window. Also check - that "SecureDrop server cannot be reached" is not shown when the user is not authenticated. - """ - gui = mocker.MagicMock() - co = Controller('http://localhost', gui, session_maker, homedir) - co.api = None - co.logout = mocker.MagicMock() - co.gui = mocker.MagicMock() - co.gui.show_login = mocker.MagicMock() - - co.on_queue_paused() - - co.logout.assert_called_once_with() - co.gui.show_login.assert_called_once_with(error='Your session expired. Please log in again.') - - def test_Controller_call_update_star_success(homedir, config, mocker, session_maker, session): ''' Check that a UpdateStar is submitted to the queue when update_star is called. @@ -1569,20 +1517,16 @@ def test_Controller_print_file_file_missing(homedir, mocker, session, session_ma should be communicated to the user. 
""" co = Controller('http://localhost', mocker.MagicMock(), session_maker, homedir) - co.sync_api = mocker.MagicMock() file = factory.File(source=factory.Source()) session.add(file) session.commit() mocker.patch('securedrop_client.logic.Controller.get_file', return_value=file) debug_logger = mocker.patch('securedrop_client.logic.logger.debug') - co.sync_api = mocker.MagicMock() co.print_file(file.uuid) - log_msg = 'Cannot find {} in the data directory. File does not exist.'.format( - file.filename) + log_msg = 'Cannot find {} in the data directory. File does not exist.'.format(file.filename) debug_logger.assert_called_once_with(log_msg) - co.sync_api.assert_not_called() def test_Controller_print_file_file_missing_not_qubes( @@ -1594,20 +1538,17 @@ def test_Controller_print_file_file_missing_not_qubes( """ co = Controller('http://localhost', mocker.MagicMock(), session_maker, homedir) co.qubes = False - co.sync_api = mocker.MagicMock() file = factory.File(source=factory.Source()) session.add(file) session.commit() mocker.patch('securedrop_client.logic.Controller.get_file', return_value=file) debug_logger = mocker.patch('securedrop_client.logic.logger.debug') - co.sync_api = mocker.MagicMock() co.print_file(file.uuid) log_msg = 'Cannot find {} in the data directory. File does not exist.'.format( file.filename) debug_logger.assert_called_once_with(log_msg) - co.sync_api.assert_not_called() def test_Controller_print_file_when_orig_file_already_exists( @@ -1744,20 +1685,17 @@ def test_Controller_export_file_to_usb_drive_file_missing(homedir, mocker, sessi should be communicated to the user. """ co = Controller('http://localhost', mocker.MagicMock(), session_maker, homedir) - co.sync_api = mocker.MagicMock() file = factory.File(source=factory.Source()) session.add(file) session.commit() mocker.patch('securedrop_client.logic.Controller.get_file', return_value=file) debug_logger = mocker.patch('securedrop_client.logic.logger.debug') - co.sync_api = mocker.MagicMock() co.export_file_to_usb_drive(file.uuid, 'mock passphrase') log_msg = 'Cannot find {} in the data directory. File does not exist.'.format( file.filename) debug_logger.assert_called_once_with(log_msg) - co.sync_api.assert_not_called() def test_Controller_export_file_to_usb_drive_file_missing_not_qubes( @@ -1769,20 +1707,17 @@ def test_Controller_export_file_to_usb_drive_file_missing_not_qubes( """ co = Controller('http://localhost', mocker.MagicMock(), session_maker, homedir) co.qubes = False - co.sync_api = mocker.MagicMock() file = factory.File(source=factory.Source()) session.add(file) session.commit() mocker.patch('securedrop_client.logic.Controller.get_file', return_value=file) debug_logger = mocker.patch('securedrop_client.logic.logger.debug') - co.sync_api = mocker.MagicMock() co.export_file_to_usb_drive(file.uuid, 'mock passphrase') log_msg = 'Cannot find {} in the data directory. 
File does not exist.'.format( file.filename) debug_logger.assert_called_once_with(log_msg) - co.sync_api.assert_not_called() def test_Controller_export_file_to_usb_drive_when_orig_file_already_exists( diff --git a/tests/test_queue.py b/tests/test_queue.py index adcf93f2b..0c438d303 100644 --- a/tests/test_queue.py +++ b/tests/test_queue.py @@ -4,7 +4,7 @@ from queue import Queue, Full from sdclientapi import RequestTimeoutError -from securedrop_client.api_jobs.downloads import FileDownloadJob, MetadataSyncJob +from securedrop_client.api_jobs.downloads import FileDownloadJob from securedrop_client.api_jobs.base import ApiInaccessibleError, PauseQueueJob from securedrop_client.queue import RunnableQueue, ApiJobQueue from tests import factory @@ -44,24 +44,22 @@ def test_RunnableQueue_happy_path(mocker): assert queue.queue.empty() -def test_RunnableQueue_with_size_constraint(mocker): +def test_RunnableQueue_with_size_constraint(mocker, session_maker): ''' - Add one job to the queue, run it. + Add one job to a queue with the size constraint of 1 and see that the next job is dropped. ''' - mock_api_client = mocker.MagicMock() - mock_session = mocker.MagicMock() - mock_session_maker = mocker.MagicMock(return_value=mock_session) - return_value = 'foo' - - dummy_job_cls = factory.dummy_job_factory(mocker, return_value) - queue = RunnableQueue(mock_api_client, mock_session_maker, size=1) - queue.JOB_PRIORITIES = {dummy_job_cls: 1, PauseQueueJob: 2} + queue = RunnableQueue(mocker.MagicMock(), session_maker, size=1) + job_cls = factory.dummy_job_factory(mocker, 'mock_return_value') + queue.JOB_PRIORITIES = {job_cls: 1} + job1 = job_cls() + job2 = job_cls() - queue.add_job(dummy_job_cls()) - queue.add_job(dummy_job_cls()) - queue.add_job(dummy_job_cls()) + queue.add_job(job1) + queue.add_job(job2) assert queue.queue.qsize() == 1 + assert queue.queue.get(block=True) == (1, job1) + assert queue.queue.qsize() == 0 def test_RunnableQueue_job_timeout(mocker): @@ -201,12 +199,10 @@ def test_RunnableQueue_job_generic_exception(mocker): def test_RunnableQueue_does_not_run_jobs_when_not_authed(mocker): ''' - Check that the queue is paused when a job returns with aApiInaccessibleError. Check that the - job does not get resubmitted since it is not authorized and that its api_client is None. + Check that a job that sees an ApiInaccessibleError does not get resubmitted since it is not + authorized and that its api_client is None. 
''' queue = RunnableQueue(mocker.MagicMock(), mocker.MagicMock()) - queue.paused = mocker.MagicMock() - queue.paused.emit = mocker.MagicMock() job_cls = factory.dummy_job_factory(mocker, ApiInaccessibleError()) queue.JOB_PRIORITIES = {PauseQueueJob: 0, job_cls: 1} @@ -218,7 +214,6 @@ def test_RunnableQueue_does_not_run_jobs_when_not_authed(mocker): queue.process() assert queue.queue.qsize() == 0 # queue should not contain job since it was not resubmitted assert queue.api_client is None - queue.paused.emit.assert_called_once_with() def test_ApiJobQueue_enqueue(mocker): @@ -231,14 +226,11 @@ def test_ApiJobQueue_enqueue(mocker): job_queue.JOB_PRIORITIES = {FileDownloadJob: job_priority, type(dummy_job): job_priority} mock_download_file_queue = mocker.patch.object(job_queue, 'download_file_queue') - mock_metadata_queue = mocker.patch.object(job_queue, 'metadata_queue') mock_main_queue = mocker.patch.object(job_queue, 'main_queue') mock_download_file_add_job = mocker.patch.object(mock_download_file_queue, 'add_job') - mock_metadata_add_job = mocker.patch.object(mock_metadata_queue, 'add_job') mock_main_queue_add_job = mocker.patch.object(mock_main_queue, 'add_job') job_queue.main_queue.api_client = 'has a value' job_queue.download_file_queue.api_client = 'has a value' - job_queue.metadata_queue.api_client = 'has a value' mock_start_queues = mocker.patch.object(job_queue, 'start_queues') dl_job = FileDownloadJob('mock', 'mock', 'mock') @@ -253,10 +245,8 @@ def test_ApiJobQueue_enqueue(mocker): mock_main_queue.reset_mock() mock_main_queue_add_job.reset_mock() - md_job = MetadataSyncJob("mock", "mock") - job_queue.enqueue(md_job) + job_queue.enqueue(FileDownloadJob('mock', 'mock', 'mock')) - mock_metadata_add_job.assert_called_once_with(md_job) assert not mock_main_queue_add_job.called # reset for next test @@ -290,10 +280,8 @@ def test_ApiJobQueue_resume_queues_emits_resume_signal(mocker): job_queue = ApiJobQueue(mocker.MagicMock(), mocker.MagicMock()) mocker.patch.object(job_queue.main_queue, 'resume') mocker.patch.object(job_queue.download_file_queue, 'resume') - mocker.patch.object(job_queue.metadata_queue, 'resume') job_queue.main_thread.isRunning = mocker.MagicMock(return_value=False) job_queue.download_file_thread.isRunning = mocker.MagicMock(return_value=False) - job_queue.metadata_thread.isRunning = mocker.MagicMock(return_value=False) job_queue.start_queues = mocker.MagicMock() job_queue.resume_queues() @@ -301,7 +289,6 @@ def test_ApiJobQueue_resume_queues_emits_resume_signal(mocker): job_queue.start_queues.assert_called_once_with() job_queue.main_queue.resume.emit.assert_called_once_with() job_queue.download_file_queue.resume.emit.assert_called_once_with() - job_queue.metadata_queue.resume.emit.assert_called_once_with() def test_ApiJobQueue_enqueue_no_auth(mocker): @@ -311,13 +298,10 @@ def test_ApiJobQueue_enqueue_no_auth(mocker): job_queue = ApiJobQueue(mock_client, mock_session_maker) mock_download_file_queue = mocker.patch.object(job_queue, 'download_file_queue') mock_main_queue = mocker.patch.object(job_queue, 'main_queue') - mock_metadata_queue = mocker.patch.object(job_queue, 'metadata_queue') mock_download_file_add_job = mocker.patch.object(mock_download_file_queue, 'add_job') mock_main_queue_add_job = mocker.patch.object(mock_main_queue, 'add_job') - mock_metadata_queue_add_job = mocker.patch.object(mock_metadata_queue, "add_job") job_queue.main_queue.api_client = None job_queue.download_file_queue.api_client = None - job_queue.metadata_queue.api_client = None 
mock_start_queues = mocker.patch.object(job_queue, 'start_queues') dummy_job = factory.dummy_job_factory(mocker, 'mock')() @@ -326,7 +310,6 @@ def test_ApiJobQueue_enqueue_no_auth(mocker): assert mock_download_file_add_job.call_count == 0 assert mock_main_queue_add_job.call_count == 0 - assert mock_metadata_queue_add_job.call_count == 0 assert mock_start_queues.call_count == 0 @@ -339,23 +322,18 @@ def test_ApiJobQueue_login_if_queues_not_running(mocker): mock_main_queue = mocker.patch.object(job_queue, 'main_queue') mock_download_file_queue = mocker.patch.object(job_queue, 'download_file_queue') - mock_metadata_queue = mocker.patch.object(job_queue, 'metadata_queue') mock_main_thread = mocker.patch.object(job_queue, 'main_thread') mock_download_file_thread = mocker.patch.object(job_queue, 'download_file_thread') - mock_metadata_thread = mocker.patch.object(job_queue, 'metadata_thread') job_queue.main_thread.isRunning = mocker.MagicMock(return_value=False) job_queue.download_file_thread.isRunning = mocker.MagicMock(return_value=False) - job_queue.metadata_thread.isRunning = mocker.MagicMock(return_value=False) job_queue.login(mock_api) assert mock_main_queue.api_client == mock_api assert mock_download_file_queue.api_client == mock_api - assert mock_metadata_queue.api_client == mock_api mock_main_thread.start.assert_called_once_with() mock_download_file_thread.start.assert_called_once_with() - mock_metadata_thread.start.assert_called_once_with() def test_ApiJobQueue_login_if_queues_running(mocker): @@ -367,23 +345,18 @@ def test_ApiJobQueue_login_if_queues_running(mocker): mock_main_queue = mocker.patch.object(job_queue, 'main_queue') mock_download_file_queue = mocker.patch.object(job_queue, 'download_file_queue') - mock_metadata_queue = mocker.patch.object(job_queue, 'metadata_queue') mock_main_thread = mocker.patch.object(job_queue, 'main_thread') mock_download_file_thread = mocker.patch.object(job_queue, 'download_file_thread') - mock_metadata_thread = mocker.patch.object(job_queue, 'metadata_thread') job_queue.main_thread.isRunning = mocker.MagicMock(return_value=True) job_queue.download_file_thread.isRunning = mocker.MagicMock(return_value=True) - job_queue.metadata_thread.isRunning = mocker.MagicMock(return_value=True) job_queue.login(mock_api) assert mock_main_queue.api_client == mock_api assert mock_download_file_queue.api_client == mock_api - assert mock_metadata_queue.api_client == mock_api assert not mock_main_thread.start.called assert not mock_download_file_thread.start.called - assert not mock_metadata_thread.start.called def test_ApiJobQueue_logout_stops_queue_threads(mocker): @@ -393,17 +366,14 @@ def test_ApiJobQueue_logout_stops_queue_threads(mocker): assert not job_queue.main_thread.isRunning() assert not job_queue.download_file_thread.isRunning() - assert not job_queue.metadata_thread.isRunning() def test_ApiJobQueue_logout_results_in_queue_threads_not_running(mocker): job_queue = ApiJobQueue(mocker.MagicMock(), mocker.MagicMock()) job_queue.main_thread = mocker.MagicMock() job_queue.download_file_thread = mocker.MagicMock() - job_queue.metadata_thread = mocker.MagicMock() job_queue.logout() job_queue.main_thread.quit.assert_called_once_with() job_queue.download_file_thread.quit.assert_called_once_with() - job_queue.metadata_thread.quit.assert_called_once_with() diff --git a/tests/test_sync.py b/tests/test_sync.py new file mode 100644 index 000000000..bd1b4f739 --- /dev/null +++ b/tests/test_sync.py @@ -0,0 +1,169 @@ +from sdclientapi import RequestTimeoutError + +from 
securedrop_client.api_jobs.base import ApiInaccessibleError
+from securedrop_client.sync import ApiSync
+
+
+def test_ApiSync_init(mocker, session_maker, homedir):
+    '''
+    Ensure sync thread is not started in the constructor.
+    '''
+    api_sync = ApiSync(mocker.MagicMock(), session_maker, mocker.MagicMock(), homedir)
+    assert not api_sync.sync_thread.isRunning()
+
+
+def test_ApiSync_start(mocker, session_maker, homedir):
+    '''
+    Ensure sync thread starts when start is called and not already running.
+    '''
+    api_sync = ApiSync(mocker.MagicMock(), session_maker, mocker.MagicMock(), homedir)
+    api_sync.sync_thread = mocker.MagicMock()
+    api_sync.sync_thread.isRunning = mocker.MagicMock(return_value=False)
+
+    api_sync.start(mocker.MagicMock())
+
+    api_sync.sync_thread.start.assert_called_once_with()
+
+
+def test_ApiSync_start_not_called_when_already_started(mocker, session_maker, homedir):
+    '''
+    Ensure sync thread does not start when start is called if already running.
+    '''
+    api_sync = ApiSync(mocker.MagicMock(), session_maker, mocker.MagicMock(), homedir)
+    api_sync.sync_thread = mocker.MagicMock()
+    api_sync.sync_thread.isRunning = mocker.MagicMock(return_value=True)
+
+    api_sync.start(mocker.MagicMock())
+
+    api_sync.sync_thread.start.assert_not_called()
+
+
+def test_ApiSync_stop(mocker, session_maker, homedir):
+    '''
+    Ensure thread is not running when stopped and api_client is None.
+    '''
+    api_sync = ApiSync(mocker.MagicMock(), session_maker, mocker.MagicMock(), homedir)
+
+    api_sync.stop()
+
+    assert api_sync.api_client is None
+    assert not api_sync.sync_thread.isRunning()
+
+
+def test_ApiSync_stop_calls_quit(mocker, session_maker, homedir):
+    '''
+    Ensure stop calls QThread's quit method and sets api_client to None.
+    '''
+    api_sync = ApiSync(mocker.MagicMock(), session_maker, mocker.MagicMock(), homedir)
+    api_sync.sync_thread = mocker.MagicMock()
+    api_sync.sync_thread.isRunning = mocker.MagicMock(return_value=True)
+
+    api_sync.stop()
+
+    assert api_sync.api_client is None
+    api_sync.sync_thread.quit.assert_called_once_with()
+
+
+def test_ApiSyncBackgroundTask_sync(mocker, session_maker, homedir):
+    '''
+    Ensure sync runs a MetadataSyncJob and calls its parent's processing function.
+    '''
+    api_client = mocker.MagicMock()
+    api_sync = ApiSync(api_client, session_maker, mocker.MagicMock(), homedir)
+    sync_started = mocker.patch.object(api_sync.api_sync_bg_task, 'sync_started')
+    _do_call_api_fn = mocker.patch('securedrop_client.sync.MetadataSyncJob._do_call_api')
+
+    api_sync.api_sync_bg_task.sync()
+
+    sync_started.emit.assert_called_once_with()
+    assert _do_call_api_fn.called
+
+
+def test_ApiSyncBackgroundTask_sync_catches_ApiInaccessibleError(mocker, session_maker, homedir):
+    '''
+    Ensure sync calls the parent processing function of MetadataSyncJob, catches the
+    ApiInaccessibleError exception, and emits the failure signal.
+ ''' + api_client = mocker.MagicMock() + api_sync = ApiSync(api_client, session_maker, mocker.MagicMock(), homedir) + sync_started = mocker.patch.object(api_sync.api_sync_bg_task, 'sync_started') + success_signal = mocker.patch('securedrop_client.sync.MetadataSyncJob.success_signal') + failure_signal = mocker.patch('securedrop_client.sync.MetadataSyncJob.failure_signal') + error = ApiInaccessibleError() + _do_call_api_fn = mocker.patch( + 'securedrop_client.sync.MetadataSyncJob._do_call_api', side_effect=error) + + api_sync.api_sync_bg_task.sync() + + assert _do_call_api_fn.called + sync_started.emit.assert_called_once_with() + success_signal.emit.assert_not_called() + failure_signal.emit.assert_called_once_with(error) + + +def test_ApiSyncBackgroundTask_sync_catches_all_other_exceptions(mocker, session_maker, homedir): + ''' + Ensure sync calls the parent processing function of MetadataSyncJob, catches all exceptions, + and emits failure signal. + ''' + api_client = mocker.MagicMock() + api_sync = ApiSync(api_client, session_maker, mocker.MagicMock(), homedir) + sync_started = mocker.patch.object(api_sync.api_sync_bg_task, 'sync_started') + success_signal = mocker.patch('securedrop_client.sync.MetadataSyncJob.success_signal') + failure_signal = mocker.patch('securedrop_client.sync.MetadataSyncJob.failure_signal') + error = Exception() + call_api_fn = mocker.patch( + 'securedrop_client.sync.MetadataSyncJob.call_api', side_effect=error) + + api_sync.api_sync_bg_task.sync() + + assert call_api_fn.called + sync_started.emit.assert_called_once_with() + success_signal.emit.assert_not_called() + failure_signal.emit.assert_called_once_with(error) + + +def test_ApiSync_on_sync_success(mocker, session_maker, homedir): + ''' + Ensure success handler emits success signal that the Controller links to and fires another sync + after a supplied amount of time. + ''' + api_sync = ApiSync(mocker.MagicMock(), session_maker, mocker.MagicMock(), homedir) + sync_success = mocker.patch.object(api_sync, 'sync_success') + + api_sync.on_sync_success() + + sync_success.emit.assert_called_once_with() + + +def test_ApiSync_on_sync_failure(mocker, session_maker, homedir): + ''' + Ensure failure handler emits failure signal that the Controller links to and does not fire + another sync for errors other than RequestTimeoutError + ''' + api_sync = ApiSync(mocker.MagicMock(), session_maker, mocker.MagicMock(), homedir) + sync_failure = mocker.patch.object(api_sync, 'sync_failure') + singleShot_fn = mocker.patch('securedrop_client.sync.QTimer.singleShot') + + error = Exception() + + api_sync.on_sync_failure(error) + + sync_failure.emit.assert_called_once_with(error) + singleShot_fn.assert_not_called() + + +def test_ApiSync_on_sync_failure_because_of_timeout(mocker, session_maker, homedir): + ''' + Ensure failure handler emits failure signal that the Controller links to and sets up timer to + fire another sync after 15 seconds if the failure reason is a RequestTimeoutError. + ''' + api_sync = ApiSync(mocker.MagicMock(), session_maker, mocker.MagicMock(), homedir) + sync_failure = mocker.patch.object(api_sync, 'sync_failure') + singleShot_fn = mocker.patch('securedrop_client.sync.QTimer.singleShot') + error = RequestTimeoutError() + + api_sync.on_sync_failure(error) + + sync_failure.emit.assert_called_once_with(error) + singleShot_fn.assert_called_once_with(15000, api_sync.api_sync_bg_task.sync)
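Taken together, the last three tests pin down ApiSync's scheduling policy: a successful sync always schedules the next attempt, a `RequestTimeoutError` schedules a retry after the same delay, and any other failure ends the loop and leaves recovery to whatever is connected to `sync_failure`. A rough, Qt-free sketch of that policy (stand-in names, not project code):

```python
# Illustrative sketch of the retry policy exercised by the tests above.
import sched
import time

TIME_BETWEEN_SYNCS_S = 15  # mirrors TIME_BETWEEN_SYNCS_MS / 1000


class RequestTimeoutError(Exception):
    """Stand-in for sdclientapi.RequestTimeoutError."""


def run_continuous_sync(sync_once) -> None:
    """Run sync_once repeatedly, pausing between attempts; stop on fatal errors."""
    scheduler = sched.scheduler(time.time, time.sleep)

    def attempt() -> None:
        try:
            sync_once()
        except RequestTimeoutError:
            # Transient network problem: schedule a retry after the usual delay.
            scheduler.enter(TIME_BETWEEN_SYNCS_S, 1, attempt)
        except Exception:
            # Any other failure (e.g. an invalid token) stops the loop;
            # the caller decides what to do next.
            return
        else:
            # Success: schedule the next periodic sync.
            scheduler.enter(TIME_BETWEEN_SYNCS_S, 1, attempt)

    attempt()
    scheduler.run()
```

In `ApiSync` itself the delay is a `QTimer.singleShot(TIME_BETWEEN_SYNCS_MS, ...)` scheduled on the Qt event loop rather than a blocking scheduler, which is why the tests above patch `QTimer.singleShot` and assert on its arguments.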