Skip to content

Commit

Permalink
Fix #180 - Issues with non-NRTM sources which do have serial files.
Browse files Browse the repository at this point in the history
  • Loading branch information
mxsasha committed Jan 31, 2019
1 parent 5940474 commit 135a39b
Show file tree
Hide file tree
Showing 3 changed files with 101 additions and 13 deletions.
4 changes: 4 additions & 0 deletions docs/users/mirroring.rst
Original file line number Diff line number Diff line change
Expand Up @@ -132,6 +132,10 @@ For sources that do not offer NRTM, simply configuring a source of the data in
Journals can not be generated, and NRTM queries by clients for this source will
be rejected.

When ``import_serial_source`` is set, a full import will only be run if the
serial in that file is greater than the highest imported serial so far.
The serial is checked every ``import_timer``.

Downloads
~~~~~~~~~
For downloads, FTP and local files are supported. The full copy to be
Expand Down
18 changes: 13 additions & 5 deletions irrd/mirroring/mirror_runners_import.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,12 @@ def run(self) -> None:

try:
serial_newest_seen, force_reload = self._status()
logger.debug(f'Most recent serial seen for {self.source}: {serial_newest_seen}, force_reload: {force_reload}')
if not serial_newest_seen or force_reload:
self.full_import_runner.run(database_handler=self.database_handler)
nrtm_enabled = bool(get_setting(f'sources.{self.source}.nrtm_host'))
logger.debug(f'Most recent serial seen for {self.source}: {serial_newest_seen},'
f'force_reload: {force_reload}, nrtm enabled: {nrtm_enabled}')
if force_reload or not serial_newest_seen or not nrtm_enabled:
self.full_import_runner.run(database_handler=self.database_handler,
serial_newest_seen=serial_newest_seen, force_reload=force_reload)
else:
self.update_stream_runner.run(serial_newest_seen, database_handler=self.database_handler)

Expand Down Expand Up @@ -72,7 +75,7 @@ class MirrorFullImportRunner:
def __init__(self, source: str) -> None:
self.source = source

def run(self, database_handler: DatabaseHandler):
def run(self, database_handler: DatabaseHandler, serial_newest_seen: Optional[int]=None, force_reload=False):
import_sources = get_setting(f'sources.{self.source}.import_source')
if isinstance(import_sources, str):
import_sources = [import_sources]
Expand All @@ -85,10 +88,15 @@ def run(self, database_handler: DatabaseHandler):
database_handler.delete_all_rpsl_objects_with_journal(self.source)
logger.info(f'Running full import of {self.source} from {import_sources}, serial from {import_serial_source}')

import_serial = 0
import_serial = None
if import_serial_source:
import_serial = int(self._retrieve_file(import_serial_source, return_contents=True)[0])

if not force_reload and serial_newest_seen is not None and import_serial <= serial_newest_seen:
logger.info(f'Current newest serial seen for {self.source} is '
f'{serial_newest_seen}, import_serial is {import_serial}, cancelling import.')
return

import_data = [self._retrieve_file(import_source, return_contents=False) for import_source in import_sources]

database_handler.disable_journaling()
Expand Down
92 changes: 84 additions & 8 deletions irrd/mirroring/tests/test_mirror_runners_import.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,14 @@ def test_full_import_call(self, monkeypatch):
assert len(mock_full_import_runner.mock_calls) == 1
assert mock_full_import_runner.mock_calls[0][0] == 'run'

def test_force_reload(self, monkeypatch):
def test_force_reload(self, monkeypatch, config_override):
config_override({
'sources': {
'TEST': {
'nrtm_host': '192.0.2.1',
}
}
})
mock_dh = Mock()
mock_dq = Mock()
mock_full_import_runner = Mock()
Expand All @@ -47,7 +54,14 @@ def test_force_reload(self, monkeypatch):
assert len(mock_full_import_runner.mock_calls) == 1
assert mock_full_import_runner.mock_calls[0][0] == 'run'

def test_update_stream_call(self, monkeypatch):
def test_update_stream_call(self, monkeypatch, config_override):
config_override({
'sources': {
'TEST': {
'nrtm_host': '192.0.2.1',
}
}
})
mock_dh = Mock()
mock_dq = Mock()
mock_stream_runner = Mock()
Expand All @@ -70,12 +84,12 @@ def test_update_stream_call(self, monkeypatch):
def test_exception_handling(self, monkeypatch, caplog):
mock_dh = Mock()
mock_dq = Mock()
mock_stream_runner = Mock()
mock_full_import_runner = Mock()

monkeypatch.setattr('irrd.mirroring.mirror_runners_import.DatabaseHandler', lambda: mock_dh)
monkeypatch.setattr('irrd.mirroring.mirror_runners_import.DatabaseStatusQuery', lambda: mock_dq)
monkeypatch.setattr('irrd.mirroring.mirror_runners_import.NRTMImportUpdateStreamRunner', lambda source: mock_stream_runner)
mock_stream_runner.run = Mock(side_effect=Exception('test-error'))
monkeypatch.setattr('irrd.mirroring.mirror_runners_import.MirrorFullImportRunner', lambda source: mock_full_import_runner)
mock_full_import_runner.run = Mock(side_effect=Exception('test-error'))

mock_dh.execute_query = lambda q: iter([{'serial_newest_seen': 424242, 'force_reload': False}])
runner = MirrorImportUpdateRunner(source='TEST')
Expand Down Expand Up @@ -111,7 +125,7 @@ def test_run_import_ftp(self, monkeypatch, config_override):
'RETR /serial': b'424242',
}
mock_ftp.retrbinary = lambda path, callback: callback(responses[path])
MirrorFullImportRunner('TEST').run(mock_dh)
MirrorFullImportRunner('TEST').run(mock_dh, serial_newest_seen=424241)

assert MockMirrorFileImportParser.rpsl_data_calls == ['source1', 'source2']
assert flatten_mock_calls(mock_dh) == [
Expand Down Expand Up @@ -167,15 +181,77 @@ def test_no_serial_ftp(self, monkeypatch, config_override):
MockMirrorFileImportParser.rpsl_data_calls = []
monkeypatch.setattr('irrd.mirroring.mirror_runners_import.MirrorFileImportParser', MockMirrorFileImportParser)
monkeypatch.setattr('irrd.mirroring.mirror_runners_import.FTP', lambda url: mock_ftp)
MockMirrorFileImportParser.expected_serial = 0
MockMirrorFileImportParser.expected_serial = None

responses = {
# gzipped data, contains 'source1'
'RETR /source1.gz': b64decode('H4sIAE4CfFsAAyvOLy1KTjUEAE5Fj0oHAAAA'),
'RETR /source2': b'source2',
}
mock_ftp.retrbinary = lambda path, callback: callback(responses[path])
MirrorFullImportRunner('TEST').run(mock_dh)
MirrorFullImportRunner('TEST').run(mock_dh, serial_newest_seen=42)

assert MockMirrorFileImportParser.rpsl_data_calls == ['source1', 'source2']
assert flatten_mock_calls(mock_dh) == [
['delete_all_rpsl_objects_with_journal', ('TEST',), {}],
['disable_journaling', (), {}],
]

def test_import_cancelled_serial_too_old(self, monkeypatch, config_override, caplog):
    """A full import must be cancelled when the serial in import_serial_source
    is not newer than the newest serial already seen for the source."""
    config_override({
        'sources': {
            'TEST': {
                'import_source': ['ftp://host/source1.gz', 'ftp://host/source2'],
                'import_serial_source': 'ftp://host/serial',
            }
        }
    })

    mock_dh = Mock()
    mock_ftp = Mock()
    MockMirrorFileImportParser.rpsl_data_calls = []
    monkeypatch.setattr('irrd.mirroring.mirror_runners_import.MirrorFileImportParser', MockMirrorFileImportParser)
    monkeypatch.setattr('irrd.mirroring.mirror_runners_import.FTP', lambda url: mock_ftp)
    MockMirrorFileImportParser.expected_serial = 424242

    responses = {
        # gzipped data, contains 'source1'
        'RETR /source1.gz': b64decode('H4sIAE4CfFsAAyvOLy1KTjUEAE5Fj0oHAAAA'),
        'RETR /source2': b'source2',
        'RETR /serial': b'424242',
    }
    mock_ftp.retrbinary = lambda path, callback: callback(responses[path])
    # newest seen (424243) > remote serial (424242), so the import should be cancelled
    MirrorFullImportRunner('TEST').run(mock_dh, serial_newest_seen=424243)

    assert not MockMirrorFileImportParser.rpsl_data_calls
    assert not mock_dh.call_count
    # Bug fix: the original asserted a bare string literal, which is always
    # truthy and verified nothing. Check the log message actually appeared.
    assert 'Current newest serial seen for TEST is 424243, import_serial is 424242, cancelling import.' in caplog.text

def test_import_force_reload_with_serial_too_old(self, monkeypatch, config_override):
config_override({
'sources': {
'TEST': {
'import_source': ['ftp://host/source1.gz', 'ftp://host/source2'],
'import_serial_source': 'ftp://host/serial',
}
}
})

mock_dh = Mock()
mock_ftp = Mock()
MockMirrorFileImportParser.rpsl_data_calls = []
monkeypatch.setattr('irrd.mirroring.mirror_runners_import.MirrorFileImportParser', MockMirrorFileImportParser)
monkeypatch.setattr('irrd.mirroring.mirror_runners_import.FTP', lambda url: mock_ftp)
MockMirrorFileImportParser.expected_serial = 424242

responses = {
# gzipped data, contains 'source1'
'RETR /source1.gz': b64decode('H4sIAE4CfFsAAyvOLy1KTjUEAE5Fj0oHAAAA'),
'RETR /source2': b'source2',
'RETR /serial': b'424242',
}
mock_ftp.retrbinary = lambda path, callback: callback(responses[path])
MirrorFullImportRunner('TEST').run(mock_dh, serial_newest_seen=424243, force_reload=True)

assert MockMirrorFileImportParser.rpsl_data_calls == ['source1', 'source2']
assert flatten_mock_calls(mock_dh) == [
Expand Down

0 comments on commit 135a39b

Please sign in to comment.