Fix: race condition within daemon and notus
While creating a hash sum verification it can happen that a lock
exists within the daemon which is not yet resolved, while a hashsum
verification is already running in a different process.

This results in a non-functioning OSPD, since it gets stuck on loading;
to prevent such a scenario, notus now creates the verification only
when needed instead of eagerly while creating the first instance.
nichtsfrei committed Aug 31, 2022
1 parent 0071f0f commit a0f96a1
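
In essence the fix replaces eager with lazy initialization: the GPG hashsum verifier is no longer built while the first Notus instance is constructed, but on first use inside reload_cache(), so creating the instance can no longer collide with an unresolved lock in the daemon. A minimal, hypothetical sketch of that pattern (make_verifier and LazyNotus are illustrative names only, not the project's API; the actual change is in the ospd_openvas/notus.py diff below):

from pathlib import Path
from typing import Callable, Optional


def make_verifier(path: Path, disable: bool) -> Callable[[Path], bool]:
    # Stand-in for the real gpg_sha_verifier setup; trust everything when disabled.
    if disable:
        return lambda _: True
    return lambda f: f.exists()


class LazyNotus:
    def __init__(self, path: Path, disable_hashsum_verification: bool = False):
        self.path = path
        self.disable_hashsum_verification = disable_hashsum_verification
        # Deliberately not created here; eager creation is what raced with the lock.
        self._verifier: Optional[Callable[[Path], bool]] = None

    def reload_cache(self) -> None:
        for advisory_file in self.path.glob("*.notus"):
            if self._verifier is None:
                # Built only on first use, when advisories are actually read.
                self._verifier = make_verifier(
                    self.path, self.disable_hashsum_verification
                )
            if self._verifier(advisory_file):
                print(f"loading advisories from {advisory_file}")
            else:
                print(f"ignoring {advisory_file} due to invalid signature")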
Showing 4 changed files with 63 additions and 53 deletions.
38 changes: 5 additions & 33 deletions ospd_openvas/daemon.py
@@ -25,7 +25,7 @@
 import time
 import copy
 
-from typing import Callable, Optional, Dict, List, Tuple, Iterator, Any
+from typing import Optional, Dict, List, Tuple, Iterator, Any
 from datetime import datetime
 
 from pathlib import Path
@@ -55,12 +55,6 @@
 from ospd_openvas.messaging.mqtt import MQTTClient, MQTTDaemon, MQTTSubscriber
 from ospd_openvas.feed import Feed
 
-from ospd_openvas.gpg_sha_verifier import (
-    ReloadConfiguration,
-    create_verify,
-    reload_sha256sums,
-)
-
 SENTRY_DSN_OSPD_OPENVAS = environ.get("SENTRY_DSN_OSPD_OPENVAS")
 if SENTRY_DSN_OSPD_OPENVAS:
     # pylint: disable=import-error
@@ -439,28 +433,6 @@ def get_filtered_vts_list(self, vts, vt_filter: str) -> Optional[List[str]]:
         return vt_oid_list
 
 
-def hashsum_verificator(
-    advisories_directory_path: Path, disable: bool
-) -> Callable[[Path], bool]:
-    if disable:
-        logger.info("hashsum verification is disabled")
-        return lambda _: True
-
-    def on_hash_sum_verification_failure(
-        _: Optional[Dict[str, str]]
-    ) -> Dict[str, str]:
-        raise Exception("GPG verification of notus sha256sums failed")
-
-    sha_sum_file_path = advisories_directory_path / "sha256sums"
-    sha_sum_reload_config = ReloadConfiguration(
-        hash_file=sha_sum_file_path,
-        on_verification_failure=on_hash_sum_verification_failure,
-    )
-
-    sums = reload_sha256sums(sha_sum_reload_config)
-    return create_verify(sums)
-
-
 class OSPDopenvas(OSPDaemon):
 
     """Class for ospd-openvas daemon."""
@@ -482,12 +454,12 @@ def __init__(
         notus = None
         if notus_dir:
             ndir = Path(notus_dir)
-            verifier = hashsum_verificator(
-                ndir, disable_notus_hashsum_verification
+            notus = Notus(
+                ndir,
+                Cache(self.main_db.ctx),
+                disable_notus_hashsum_verification,
             )
 
-            notus = Notus(ndir, Cache(self.main_db.ctx), verifier)
-
         self.nvti = NVTICache(
             self.main_db,
             notus,
59 changes: 47 additions & 12 deletions ospd_openvas/notus.py
@@ -28,9 +28,37 @@
 from ospd.parser import CliParser
 from ospd_openvas.messages.result import ResultMessage
 
+from ospd_openvas.gpg_sha_verifier import (
+    ReloadConfiguration,
+    create_verify,
+    reload_sha256sums,
+)
+
 logger = logging.getLogger(__name__)
 
 
+def hashsum_verificator(
+    advisories_directory_path: Path, disable: bool
+) -> Callable[[Path], bool]:
+    if disable:
+        logger.info("hashsum verification is disabled")
+        return lambda _: True
+
+    def on_hash_sum_verification_failure(
+        _: Optional[Dict[str, str]]
+    ) -> Dict[str, str]:
+        raise Exception("GPG verification of notus sha256sums failed")
+
+    sha_sum_file_path = advisories_directory_path / "sha256sums"
+    sha_sum_reload_config = ReloadConfiguration(
+        hash_file=sha_sum_file_path,
+        on_verification_failure=on_hash_sum_verification_failure,
+    )
+
+    sums = reload_sha256sums(sha_sum_reload_config)
+    return create_verify(sums)
+
+
 class Cache:
     def __init__(self, db: Redis, prefix: str = "internal/notus/advisories"):
         self.db = db
@@ -60,17 +88,19 @@ class Notus:
     loaded: bool = False
     loading: bool = False
     path: Path
-    _verifier: Callable[[Path], bool]
+    disable_hashsum_verification: bool
+    _verifier: Optional[Callable[[Path], bool]]
 
     def __init__(
         self,
         path: Path,
         cache: Cache,
-        verifier: Callable[[Path], bool],
+        disable_hashsum_verification: bool = False,
     ):
         self.path = path
         self.cache = cache
-        self._verifier = verifier
+        self._verifier = None
+        self.disable_hashsum_verification = disable_hashsum_verification
 
     def reload_cache(self):
         if self.loading:
@@ -81,16 +111,21 @@ def reload_cache(self):
         self.loading = True
         self.loaded = False
         for f in self.path.glob('*.notus'):
-            if self._verifier(f):
-                data = json.loads(f.read_bytes())
-                advisories = data.pop("advisories", [])
-                for advisory in advisories:
-                    res = self.__to_ospd(f, advisory, data)
-                    self.cache.store_advisory(advisory["oid"], res)
-            else:
-                logger.log(
-                    logging.WARN, "ignoring %s due to invalid signature", f
+            if not self._verifier:
+                self._verifier = hashsum_verificator(
+                    self.path, self.disable_hashsum_verification
                 )
+            if self._verifier:
+                if self._verifier(f):
+                    data = json.loads(f.read_bytes())
+                    advisories = data.pop("advisories", [])
+                    for advisory in advisories:
+                        res = self.__to_ospd(f, advisory, data)
+                        self.cache.store_advisory(advisory["oid"], res)
+                else:
+                    logger.log(
+                        logging.WARN, "ignoring %s due to invalid signature", f
+                    )
         self.loading = False
         self.loaded = True
3 changes: 1 addition & 2 deletions tests/test_daemon.py
@@ -36,10 +36,9 @@
 from ospd_openvas.daemon import (
     OSPD_PARAMS,
     OpenVasVtsFilter,
-    hashsum_verificator,
 )
 from ospd_openvas.openvas import Openvas
-from ospd_openvas.notus import Notus
+from ospd_openvas.notus import Notus, hashsum_verificator
 
 OSPD_PARAMS_OUT = {
     'auto_enable_dependencies': {
16 changes: 10 additions & 6 deletions tests/test_notus.py
@@ -50,7 +50,8 @@ def test_notus_retrieve(self):
         redis_mock = mock.MagicMock()
         redis_mock.scan_iter.return_value = ["internal/notus/advisories/12"]
         redis_mock.lindex.return_value = '{"file_name": "/tmp/something" }'
-        notus = Notus(path_mock, Cache(redis_mock), lambda _: True)
+        notus = Notus(path_mock, Cache(redis_mock))
+        notus._verifier = lambda _: True  # pylint: disable=protected-access
         oids = [x for x in notus.get_filenames_and_oids()]
         self.assertEqual(len(oids), 1)

@@ -69,13 +70,15 @@ def test_notus_reload(self):
             ]
         }'''
         redis_mock = mock.MagicMock()
-        load_into_redis = Notus(path_mock, Cache(redis_mock), lambda _: True)
+        load_into_redis = Notus(path_mock, Cache(redis_mock))
+        # pylint: disable=protected-access
+        load_into_redis._verifier = lambda _: True
         load_into_redis.reload_cache()
         self.assertEqual(redis_mock.lpush.call_count, 1)
         redis_mock.reset_mock()
-        do_not_load_into_redis = Notus(
-            path_mock, Cache(redis_mock), lambda _: False
-        )
+        do_not_load_into_redis = Notus(path_mock, Cache(redis_mock))
+        # pylint: disable=protected-access
+        do_not_load_into_redis._verifier = lambda _: False
         do_not_load_into_redis.reload_cache()
         self.assertEqual(redis_mock.lpush.call_count, 0)

@@ -102,7 +105,8 @@ def test_notus_cvss_v2_v3_none(self):
             ]
         }'''
         cache_fake = CacheFake()
-        notus = Notus(path_mock, cache_fake, lambda _: True)
+        notus = Notus(path_mock, cache_fake)
+        notus._verifier = lambda _: True  # pylint: disable=protected-access
         notus.reload_cache()
         nm = notus.get_nvt_metadata("12")
         assert nm
