This repository has been archived by the owner on Nov 29, 2021. It is now read-only.

Support scan queuing #278

Merged
merged 31 commits on Jun 4, 2020
Changes from all commits
31 commits
b8fedf7
Check if it is allowed to start a pending scan
jjnicola May 27, 2020
ccff9b9
Check if it is allowed to start a pending scan.
jjnicola May 27, 2020
c6c4794
Check for each pending scan if it is allowed.
jjnicola May 27, 2020
07402c9
Add new argument file_storage_dir
jjnicola May 27, 2020
de53fb8
Create the dictionary for the new scan in the scan table.
jjnicola May 27, 2020
00d3fae
Add method to pickle the scan info into a file.
jjnicola May 27, 2020
f2a30ef
Add method to unpickle the scan_info.
jjnicola May 27, 2020
6bc4a8f
Support stopping a PENDING scan.
jjnicola May 27, 2020
9c6c3cf
Support deleting a stopped scan whose previous status was PENDING
jjnicola May 27, 2020
fe63af1
Check for existence of the scan before trying to stop a PENDING scan
jjnicola May 27, 2020
4646c0c
Add class to handle pickled data in files
jjnicola May 28, 2020
1f741f4
Use DataPickler class and improve error handling.
jjnicola May 28, 2020
96bbc7b
Add integrity check for pickled data
jjnicola May 28, 2020
e8730e2
Fix test
jjnicola May 28, 2020
1a045d4
Make pylint happy
jjnicola May 28, 2020
829abf8
Improve error handling when removing the pickled data file.
jjnicola May 28, 2020
db078a4
Add test for data pickler
jjnicola May 29, 2020
caec16c
Create the file with permission only for the owner.
jjnicola May 29, 2020
35e46df
More pylint fixes
jjnicola May 29, 2020
a0495d2
Empty results element in get_scan response for pending scans
jjnicola May 29, 2020
27dbfd7
Rename Pending status to Queued
jjnicola Jun 2, 2020
5e155a8
Check if there is enough free memory before starting a scan.
jjnicola Jun 2, 2020
c16d416
Clean up queued scans if the daemon is killed.
jjnicola Jun 2, 2020
263e69f
Make pylint happy again
jjnicola Jun 2, 2020
3c9f225
Add option and check for max count of queued scans.
jjnicola Jun 3, 2020
e79c828
Add test for max queued scans
jjnicola Jun 3, 2020
eb22217
Add typing to datapickler
jjnicola Jun 4, 2020
a9c4530
Don't access scan collection method from main.py
jjnicola Jun 4, 2020
3a35e06
Return immediately if unpickling fails.
jjnicola Jun 4, 2020
c09489b
Check for min_free_mem_scan_queue inside is_enough_free_memory()
jjnicola Jun 4, 2020
4081913
More typing
jjnicola Jun 4, 2020
47 changes: 6 additions & 41 deletions ospd/command/command.py
@@ -471,55 +471,20 @@ def get_elements(self):

return elements

def is_new_scan_allowed(self) -> bool:
""" Check if max_scans has been reached.

Return:
True if a new scan can be launched.
"""
if (self._daemon.max_scans == 0) or (
len(self._daemon.scan_processes) < self._daemon.max_scans
):
return True

return False

def is_enough_free_memory(self) -> bool:
""" Check if there is enough free memory in the system to run
a new scan. The necessary memory is a rough calculation and very
conservative.

Return:
True if there is enough memory for a new scan.
"""

ps_process = psutil.Process()
proc_memory = ps_process.memory_info().rss

free_mem = psutil.virtual_memory().free

if free_mem > (4 * proc_memory):
return True

return False

def handle_xml(self, xml: Element) -> bytes:
""" Handles <start_scan> command.

Return:
Response string for <start_scan> command.
"""

if self._daemon.check_free_memory and not self.is_enough_free_memory():
raise OspdCommandError(
'Not possible to run a new scan. Not enough free memory.',
'start_scan',
)

if not self.is_new_scan_allowed():
if (
self._daemon.max_queued_scans
and self._daemon.get_count_queued_scans()
>= self._daemon.max_queued_scans
):
raise OspdCommandError(
'Not possible to run a new scan. Max scan limit reached.',
'start_scan',
'Maximum number of queued scans reached.', 'start_scan'
)

target_str = xml.get('target')
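For readers following the diff, here is a minimal, editor-added sketch of how the new admission checks in the start_scan handler fit together. DaemonStub, the MiB threshold, and can_accept_start_scan are illustrative stand-ins; only check_free_memory, is_enough_free_memory, max_queued_scans and get_count_queued_scans come from the diff itself.

import psutil


class DaemonStub:
    """Hypothetical stand-in for OSPDaemon; attribute names follow the diff."""

    check_free_memory = True
    min_free_mem_scan_queue = 30  # assumed MiB threshold, not taken from the PR
    max_queued_scans = 10
    _queued_scans = 0

    def is_enough_free_memory(self) -> bool:
        # Rough, conservative check, in the spirit of the removed helper above.
        free_mib = psutil.virtual_memory().free / (1024 * 1024)
        return free_mib > self.min_free_mem_scan_queue

    def get_count_queued_scans(self) -> int:
        return self._queued_scans


def can_accept_start_scan(daemon: DaemonStub) -> bool:
    """Mirror the two checks handle_xml performs before queuing a scan."""
    if daemon.check_free_memory and not daemon.is_enough_free_memory():
        return False
    if daemon.max_queued_scans and (
        daemon.get_count_queued_scans() >= daemon.max_queued_scans
    ):
        return False
    return True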
144 changes: 144 additions & 0 deletions ospd/datapickler.py
@@ -0,0 +1,144 @@
# Copyright (C) 2014-2020 Greenbone Networks GmbH
#
# SPDX-License-Identifier: AGPL-3.0-or-later
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

""" Pickle Handler class
"""

import logging
import pickle
import os

from hashlib import sha256
from pathlib import Path
from typing import Dict, BinaryIO, Any

from ospd.errors import OspdCommandError

logger = logging.getLogger(__name__)

OWNER_ONLY_RW_PERMISSION = 0o600


class DataPickler:
def __init__(self, storage_path: str):
self._storage_path = storage_path
self._storage_fd = None

def _fd_opener(self, path: str, flags: int) -> BinaryIO:
os.umask(0)
flags = os.O_CREAT | os.O_WRONLY
self._storage_fd = os.open(path, flags, mode=OWNER_ONLY_RW_PERMISSION)
return self._storage_fd

def _fd_close(self) -> None:
try:
self._storage_fd.close()
self._storage_fd = None
except Exception: # pylint: disable=broad-except
pass

def remove_file(self, filename: str) -> None:
""" Remove the file containing a scan_info pickled object """
storage_file_path = Path(self._storage_path) / filename
try:
storage_file_path.unlink()
except Exception as e: # pylint: disable=broad-except
logger.error('Not possible to delete %s. %s', filename, e)

def store_data(self, filename: str, data_object: Any) -> str:
""" Pickle a object and store it in a file named"""
storage_file_path = Path(self._storage_path) / filename

try:
# create parent directories recursively
parent_dir = storage_file_path.parent
parent_dir.mkdir(parents=True, exist_ok=True)
except Exception as e: # pylint: disable=broad-except
raise OspdCommandError(
'Not possible to access dir for %s. %s' % (filename, e),
'start_scan',
)

try:
pickled_data = pickle.dumps(data_object)
except pickle.PicklingError as e:
raise OspdCommandError(
'Not possible to pickle scan info for %s. %s' % (filename, e),
'start_scan',
)

try:
with open(
str(storage_file_path), 'wb', opener=self._fd_opener
) as scan_info_f:
scan_info_f.write(pickled_data)
except Exception as e: # pylint: disable=broad-except
self._fd_close()
raise OspdCommandError(
'Not possible to store scan info for %s. %s' % (filename, e),
'start_scan',
)
self._fd_close()

return self._pickled_data_hash_generator(pickled_data)

def load_data(self, filename: str, original_data_hash: str) -> Any:
""" Unpickle the stored data in the filename. Perform an
intengrity check of the read data with the the hash generated
with the original data.

Return:
Dictionary containing the scan info. None otherwise.
"""

storage_file_path = Path(self._storage_path) / filename
pickled_data = None
try:
with storage_file_path.open('rb') as scan_info_f:
pickled_data = scan_info_f.read()
except Exception as e: # pylint: disable=broad-except
logger.error(
'Not possible to read pickled data from %s. %s', filename, e
)
return

unpickled_scan_info = None
try:
unpickled_scan_info = pickle.loads(pickled_data)
except pickle.UnpicklingError as e:
logger.error(
'Not possible to read pickled data from %s. %s', filename, e
)
return

pickled_scan_info_hash = self._pickled_data_hash_generator(pickled_data)

if original_data_hash != pickled_scan_info_hash:
logger.error('Unpickled data from %s corrupted.', filename)
return

return unpickled_scan_info

def _pickled_data_hash_generator(self, pickled_data: bytes) -> str:
""" Calculate the sha256 hash of a pickled data """
if not pickled_data:
return

hash_sha256 = sha256()
hash_sha256.update(pickled_data)

return hash_sha256.hexdigest()
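As a usage illustration added by the editor (not part of the PR), here is how a caller might round-trip scan info through DataPickler; the storage directory and the scan-info dictionary are made up for the example.

from ospd.datapickler import DataPickler

# Hypothetical scan info; the daemon actually stores the parsed <start_scan> request.
scan_info = {'target': '192.0.2.0/24', 'ports': 'T:22,80,443'}

pickler = DataPickler('/tmp/ospd-scan-queue')  # assumed storage directory

# store_data pickles the object, writes it with owner-only permissions
# and returns the sha256 hex digest of the pickled payload.
data_hash = pickler.store_data('scan-1234', scan_info)

# load_data re-reads and unpickles the file; it returns None if reading,
# unpickling or the integrity check against the original hash fails.
restored = pickler.load_data('scan-1234', data_hash)
assert restored == scan_info

pickler.remove_file('scan-1234')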
17 changes: 14 additions & 3 deletions ospd/main.py
@@ -34,6 +34,7 @@
from ospd.parser import create_parser, ParserType
from ospd.server import TlsServer, UnixSocketServer, BaseServer


COPYRIGHT = """Copyright (C) 2014, 2015, 2018, 2019 Greenbone Networks GmbH
License GPLv2+: GNU GPL version 2 or later
This is free software: you are free to change and redistribute it.
@@ -107,12 +108,18 @@ def init_logging(


def exit_cleanup(
pidfile: str, server: BaseServer, _signum=None, _frame=None
pidfile: str,
server: BaseServer,
daemon: OSPDaemon,
_signum=None,
_frame=None,
) -> None:
""" Removes the pidfile before ending the daemon. """
signal.signal(signal.SIGINT, signal.SIG_IGN)
pidpath = Path(pidfile)

daemon.daemon_exit_cleanup()

if not pidpath.is_file():
return

@@ -174,8 +181,12 @@ def main(
sys.exit()

# Set signal handler and cleanup
atexit.register(exit_cleanup, pidfile=args.pid_file, server=server)
signal.signal(signal.SIGTERM, partial(exit_cleanup, args.pid_file, server))
atexit.register(
exit_cleanup, pidfile=args.pid_file, server=server, daemon=daemon
)
signal.signal(
signal.SIGTERM, partial(exit_cleanup, args.pid_file, server, daemon)
)

if not daemon.check():
return 1
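To make the cleanup wiring above concrete, a small, self-contained sketch of the same pattern, added by the editor: one handler is registered both with atexit and for SIGTERM so that queued-scan state is removed however the daemon exits. daemon_exit_cleanup mirrors the diff; TinyDaemon, the paths and the glob-based cleanup are assumptions for illustration.

import atexit
import signal
import sys
from functools import partial
from pathlib import Path


class TinyDaemon:
    """Hypothetical stand-in for OSPDaemon holding queued-scan files."""

    def __init__(self, storage_dir: str):
        self.storage_dir = Path(storage_dir)

    def daemon_exit_cleanup(self) -> None:
        # Remove any pickled scan-info files left behind by queued scans.
        if not self.storage_dir.is_dir():
            return
        for leftover in self.storage_dir.glob('*'):
            if leftover.is_file():
                leftover.unlink()


def exit_cleanup(pidfile: str, daemon: TinyDaemon, _signum=None, _frame=None) -> None:
    """Remove queued-scan state and the pidfile before the process ends."""
    signal.signal(signal.SIGINT, signal.SIG_IGN)  # ignore further interrupts
    daemon.daemon_exit_cleanup()
    pidpath = Path(pidfile)
    if pidpath.is_file():
        pidpath.unlink()
    if _signum is not None:
        sys.exit(0)  # reached via SIGTERM rather than normal interpreter exit


daemon = TinyDaemon('/tmp/ospd-scan-queue')
atexit.register(exit_cleanup, pidfile='/tmp/ospd.pid', daemon=daemon)
signal.signal(signal.SIGTERM, partial(exit_cleanup, '/tmp/ospd.pid', daemon))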