Skip to content

Commit

Permalink
Version 3.0.0-beta2
Browse files Browse the repository at this point in the history
  • Loading branch information
The n6 Development Team authored and zuo committed Nov 28, 2021
1 parent c38dac3 commit f91588a
Show file tree
Hide file tree
Showing 95 changed files with 7,325 additions and 830 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ ENV/
n6-env/
env.bak/
venv.bak/
env_py3k/

# logs, runtime data
*.log
Expand Down
2 changes: 1 addition & 1 deletion .n6-version
Original file line number Diff line number Diff line change
@@ -1 +1 @@
3.0.0b1
3.0.0b2
9 changes: 5 additions & 4 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
# Changelog

Starting with 3.x.x release series, all notable changes applied to the
code of _n6_ are continuously documented in this file.
Starting with the 3.0.0 release, all notable changes applied to
the code of _n6_ will be continuously documented in this file.

The format of this file is based, to much extent, on
[Keep a Changelog](https://keepachangelog.com/).


## 3.0.0b1 - 2021-10-13
## 3.0.0b... (beta releases...) - since 2021-10-13...

TBD in the description of the 3.0.0 final release (soon...).

TBD
4 changes: 2 additions & 2 deletions N6AdminPanel/n6adminpanel/admin_panel.conf
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
## IMPORTANT: the following 3 config sections should be uncommented
## and adjusted *ONLY* if the n6 Admin Panel application does *NOT*
## have access to the `09_auth_db.conf` file (being, typically, a part
## of the N6Core/N6Pipeline configuration) which (typically) already
## of the N6Core/N6DataPipeline configuration) which (typically) already
## contains these sections!
#
#[auth_db]
Expand Down Expand Up @@ -65,7 +65,7 @@
## IMPORTANT: the following 3 config sections should be kept here
## uncommented *ONLY* if the n6 Admin Panel application does *NOT* have
## access to the `11_mailing.conf` and `11_jinja_rendering.conf` files
## which, if exist (as a part of the N6Core/N6Pipeline configuration),
## which, if exist (as a part of the N6Core/N6DataPipeline configuration),
## typically already contain these sections!


Expand Down
43 changes: 27 additions & 16 deletions N6AdminPanel/n6adminpanel/mail_notices_helpers.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,21 @@
# Copyright (c) 2021 NASK. All rights reserved.

from typing import (
Iterable,
Union,
)
from collections.abc import Iterable
from typing import Union

from flask import (
flash,
g,
)

from n6lib.auth_db.api import AuthDatabaseAPILookupError
from n6lib.common_helpers import ascii_str


class NoRecipients(Exception):
    """
    Raised internally when no mail notice recipients can be determined.

    The exception message should complete the sentence
    "No e-mail notices could be sent because ...!" (see how the
    message is interpolated into the `flash(...)` warning in
    `MailNoticesMixin.try_to_send_mail_notices()`).
    """
    pass


class MailNoticesMixin(object):

def try_to_send_mail_notices(self, notice_key, **get_notice_data_kwargs):
Expand All @@ -21,10 +24,16 @@ def try_to_send_mail_notices(self, notice_key, **get_notice_data_kwargs):
'for notice_key={!a}.'.format(ascii_str(notice_key)))
flash(msg, 'warning')
return
notice_data = self.get_notice_data(**get_notice_data_kwargs)
try:
notice_data = self.get_notice_data(**get_notice_data_kwargs)
notice_recipients = list(self.get_notice_recipients(notice_data))
if not notice_recipients:
raise NoRecipients('no matching non-blocked user(s) could be found')
except NoRecipients as exc:
flash(f'No e-mail notices could be sent because {exc}!', 'error')
return
notice_lang = self.get_notice_lang(notice_data)
assert notice_lang is None or isinstance(notice_lang, str) and len(notice_lang) == 2
notice_recipients = list(self.get_notice_recipients(notice_data))
gathered_ok_recipients = []
with g.n6_mail_notices_api.dispatcher(notice_key,
suppress_and_log_smtp_exc=True) as dispatch:
Expand All @@ -43,18 +52,20 @@ def try_to_send_mail_notices(self, notice_key, **get_notice_data_kwargs):

# (The following hooks can be overridden in subclasses.)

def get_notice_data(self, user_login):
# type: (...) -> dict
with g.n6_auth_manage_api_adapter as api:
user_and_org_basic_info = api.get_user_and_org_basic_info(user_login)
def get_notice_data(self, user_login) -> dict:
    """
    Get the data needed to render mail notices for the given user.

    Args:
        user_login: the login of the user concerned.

    Returns:
        A dict with the user's and their org's basic info (as obtained
        from the auth manage API), plus the `user_login` key.

    Raises:
        NoRecipients: if the user is blocked or does not exist.
    """
    try:
        with g.n6_auth_manage_api_adapter as api:
            if api.is_user_blocked(user_login):
                # Bug fix: the message was a plain string literal, so
                # `{user_login!a}` appeared verbatim instead of the login.
                raise NoRecipients(f'the user {user_login!a} is blocked')
            user_and_org_basic_info = api.get_user_and_org_basic_info(user_login)
    except AuthDatabaseAPILookupError:
        # Bug fix: same missing f-string prefix as above.
        raise NoRecipients(f'the user {user_login!a} does not exist')
    return dict(
        user_and_org_basic_info,
        user_login=user_login)

def get_notice_lang(self, notice_data):
# type: (dict) -> Union[str, None]
return notice_data['lang']

def get_notice_recipients(self, notice_data):
# type: (dict) -> Iterable[str]
def get_notice_recipients(self, notice_data: dict) -> Iterable[str]:
    """
    Return the logins of the users the notice shall be sent to.

    By default this is just the single login found under the
    `user_login` key of `notice_data`.
    """
    recipient_login = notice_data['user_login']
    return [recipient_login]

def get_notice_lang(self, notice_data: dict) -> Union[str, None]:
    """
    Return the language code for the notice, or None.

    Note: `try_to_send_mail_notices()` asserts the returned value is
    either None or a 2-character string.
    """
    lang = notice_data['lang']
    return lang
16 changes: 8 additions & 8 deletions N6AdminPanel/n6adminpanel/org_request_helpers.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
# Copyright (c) 2020-2021 NASK. All rights reserved.

from collections.abc import Iterable
import html
import re
import string
Expand Down Expand Up @@ -591,19 +592,18 @@ def _after_status_transition_to_other(self,
self.try_to_send_mail_notices(notice_key='org_config_update_rejected',
req_id=org_request.id)

def get_notice_data(self, req_id):
# type: (...) -> dict
def get_notice_data(self, req_id) -> dict:
    """
    Return the notice data for the org request identified by `req_id`.

    Note: the returned dict is taken from `g.n6_org_config_info`
    (presumably set earlier in the request-handling machinery — TODO
    confirm) and is mutated in place here: the update request id is
    recorded under `['update_info']['update_request_id']`.
    """
    notice_data = g.n6_org_config_info
    notice_data['update_info']['update_request_id'] = req_id
    return notice_data

def get_notice_lang(self, notice_data):
# type: (dict) -> Union[str, None]
return notice_data['notification_language'] # TODO?: separate per-user setting?...

def get_notice_recipients(self, notice_data):
def get_notice_recipients(self, notice_data: dict) -> Iterable[str]:
    """
    Return the logins of the users the notice shall be sent to:
    all *non-blocked* users of the organization identified by
    `notice_data['org_id']` (`only_nonblocked=True` excludes
    blocked users from the result).
    """
    with g.n6_auth_manage_api_adapter as api:
        return api.get_org_user_logins(org_id=notice_data['org_id'],
                                       only_nonblocked=True)

def get_notice_lang(self, notice_data: dict) -> Union[str, None]:
    """
    Return the organization's notification language code (or None).
    """
    # TODO?: a separate per-user setting?...
    lang = notice_data['notification_language']
    return lang


#
Expand Down
5 changes: 3 additions & 2 deletions N6Core/n6/base/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,9 +95,10 @@ def check_existing_dir_content(install_to, alternative_to):

try:
config_template_dir = 'n6/data/conf/'
files = resource_listdir(Requirement.parse("n6"), config_template_dir)
files = resource_listdir(Requirement.parse("n6core-py2"), config_template_dir)
for f in files:
filename = resource_filename(Requirement.parse("n6"), os.path.join(config_template_dir, f))
filename = resource_filename(Requirement.parse("n6core-py2"),
os.path.join(config_template_dir, f))
try:
if not os.path.isdir(install_to):
os.makedirs(install_to)
Expand Down
14 changes: 13 additions & 1 deletion N6Core/n6/collectors/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@
#
# Exceptions

# LEGACY STUFF -- we DO NOT want to migrate it to n6datasources...
class n6CollectorException(Exception):
pass

Expand All @@ -73,6 +74,7 @@ def set_configuration(self):
self.config = ConfigSection('<no section declared>')


# LEGACY STUFF -- we DO NOT want to migrate it to n6datasources...
class CollectorStateMixIn(object):

"""DO NOT USE THIS CLASS IN NEW CODE, USE ONLY CollectorWithStateMixin!"""
Expand Down Expand Up @@ -112,6 +114,7 @@ def get_cache_file_name(self):
return self.config['source'] + ".txt"


# LEGACY STUFF -- we DO NOT want to migrate it to n6datasources...
class CollectorStateMixInPlus(CollectorStateMixIn):

"""
Expand Down Expand Up @@ -650,6 +653,7 @@ def start_publishing(self):
self.inner_stop()


# TODO: migrate it to `n6datasources.collectors.base` when needed...
class BaseEmailSourceCollector(BaseOneShotCollector):

"""
Expand Down Expand Up @@ -690,6 +694,8 @@ def get_output_prop_kwargs(self, email_msg, **kwargs):
return prop_kwargs


# LEGACY STUFF -- we DO NOT want to migrate it to n6datasources...
# (use `n6datasources.collectors.base.BaseDownloadingCollector` instead)
class BaseUrlDownloaderCollector(BaseCollector):

config_group = None
Expand Down Expand Up @@ -868,6 +874,9 @@ def _try_to_set_http_last_modified(self, headers):
break


# LEGACY STUFF -- we DO NOT want to migrate it to n6datasources...
# **unless** (TODO) modernized to use `BaseDownloadingCollector`
# (instead of `BaseUrlDownloaderCollector`).
class BaseRSSCollector(BaseOneShotCollector, BaseUrlDownloaderCollector):

type = 'stream'
Expand Down Expand Up @@ -1477,6 +1486,8 @@ def obtain_orig_data(self):
#
# Script/entry point factories

# LEGACY STUFF -- we DO NOT want to migrate it to n6datasources...
# (replaced by `n6datasources.collectors.base.AbstractBaseCollector.run_script()`)
def generate_collector_main(collector_class):
def collector_main():
with logging_configured():
Expand All @@ -1485,7 +1496,8 @@ def collector_main():
collector.run_handling()
return collector_main


# LEGACY STUFF -- we DO NOT want to migrate it to n6datasources...
# (use `n6datasources.collectors.base.add_collector_entry_point_functions()` instead)
def entry_point_factory(module):
for collector_class in all_subclasses(AbstractBaseCollector):
if (not collector_class.__module__.endswith('.generic') and
Expand Down
3 changes: 3 additions & 0 deletions N6Core/n6/data/conf/70_abuse_ch.conf
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,9 @@ prefetch_count = 20
[AbuseChFeodoTracker201908Parser]
prefetch_count = 1

[AbuseChFeodoTracker202110Parser]
prefetch_count = 1

[AbuseChPalevoDoms201406Parser]
prefetch_count = 1

Expand Down
17 changes: 17 additions & 0 deletions N6Core/n6/parsers/generic.py
Original file line number Diff line number Diff line change
Expand Up @@ -849,6 +849,9 @@ def get_bl_current_time_from_data(self, data, parsed):



# LEGACY STUFF -- we DO NOT want to migrate it to `n6datasources.parsers.base`.
# IF it is really needed in Py3, please (TODO?) migrate it to
# `n6datasources.parsers.base_legacy`.
class TabDataParser(BaseParser):

"""
Expand Down Expand Up @@ -1000,6 +1003,9 @@ def process_row_fields(self, data, parsed, *fields):



# LEGACY STUFF -- we DO NOT want to migrate it to `n6datasources.parsers.base`.
# IF it is really needed in Py3, please (TODO?) migrate it to
# `n6datasources.parsers.base_legacy`.
class BlackListTabDataParser(TabDataParser, BlackListParser):

"""
Expand All @@ -1012,6 +1018,10 @@ class BlackListTabDataParser(TabDataParser, BlackListParser):
# (+ adding process_row_fields() method placeholder)
# -- then update the wiki page about parsers...
# XXX: is it tested?
#
# LEGACY STUFF -- we DO NOT want to migrate it to `n6datasources.parsers.base`.
# IF it is really needed in Py3, please (TODO?) migrate it to
# `n6datasources.parsers.base_legacy`.
class XmlDataParser(BaseParser):

"""
Expand Down Expand Up @@ -1057,6 +1067,11 @@ def iter_entry(self, data):



#
# Script/entry point factories

# LEGACY STUFF -- we DO NOT want to migrate it to n6datasources...
# (replaced by `n6datasources.parsers.base.BaseParser.run_script()`)
def generate_parser_main(parser_class):
def parser_main():
with logging_configured():
Expand All @@ -1066,6 +1081,8 @@ def parser_main():
return parser_main


# LEGACY STUFF -- we DO NOT want to migrate it to n6datasources...
# (use `n6datasources.parsers.base.add_parser_entry_point_functions()` instead)
def entry_point_factory(module):
for parser_class in all_subclasses(BaseParser):
if (not parser_class.__module__.endswith('.generic') and
Expand Down
25 changes: 16 additions & 9 deletions N6Core/n6/tests/utils/test_aggregator.py
Original file line number Diff line number Diff line change
Expand Up @@ -913,7 +913,7 @@ def test_publish_event(self, count, expected_body_content):

self.assertEqual(len(self._aggregator.publish_output.mock_calls), 1)
publish_output_kwargs = self._aggregator.publish_output.mock_calls[0][-1]
self.assertEqual(set(publish_output_kwargs.iterkeys()), {"routing_key", "body"})
self.assertEqual(set(publish_output_kwargs.keys()), {"routing_key", "body"})
self.assertEqual(publish_output_kwargs["routing_key"], expected_routing_key)
self.assertJsonEqual(publish_output_kwargs["body"], expected_body_content)

Expand Down Expand Up @@ -1544,19 +1544,26 @@ def test_store_restore_state(self):
# the state, but there is no access to the given path; first,
# make sure there actually is no access to the given path
tmp_db_path = "/root/example.pickle"
if not os.access(tmp_db_path, os.W_OK):
with patch.object(self._adw, "dbpath", tmp_db_path):
self.assertRaises(IOError, self._adw.store_state())
assert not os.access(tmp_db_path, os.W_OK), ('The test case relies on the assumption that '
'the user running the tests does not '
'have permission to write '
'to: {!r}'.format(tmp_db_path))
self._adw.dbpath = tmp_db_path
with patch('n6.utils.aggregator.LOGGER') as patched_logger:
self._adw.store_state()
patched_logger.error.assert_called_once()
# assert the exception is being raised when trying to restore
# the state from nonexistent file; first, safely create
# a temporary file, then close and remove it, so the path
# most likely does not exist
with tempfile.NamedTemporaryFile() as fp:
tmp_db_path = fp.name
if not os.path.exists(tmp_db_path):
with patch.object(self._adw, "dbpath", tmp_db_path), \
self.assertRaisesRegexp(IOError, r"No such file or directory"):
self._adw.restore_state()
assert not os.path.exists(tmp_db_path), ('The randomly generated temporary directory: '
'{!r} still exists, so the test cannot '
'be correctly performed'.format(tmp_db_path))
with patch.object(self._adw, "dbpath", tmp_db_path), \
self.assertRaisesRegexp(IOError, r"No such file or directory"):
self._adw.restore_state()

@foreach(_test_process_new_message_data)
def test_process_new_message(self, messages, expected_source_time,
Expand Down Expand Up @@ -1695,7 +1702,7 @@ def test_generate_suppressed_events_after_timeout(self,
datetime.timedelta(*args, **kw))
# actual call
generated_events = list(self._adw.generate_suppresed_events_after_timeout())
expected_events = [event for source, vals in source_to_expected_events.iteritems()
expected_events = [event for source, vals in source_to_expected_events.items()
if source in expected_inactive_sources for event in vals]
self.assertEqual(expected_events, generated_events)

Expand Down

0 comments on commit f91588a

Please sign in to comment.