From 2cc1f3c53abd65924ba53abf30c914b885ceee2f Mon Sep 17 00:00:00 2001 From: Olivier Desenfans Date: Wed, 6 Apr 2022 16:45:08 +0200 Subject: [PATCH 1/3] [Config] Remove dependency to API in non-API code Removed all dependencies to the aiohttp app object in non-API code. This object was used all over the place to retrieve the config object. Instead, we now use a global variable to store the config object itself. --- src/aleph/chains/common.py | 22 +++++------ src/aleph/chains/ethereum.py | 2 - src/aleph/chains/nuls2.py | 4 +- src/aleph/commands.py | 46 +++++++++++----------- src/aleph/config.py | 9 +++++ src/aleph/handlers/storage.py | 21 +++++----- src/aleph/jobs/job_utils.py | 32 +++++++++------ src/aleph/jobs/process_pending_messages.py | 11 +++--- src/aleph/jobs/process_pending_txs.py | 9 +++-- src/aleph/model/__init__.py | 7 +++- src/aleph/network.py | 1 - src/aleph/services/filestore.py | 14 ++++--- src/aleph/services/ipfs/common.py | 35 +++++++++------- src/aleph/services/ipfs/storage.py | 7 ++-- src/aleph/services/p2p/manager.py | 12 +++--- src/aleph/storage.py | 11 ++++-- src/aleph/utils.py | 2 +- src/aleph/web/controllers/metrics.py | 20 ++++------ tests/storage/conftest.py | 3 +- 19 files changed, 146 insertions(+), 122 deletions(-) diff --git a/src/aleph/chains/common.py b/src/aleph/chains/common.py index b84a326e2..03cb413e9 100644 --- a/src/aleph/chains/common.py +++ b/src/aleph/chains/common.py @@ -4,10 +4,12 @@ from enum import IntEnum from typing import Dict, Optional, Tuple, List -from aleph_message.models import MessageType -from bson import ObjectId -from pymongo import UpdateOne - +from aleph.config import get_config +from aleph.exceptions import ( + AlephStorageException, + InvalidContent, + ContentCurrentlyUnavailable, UnknownHashError, +) from aleph.handlers.forget import handle_forget_message from aleph.handlers.storage import handle_new_storage from aleph.model.db_bulk_operation import DbBulkOperation @@ -17,12 +19,9 @@ from aleph.network import check_message as check_message_fn from aleph.permissions import check_sender_authorization from aleph.storage import get_json, pin_hash, add_json, get_message_content -from aleph.web import app -from aleph.exceptions import ( - AlephStorageException, - InvalidContent, - ContentCurrentlyUnavailable, UnknownHashError, -) +from aleph_message.models import MessageType +from bson import ObjectId +from pymongo import UpdateOne LOGGER = logging.getLogger("chains.common") @@ -347,6 +346,7 @@ async def get_chaindata(messages, bulk_threshold=2000): async def get_chaindata_messages( chaindata: Dict, context, seen_ids: Optional[List] = None ): + config = get_config() protocol = chaindata.get("protocol", None) version = chaindata.get("version", None) @@ -384,7 +384,7 @@ async def get_chaindata_messages( raise LOGGER.info("Got bulk data with %d items" % len(messages)) - if app["config"].ipfs.enabled.value: + if config.ipfs.enabled.value: # wait for 4 seconds to try to pin that try: LOGGER.info(f"chaindatax {chaindata}") diff --git a/src/aleph/chains/ethereum.py b/src/aleph/chains/ethereum.py index c2ff2fa70..66d8227ac 100644 --- a/src/aleph/chains/ethereum.py +++ b/src/aleph/chains/ethereum.py @@ -34,9 +34,7 @@ async def verify_signature(message): """Verifies a signature of a message, return True if verified, false if not""" - from aleph.web import app - config = app["config"] # w3 = await loop.run_in_executor(None, get_web3, config) verification = await get_verification_buffer(message) diff --git a/src/aleph/chains/nuls2.py 
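The core pattern this first commit introduces is a module-level config object plus an accessor function, replacing reads of `app["config"]`. A minimal sketch of that pattern, using the same configmanager nested-dict schema style as src/aleph/config.py; the defaults shown here are placeholders, not the real schema:

```python
# Minimal sketch of the module-level config pattern (placeholder defaults;
# the real schema lives in src/aleph/config.py).
from configmanager import Config


def get_defaults() -> dict:
    return {"ipfs": {"enabled": True}, "storage": {"store_files": False}}


app_config = Config(schema=get_defaults())


def get_config() -> Config:
    # Call sites import this function rather than app_config directly, so the
    # value is read at call time and tests can swap the global underneath.
    return app_config


# Before: app["config"].ipfs.enabled.value  (requires the aiohttp app object)
# After:
config = get_config()
if config.ipfs.enabled.value:
    print("IPFS pinning enabled")
```

Reading through `get_config()` instead of importing `app_config` directly is also what lets the test fixture in the last commit substitute its own Config object without touching the aiohttp app.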
b/src/aleph/chains/nuls2.py index cf1eec39c..da4cb3a85 100644 --- a/src/aleph/chains/nuls2.py +++ b/src/aleph/chains/nuls2.py @@ -348,9 +348,9 @@ async def nuls2_outgoing_worker(config): async def nuls2_balance_getter(address, config=None): global DECIMALS if config is None: - from aleph.web import app + from aleph.config import get_config + config = get_config() - config = app["config"] server = get_server(config.nuls2.api_url.value) contract_address = get_server(config.nuls2.contract_address.value) chain_id = config.nuls2.chain_id.value diff --git a/src/aleph/commands.py b/src/aleph/commands.py index 8e20434fb..51aea3e46 100644 --- a/src/aleph/commands.py +++ b/src/aleph/commands.py @@ -24,7 +24,7 @@ from aleph import model from aleph.chains import connector_tasks from aleph.cli.args import parse_args -from aleph.config import get_defaults +import aleph.config from aleph.exceptions import InvalidConfigException, KeyNotFoundException from aleph.jobs.job_utils import prepare_loop from aleph.jobs import start_jobs @@ -42,7 +42,9 @@ LOGGER = logging.getLogger(__name__) -async def run_server(host: str, port: int, shared_stats: dict, extra_web_config: dict): +async def run_server( + config: Config, host: str, port: int, shared_stats: dict, extra_web_config: dict +): # These imports will run in different processes from aiohttp import web from aleph.web.controllers.listener import broadcast @@ -52,6 +54,7 @@ async def run_server(host: str, port: int, shared_stats: dict, extra_web_config: LOGGER.debug("Setup of runner") + app["config"] = config app["extra_config"] = extra_web_config app["shared_stats"] = shared_stats @@ -81,22 +84,25 @@ def run_server_coroutine( Used as target of multiprocessing.Process. """ setproctitle(f"pyaleph-run_server_coroutine-{port}") + + loop, config = prepare_loop(config_values) + extra_web_config = extra_web_config or {} setup_logging( - loglevel=config_values["logging"]["level"], + loglevel=config.logging.level.value, filename=f"/tmp/run_server_coroutine-{port}.log", ) if enable_sentry: sentry_sdk.init( - dsn=config_values["sentry"]["dsn"], - traces_sample_rate=config_values["sentry"]["traces_sample_rate"], + dsn=config.sentry.dsn.value, + traces_sample_rate=config.sentry.traces_sample_rate.value, ignore_errors=[KeyboardInterrupt], ) + # Use a try-catch-capture_exception to work with multiprocessing, see # https://github.com/getsentry/raven-python/issues/1110 try: - loop = prepare_loop(config_values) - loop.run_until_complete(run_server(host, port, shared_stats, extra_web_config)) + loop.run_until_complete(run_server(config, host, port, shared_stats, extra_web_config)) except Exception as e: if enable_sentry: sentry_sdk.capture_exception(e) @@ -104,7 +110,7 @@ def run_server_coroutine( raise -def main(args): +async def main(args): """Main entry point allowing external calls Args: @@ -125,7 +131,7 @@ def main(args): return LOGGER.info("Loading configuration") - config = Config(schema=get_defaults()) + config = aleph.config.app_config if args.config_file is not None: LOGGER.debug("Loading config file '%s'", args.config_file) @@ -133,7 +139,6 @@ def main(args): # CLI config values override config file values config.logging.level.value = args.loglevel - app["config"] = config # Check for invalid/deprecated config if "protocol" in config.p2p.clients.value: @@ -155,10 +160,10 @@ def main(args): if args.sentry_disabled: LOGGER.info("Sentry disabled by CLI arguments") - elif app["config"].sentry.dsn.value: + elif config.sentry.dsn.value: sentry_sdk.init( - 
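The `run_server_coroutine` change above shows how configuration now crosses the process boundary: the parent hands over a plain (picklable) dict of values, and the child rebuilds both the Config object and its event loop with `prepare_loop()`. A rough sketch of that flow; the parent-side `dump_values()` call is not shown in this hunk and is assumed here:

```python
# Hedged sketch of the subprocess bootstrap used by run_server_coroutine and
# the job subprocesses: config travels as a picklable dict, and prepare_loop()
# rebuilds the config and event loop on the child side.
import asyncio
from multiprocessing import Process

from aleph.jobs.job_utils import prepare_loop


def child(config_values: dict) -> None:
    loop, config = prepare_loop(config_values)
    # Placeholder body; the real child runs run_server(config, ...) or a job task.
    loop.run_until_complete(asyncio.sleep(0))
    print("child saw log level:", config.logging.level.value)


if __name__ == "__main__":
    import aleph.config

    config = aleph.config.get_config()
    # dump_values() is assumed here as the way the parent serializes its Config.
    Process(target=child, args=(config.dump_values(),)).start()
```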
dsn=app["config"].sentry.dsn.value, - traces_sample_rate=app["config"].sentry.traces_sample_rate.value, + dsn=config.sentry.dsn.value, + traces_sample_rate=config.sentry.traces_sample_rate.value, ignore_errors=[KeyboardInterrupt], ) LOGGER.info("Sentry enabled") @@ -169,8 +174,6 @@ def main(args): model.init_db(config, ensure_indexes=True) LOGGER.info("Database initialized.") - # filestore.init_store(config) - # LOGGER.info("File store initalized.") init_cors() # FIXME: This is stateful and process-dependent set_start_method("spawn") @@ -191,12 +194,9 @@ def main(args): use_processes=True, ) - loop = asyncio.get_event_loop() - # handler = app.make_handler(loop=loop) LOGGER.debug("Initializing p2p") - p2p_init_task = p2p.init_p2p(config, api_servers) - p2p_client, p2p_tasks = loop.run_until_complete(p2p_init_task) + p2p_client, p2p_tasks = await p2p.init_p2p(config, api_servers) tasks += p2p_tasks LOGGER.debug("Initialized p2p") @@ -217,7 +217,7 @@ def main(args): config.aleph.host.value, config.p2p.http_port.value, shared_stats, - args.sentry_disabled is False and app["config"].sentry.dsn.value, + args.sentry_disabled is False and config.sentry.dsn.value, extra_web_config, ), ) @@ -228,7 +228,7 @@ def main(args): config.aleph.host.value, config.aleph.port.value, shared_stats, - args.sentry_disabled is False and app["config"].sentry.dsn.value, + args.sentry_disabled is False and config.sentry.dsn.value, extra_web_config, ), ) @@ -248,13 +248,13 @@ def main(args): # srv = loop.run_until_complete(f) # LOGGER.info('Serving on %s', srv.sockets[0].getsockname()) LOGGER.debug("Running event loop") - loop.run_until_complete(asyncio.gather(*tasks)) + await asyncio.gather(*tasks) def run(): """Entry point for console_scripts""" try: - main(sys.argv[1:]) + asyncio.run(main(sys.argv[1:])) except (KeyNotFoundException, InvalidConfigException): sys.exit(1) diff --git a/src/aleph/config.py b/src/aleph/config.py index 25aaf79e3..c056be042 100644 --- a/src/aleph/config.py +++ b/src/aleph/config.py @@ -1,5 +1,7 @@ import logging +from configmanager import Config + def get_defaults(): return { @@ -85,3 +87,10 @@ def get_defaults(): "traces_sample_rate": None, }, } + + +app_config = Config(schema=get_defaults()) + + +def get_config() -> Config: + return app_config diff --git a/src/aleph/handlers/storage.py b/src/aleph/handlers/storage.py index 0629aaf6c..b6fe55d2f 100644 --- a/src/aleph/handlers/storage.py +++ b/src/aleph/handlers/storage.py @@ -5,30 +5,29 @@ TODO: - check balances and storage allowance - handle incentives from 3rd party -- hjandle garbage collection of unused hashes +- handle garbage collection of unused hashes """ -import logging - -import aioipfs import asyncio +import logging from typing import Dict -from aleph_message.models import StoreMessage -from pydantic import ValidationError +import aioipfs from aioipfs import InvalidCIDError - +from aleph.config import get_config +from aleph.exceptions import AlephStorageException, UnknownHashError from aleph.services.ipfs.common import get_ipfs_api from aleph.storage import get_hash_content from aleph.types import ItemType -from aleph.web import app -from aleph.exceptions import AlephStorageException, UnknownHashError +from aleph_message.models import StoreMessage +from pydantic import ValidationError LOGGER = logging.getLogger("HANDLERS.STORAGE") async def handle_new_storage(message: Dict, content: Dict): - if not app["config"].storage.store_files.value: + config = get_config() + if not config.storage.store_files.value: return True # Ignore # 
TODO: ideally the content should be transformed earlier, but this requires more clean up @@ -49,7 +48,7 @@ async def handle_new_storage(message: Dict, content: Dict): is_folder = False item_hash = store_message.content.item_hash - ipfs_enabled = app["config"].ipfs.enabled.value + ipfs_enabled = config.ipfs.enabled.value do_standard_lookup = True size = 0 diff --git a/src/aleph/jobs/job_utils.py b/src/aleph/jobs/job_utils.py index d8fdd4041..010d79c8b 100644 --- a/src/aleph/jobs/job_utils.py +++ b/src/aleph/jobs/job_utils.py @@ -1,24 +1,30 @@ import asyncio from typing import Dict +from typing import Tuple +import aleph.config +from aleph.model import init_db_globals +from aleph.services.ipfs.common import init_ipfs_globals +from aleph.services.p2p import init_p2p_client +from configmanager import Config -def prepare_loop(config_values: Dict) -> asyncio.AbstractEventLoop: - from aleph.model import init_db - from aleph.web import app - from configmanager import Config - from aleph.config import get_defaults - from aleph.services.ipfs.common import get_ipfs_api - from aleph.services.p2p import http, init_p2p_client - http.SESSION = None # type:ignore +def prepare_loop(config_values: Dict) -> Tuple[asyncio.AbstractEventLoop, Config]: + """ + Prepares all the global variables (sigh) needed to run an Aleph subprocess. + + :param config_values: Dictionary of config values, as provided by the main process. + :returns: A preconfigured event loop, and the application config for convenience. + Use the event loop as event loop of the process as it is used by Motor. Using another + event loop will cause DB calls to fail. + """ loop = asyncio.get_event_loop() - config = Config(schema=get_defaults()) - app["config"] = config + config = aleph.config.app_config config.load_values(config_values) - init_db(config, ensure_indexes=False) - loop.run_until_complete(get_ipfs_api(timeout=2, reset=True)) + init_db_globals(config) + init_ipfs_globals(config) _ = init_p2p_client(config) - return loop + return loop, config diff --git a/src/aleph/jobs/process_pending_messages.py b/src/aleph/jobs/process_pending_messages.py index 82721e9e2..614564da3 100644 --- a/src/aleph/jobs/process_pending_messages.py +++ b/src/aleph/jobs/process_pending_messages.py @@ -194,16 +194,17 @@ def pending_messages_subprocess( """ setproctitle("aleph.jobs.messages_task_loop") + loop, config = prepare_loop(config_values) + sentry_sdk.init( - dsn=config_values["sentry"]["dsn"], - traces_sample_rate=config_values["sentry"]["traces_sample_rate"], + dsn=config.sentry.dsn.value, + traces_sample_rate=config.sentry.traces_sample_rate.value, ignore_errors=[KeyboardInterrupt], ) setup_logging( - loglevel=config_values["logging"]["level"], + loglevel=config.logging.level.value, filename="/tmp/messages_task_loop.log", ) singleton.api_servers = api_servers - loop = prepare_loop(config_values) - loop.run_until_complete(asyncio.gather(retry_messages_task(shared_stats))) + loop.run_until_complete(retry_messages_task(shared_stats)) diff --git a/src/aleph/jobs/process_pending_txs.py b/src/aleph/jobs/process_pending_txs.py index 66fb8eba7..d55c08447 100644 --- a/src/aleph/jobs/process_pending_txs.py +++ b/src/aleph/jobs/process_pending_txs.py @@ -134,16 +134,17 @@ async def handle_txs_task(): def pending_txs_subprocess(config_values: Dict, api_servers: List): setproctitle("aleph.jobs.txs_task_loop") + loop, config = prepare_loop(config_values) + sentry_sdk.init( - dsn=config_values["sentry"]["dsn"], - 
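The docstring added to `prepare_loop()` is worth unpacking: the Motor client created by `init_db_globals()` is tied to the event loop that is current when it is created, so a subprocess must keep using the loop that `prepare_loop()` returns. A hypothetical illustration; the collection name and query are placeholders:

```python
# Why prepare_loop() returns the loop it configured: the AsyncIOMotorClient
# created in init_db_globals() is bound to that loop, so DB calls must run on it.
import aleph.model
from aleph.jobs.job_utils import prepare_loop


def worker(config_values: dict) -> None:
    loop, config = prepare_loop(config_values)

    async def count_messages() -> int:
        # "messages" is a hypothetical collection name, for illustration only.
        return await aleph.model.db.messages.count_documents({})

    # Correct: reuse the loop prepare_loop() configured.
    print(loop.run_until_complete(count_messages()))
    # Incorrect: asyncio.run(count_messages()) would spin up a fresh loop and
    # the Motor calls would fail, as the docstring above warns.
```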
traces_sample_rate=config_values["sentry"]["traces_sample_rate"], + dsn=config.sentry.dsn.value, + traces_sample_rate=config.sentry.traces_sample_rate.value, ignore_errors=[KeyboardInterrupt], ) setup_logging( - loglevel=config_values["logging"]["level"], + loglevel=config.logging.level.value, filename="/tmp/txs_task_loop.log", ) singleton.api_servers = api_servers - loop = prepare_loop(config_values) loop.run_until_complete(handle_txs_task()) diff --git a/src/aleph/model/__init__.py b/src/aleph/model/__init__.py index 05c04217b..ac3e4cc9f 100644 --- a/src/aleph/model/__init__.py +++ b/src/aleph/model/__init__.py @@ -1,3 +1,4 @@ +import asyncio from logging import getLogger from configmanager import Config @@ -22,11 +23,15 @@ fs = None -def init_db(config: Config, ensure_indexes: bool = True): +def init_db_globals(config: Config): global connection, db, fs connection = AsyncIOMotorClient(config.mongodb.uri.value, tz_aware=True) db = connection[config.mongodb.database.value] fs = AsyncIOMotorGridFSBucket(db) + + +def init_db(config: Config, ensure_indexes: bool = True): + init_db_globals(config) sync_connection = MongoClient(config.mongodb.uri.value, tz_aware=True) sync_db = sync_connection[config.mongodb.database.value] diff --git a/src/aleph/network.py b/src/aleph/network.py index 8a4b28230..89729def9 100644 --- a/src/aleph/network.py +++ b/src/aleph/network.py @@ -107,7 +107,6 @@ async def check_message( if message.get("hash_type", "sha256") == "sha256": # leave the door open. if not trusted: - loop = asyncio.get_event_loop() item_hash = get_sha256(message["item_content"]) # item_hash = await loop.run_in_executor(None, get_sha256, message['item_content']) # item_hash = sha256(message['item_content'].encode('utf-8')).hexdigest() diff --git a/src/aleph/services/filestore.py b/src/aleph/services/filestore.py index fee28acf0..fb40f0615 100644 --- a/src/aleph/services/filestore.py +++ b/src/aleph/services/filestore.py @@ -1,16 +1,20 @@ import logging from typing import Optional, Union -from bson.objectid import ObjectId - +from aleph.config import get_config from aleph.model import hashes -from aleph.web import app +from bson.objectid import ObjectId LOGGER = logging.getLogger("filestore") +def _get_storage_engine() -> str: + config = get_config() + return config.storage.engine.value + + async def get_value(key: str) -> Optional[bytes]: - engine = app["config"].storage.engine.value + engine = _get_storage_engine() if engine != "mongodb": raise ValueError(f"Unsupported storage engine: '{engine}'.") @@ -24,7 +28,7 @@ async def get_value(key: str) -> Optional[bytes]: async def set_value(key: Union[bytes, str], value: Union[bytes, str]) -> ObjectId: - engine = app["config"].storage.engine.value + engine = _get_storage_engine() if engine != "mongodb": raise ValueError(f"Unsupported storage engine: '{engine}'.") diff --git a/src/aleph/services/ipfs/common.py b/src/aleph/services/ipfs/common.py index d051fd441..551783426 100644 --- a/src/aleph/services/ipfs/common.py +++ b/src/aleph/services/ipfs/common.py @@ -1,8 +1,9 @@ import logging import aioipfs - +import aleph.config from aleph.services.utils import get_IP +from configmanager import Config API = None LOGGER = logging.getLogger("IPFS") @@ -18,22 +19,26 @@ async def get_ipfs_gateway_url(config, hash): ) -async def get_ipfs_api(timeout=5, reset=False): +def init_ipfs_globals(config: Config, timeout: int = 5) -> None: + global API + + host = config.ipfs.host.value + port = config.ipfs.port.value + + API = aioipfs.AsyncIPFS( + host=host, + 
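The model/__init__.py change splits database setup in two: `init_db_globals()` creates only the async (Motor) globals, which is all a subprocess needs, while `init_db()` adds a synchronous PyMongo client on top, presumably because index creation in the main process happens outside any running event loop. A condensed sketch of that split, with index creation elided:

```python
# Condensed sketch of the init_db / init_db_globals split; index creation and
# the other details of src/aleph/model/__init__.py are elided.
from configmanager import Config
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorGridFSBucket
from pymongo import MongoClient

connection = None
db = None
fs = None


def init_db_globals(config: Config) -> None:
    """Async globals only -- what subprocesses call via prepare_loop()."""
    global connection, db, fs
    connection = AsyncIOMotorClient(config.mongodb.uri.value, tz_aware=True)
    db = connection[config.mongodb.database.value]
    fs = AsyncIOMotorGridFSBucket(db)


def init_db(config: Config, ensure_indexes: bool = True) -> None:
    """Main-process entry point: async globals plus a sync client for indexes."""
    init_db_globals(config)
    sync_db = MongoClient(config.mongodb.uri.value, tz_aware=True)[
        config.mongodb.database.value
    ]
    if ensure_indexes:
        pass  # create indexes on sync_db, as in the original module
```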
port=port, + read_timeout=timeout, + conns_max=25, + conns_max_per_host=10, + debug=(config.logging.level.value <= logging.DEBUG) + ) + + +async def get_ipfs_api(timeout: int = 5, reset: bool = False): global API if API is None or reset: - from aleph.web import app - - host = app["config"].ipfs.host.value - port = app["config"].ipfs.port.value - - API = aioipfs.AsyncIPFS( - host=host, - port=port, - read_timeout=timeout, - conns_max=25, - conns_max_per_host=10, - debug=(app["config"].logging.level.value <= logging.DEBUG) - ) + init_ipfs_globals(aleph.config.app_config, timeout) return API diff --git a/src/aleph/services/ipfs/storage.py b/src/aleph/services/ipfs/storage.py index ff07c5458..197cf00d8 100644 --- a/src/aleph/services/ipfs/storage.py +++ b/src/aleph/services/ipfs/storage.py @@ -6,6 +6,7 @@ import aiohttp import aioipfs +from aleph.config import get_config from .common import get_ipfs_api, get_base_url from ...utils import run_in_executor @@ -106,10 +107,10 @@ async def pin_add(hash: str, timeout: int = 2, tries: int = 1): async def add_file(fileobject, filename): - async with aiohttp.ClientSession() as session: - from aleph.web import app + config = get_config() - url = "%s/api/v0/add" % (await get_base_url(app["config"])) + async with aiohttp.ClientSession() as session: + url = "%s/api/v0/add" % (await get_base_url(config)) data = aiohttp.FormData() data.add_field("path", fileobject, filename=filename) diff --git a/src/aleph/services/p2p/manager.py b/src/aleph/services/p2p/manager.py index fc18bf1e3..da7dc71a8 100644 --- a/src/aleph/services/p2p/manager.py +++ b/src/aleph/services/p2p/manager.py @@ -40,13 +40,11 @@ async def initialize_host( tidy_http_peers_job(config=config, api_servers=api_servers), ] if listen: - from aleph.web import app - peer_id, _ = await p2p_client.identify() LOGGER.info("Listening on " + f"{transport_opt}/p2p/{peer_id}") ip = await get_IP() public_address = f"/ip4/{ip}/tcp/{port}/p2p/{peer_id}" - http_port = app["config"].p2p.http_port.value + http_port = config.p2p.http_port.value public_adresses.append(public_address) public_http_address = f"http://{ip}:{http_port}" @@ -60,7 +58,7 @@ async def initialize_host( p2p_alive_topic=config.p2p.alive_topic.value, ipfs_alive_topic=config.ipfs.alive_topic.value, peer_type="P2P", - use_ipfs=app["config"].ipfs.enabled.value, + use_ipfs=config.ipfs.enabled.value, ), publish_host( public_http_address, @@ -68,14 +66,14 @@ async def initialize_host( p2p_alive_topic=config.p2p.alive_topic.value, ipfs_alive_topic=config.ipfs.alive_topic.value, peer_type="HTTP", - use_ipfs=app["config"].ipfs.enabled.value, + use_ipfs=config.ipfs.enabled.value, ), monitor_hosts_p2p(p2p_client, alive_topic=config.p2p.alive_topic.value), ] - if app["config"].ipfs.enabled.value: + if config.ipfs.enabled.value: tasks.append( - monitor_hosts_ipfs(alive_topic=app["config"].ipfs.alive_topic.value) + monitor_hosts_ipfs(alive_topic=config.ipfs.alive_topic.value) ) try: public_ipfs_address = await get_public_address() diff --git a/src/aleph/storage.py b/src/aleph/storage.py index f9eac3cb7..2de2333c4 100644 --- a/src/aleph/storage.py +++ b/src/aleph/storage.py @@ -19,8 +19,8 @@ from aleph.services.p2p.singleton import get_streamer from aleph.types import ItemType from aleph.utils import run_in_executor, get_sha256 -from aleph.web import app from aleph.services.ipfs.common import get_cid_version +from aleph.config import get_config LOGGER = logging.getLogger("STORAGE") @@ -99,7 +99,8 @@ async def fetch_content_from_network( content_hash: str, 
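`get_ipfs_api()` now works in two modes: subprocesses initialize the global client eagerly through `init_ipfs_globals()` (called from `prepare_loop()`), and any other caller that reaches it first falls back to the global config. A hypothetical call site; the aioipfs `cat()` coroutine and `APIError` exception are assumed here:

```python
# Hypothetical call site for the lazily-initialized global IPFS client.
# aioipfs's cat() and APIError are assumed; error handling is minimal.
import aioipfs

from aleph.services.ipfs.common import get_ipfs_api


async def fetch_ipfs_content(cid: str) -> bytes:
    api = await get_ipfs_api()  # initializes the global client on first use
    try:
        return await api.cat(cid)
    except aioipfs.APIError:
        raise RuntimeError(f"Could not fetch {cid} from IPFS")
```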
engine: ItemType, timeout: int ) -> Optional[bytes]: content = None - enabled_clients = app["config"].p2p.clients.value + config = get_config() + enabled_clients = config.p2p.clients.value if "protocol" in enabled_clients: streamer = get_streamer() @@ -145,7 +146,8 @@ async def verify_content_hash( Checks that the hash of a content we fetched from the network matches the expected hash. :return: True if the hashes match, False otherwise. """ - ipfs_enabled = app["config"].ipfs.enabled.value + config = get_config() + ipfs_enabled = config.ipfs.enabled.value if engine == ItemType.IPFS and ipfs_enabled: try: @@ -181,7 +183,8 @@ async def get_hash_content( store_value: bool = True, ) -> RawContent: # TODO: determine which storage engine to use - ipfs_enabled = app["config"].ipfs.enabled.value + config = get_config() + ipfs_enabled = config.ipfs.enabled.value source = None diff --git a/src/aleph/utils.py b/src/aleph/utils.py index 122c77539..3afe1acfb 100644 --- a/src/aleph/utils.py +++ b/src/aleph/utils.py @@ -7,7 +7,7 @@ async def run_in_executor(executor, func, *args): if settings.use_executors: - loop = asyncio.get_event_loop() + loop = asyncio.get_running_loop() return await loop.run_in_executor(executor, func, *args) else: return func(*args) diff --git a/src/aleph/web/controllers/metrics.py b/src/aleph/web/controllers/metrics.py index 30d9afd50..8e120e026 100644 --- a/src/aleph/web/controllers/metrics.py +++ b/src/aleph/web/controllers/metrics.py @@ -1,25 +1,20 @@ -from logging import getLogger - import asyncio import json -import logging import platform from dataclasses import dataclass, asdict +from logging import getLogger from typing import Dict, Optional from urllib.parse import urljoin -from requests import HTTPError import aiohttp +import aleph.model from aiocache import cached +from aleph import __version__ +from aleph.config import get_config from dataclasses_json import DataClassJsonMixin +from requests import HTTPError from web3 import Web3 -import aleph.model -from aleph import __version__ -from aleph.web import app - -LOGGER = logging.getLogger(__name__) - LOGGER = getLogger("WEB.metrics") @@ -104,7 +99,8 @@ async def fetch_reference_total_messages() -> Optional[int]: """Obtain the total number of Aleph messages from another node.""" LOGGER.debug("Fetching Aleph messages count") - url = app["config"].aleph.reference_node_url.value + config = get_config() + url = config.aleph.reference_node_url.value if url is None: return None @@ -125,7 +121,7 @@ async def fetch_reference_total_messages() -> Optional[int]: async def fetch_eth_height() -> Optional[int]: """Obtain the height of the Ethereum blockchain.""" LOGGER.debug("Fetching ETH height") - config = app["config"] + config = get_config() try: if config.ethereum.enabled.value: diff --git a/tests/storage/conftest.py b/tests/storage/conftest.py index 2b96a1b3f..dda6a5f34 100644 --- a/tests/storage/conftest.py +++ b/tests/storage/conftest.py @@ -1,7 +1,6 @@ import pytest from configmanager import Config from aleph.config import get_defaults -from aleph.web import app @pytest.fixture @@ -10,5 +9,5 @@ def mock_config(mocker): # To test handle_new_storage config.storage.store_files.value = True - mock_config = mocker.patch.dict(app, {"config": config}) + mock_config = mocker.patch("aleph.config.app_config", config) return mock_config From 3d1b91587fde5ba902e54ef25c3cab6d52a5e235 Mon Sep 17 00:00:00 2001 From: Olivier Desenfans Date: Thu, 7 Apr 2022 12:59:22 +0200 Subject: [PATCH 2/3] Fix config mock --- tests/storage/conftest.py | 9 
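One small but meaningful change buried in utils.py: `run_in_executor()` now uses `asyncio.get_running_loop()` instead of `get_event_loop()`. Inside a coroutine, `get_running_loop()` is guaranteed to return the loop actually driving the call and fails loudly otherwise, whereas `get_event_loop()` can silently create a new loop when none is running. A sketch of the helper after the change; `settings.use_executors` from the original module is omitted for brevity:

```python
# Sketch of run_in_executor() after the switch to get_running_loop(); the
# settings.use_executors flag from src/aleph/utils.py is omitted here.
import asyncio
import hashlib


async def run_in_executor(executor, func, *args):
    loop = asyncio.get_running_loop()  # raises if called outside a running loop
    return await loop.run_in_executor(executor, func, *args)


async def example() -> str:
    # Offload a CPU-bound hash to the default thread pool executor.
    return await run_in_executor(None, lambda: hashlib.sha256(b"abc").hexdigest())


# asyncio.run(example())
```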
++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/storage/conftest.py b/tests/storage/conftest.py index dda6a5f34..70a2bb450 100644 --- a/tests/storage/conftest.py +++ b/tests/storage/conftest.py @@ -1,13 +1,16 @@ +import aleph.config import pytest from configmanager import Config -from aleph.config import get_defaults @pytest.fixture def mock_config(mocker): - config = Config(get_defaults()) + config = Config(aleph.config.get_defaults()) # To test handle_new_storage config.storage.store_files.value = True - mock_config = mocker.patch("aleph.config.app_config", config) + # We set the global variable directly instead of patching it because of an issue + # with mocker.patch. mocker.patch uses hasattr to determine the properties of + # the mock, which does not work well with configmanager Config objects. + aleph.config.app_config = config return mock_config From 8209122bc04d8f32852ee80983c3c8cd730e16e6 Mon Sep 17 00:00:00 2001 From: Olivier Desenfans Date: Tue, 26 Apr 2022 11:35:46 +0200 Subject: [PATCH 3/3] use get_config in ipfs init --- src/aleph/services/ipfs/common.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aleph/services/ipfs/common.py b/src/aleph/services/ipfs/common.py index 551783426..310cea842 100644 --- a/src/aleph/services/ipfs/common.py +++ b/src/aleph/services/ipfs/common.py @@ -38,7 +38,7 @@ def init_ipfs_globals(config: Config, timeout: int = 5) -> None: async def get_ipfs_api(timeout: int = 5, reset: bool = False): global API if API is None or reset: - init_ipfs_globals(aleph.config.app_config, timeout) + init_ipfs_globals(aleph.config.get_config(), timeout) return API
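Because `get_config()` simply returns the `aleph.config.app_config` global, the fixture above can inject a test configuration by assigning that global directly. A hypothetical test built on it follows; note that the fixture as written does not restore the previous global, so later tests that rely on the defaults would need to reset it themselves:

```python
# Hypothetical test using the mock_config fixture; the code under test reads
# configuration through aleph.config.get_config() at call time.
import aleph.config


def test_store_files_is_enabled(mock_config):
    config = aleph.config.get_config()
    assert config.storage.store_files.value is True
    # Anything that calls get_config() from here on (e.g. handle_new_storage)
    # sees the mocked configuration.
```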