diff --git a/.github/system_tests/test_verdi_load_time.sh b/.github/system_tests/test_verdi_load_time.sh
deleted file mode 100755
index 07e8772eb..000000000
--- a/.github/system_tests/test_verdi_load_time.sh
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env bash
-set -e
-
-# Test the loading time of `verdi`. This is and attempt to catch changes to the imports in `aiida.cmdline` that will
-# indirectly load the `aiida.orm` module which will trigger loading of the backend environment. This slows down `verdi`
-# significantly, making tab-completion unusable.
-VERDI=`which verdi`
-
-# Typically, the loading time of `verdi` should be around ~0.2 seconds. When loading the database environment this
-# tends to go towards ~0.8 seconds. Since these timings are obviously machine and environment dependent, typically these
-# types of tests are fragile. But with a load limit of more than twice the ideal loading time, if exceeded, should give
-# a reasonably sure indication that the loading of `verdi` is unacceptably slowed down.
-LOAD_LIMIT=0.4
-MAX_NUMBER_ATTEMPTS=5
-
-iteration=0
-
-while true; do
-
-    iteration=$((iteration+1))
-    load_time=$(/usr/bin/time -q -f "%e" $VERDI 2>&1 > /dev/null)
-
-    if (( $(echo "$load_time < $LOAD_LIMIT" | bc -l) )); then
-        echo "SUCCESS: loading time $load_time at iteration $iteration below $LOAD_LIMIT"
-        break
-    else
-        echo "WARNING: loading time $load_time at iteration $iteration above $LOAD_LIMIT"
-
-        if [ $iteration -eq $MAX_NUMBER_ATTEMPTS ]; then
-            echo "ERROR: loading time exceeded the load limit $iteration consecutive times."
-            echo "ERROR: please check that 'aiida.cmdline' does not import 'aiida.orm' at module level, even indirectly"
-            echo "ERROR: also, the database backend environment should not be loaded."
-            exit 2
-        fi
-    fi
-
-done
diff --git a/.github/workflows/ci-code.yml b/.github/workflows/ci-code.yml
index b93f0a4da..da5d99530 100644
--- a/.github/workflows/ci-code.yml
+++ b/.github/workflows/ci-code.yml
@@ -148,4 +148,5 @@ jobs:
     -   name: Run verdi
         run: |
             verdi devel check-load-time
+            verdi devel check-undesired-imports
             .github/workflows/verdi.sh
diff --git a/.github/workflows/verdi.sh b/.github/workflows/verdi.sh
index ac9ecda4b..11bf8d2c0 100755
--- a/.github/workflows/verdi.sh
+++ b/.github/workflows/verdi.sh
@@ -9,7 +9,7 @@ VERDI=`which verdi`
 # tends to go towards ~0.8 seconds. Since these timings are obviously machine and environment dependent, typically these
 # types of tests are fragile. But with a load limit of more than twice the ideal loading time, if exceeded, should give
 # a reasonably sure indication that the loading of `verdi` is unacceptably slowed down.
-LOAD_LIMIT=0.5
+LOAD_LIMIT=0.4
 MAX_NUMBER_ATTEMPTS=5
 
 iteration=0
@@ -35,10 +35,6 @@ while true; do
 
 done
 
-$VERDI devel check-load-time
-$VERDI devel check-undesired-imports
-
-
 # Test that we can also run the CLI via `python -m aiida`,
 # that it returns a 0 exit code, and contains the expected stdout.
 echo "Invoking verdi via `python -m aiida`"
diff --git a/aiida/cmdline/commands/cmd_devel.py b/aiida/cmdline/commands/cmd_devel.py
index 0f24dfd72..ccd4be4cf 100644
--- a/aiida/cmdline/commands/cmd_devel.py
+++ b/aiida/cmdline/commands/cmd_devel.py
@@ -65,7 +65,10 @@ def devel_check_undesired_imports():
     """
     loaded_modules = 0
 
-    for modulename in ['seekpath', 'CifFile', 'ase', 'pymatgen', 'spglib', 'pymysql']:
+    for modulename in [
+        'asyncio', 'requests', 'plumpy', 'disk_objectstore', 'paramiko', 'seekpath', 'CifFile', 'ase', 'pymatgen',
+        'spglib', 'pymysql'
+    ]:
         if modulename in sys.modules:
             echo.echo_warning(f'Detected loaded module "{modulename}"')
             loaded_modules += 1
diff --git a/aiida/cmdline/commands/cmd_rabbitmq.py b/aiida/cmdline/commands/cmd_rabbitmq.py
index ece4893df..000176928 100644
--- a/aiida/cmdline/commands/cmd_rabbitmq.py
+++ b/aiida/cmdline/commands/cmd_rabbitmq.py
@@ -16,7 +16,6 @@ import typing as t
 
 import click
-import requests
 import tabulate
 import wrapt
 import yaml
 
@@ -27,6 +26,7 @@
 
 if t.TYPE_CHECKING:
     import kiwipy.rmq
+    import requests
 
     from aiida.manage.configuration.profile import Profile
 
@@ -91,12 +91,13 @@ def cmd_tasks():
     )
 
 
-def echo_response(response: requests.Response, exit_on_error: bool = True) -> None:
+def echo_response(response: 'requests.Response', exit_on_error: bool = True) -> None:
     """Echo the response of a request.
 
     :param response: The response to the request.
     :param exit_on_error: Boolean, if ``True``, call ``sys.exit`` with the status code of the response.
     """
+    import requests
     try:
         response.raise_for_status()
     except requests.HTTPError:
diff --git a/aiida/manage/__init__.py b/aiida/manage/__init__.py
index 2f41729cc..a958f61b6 100644
--- a/aiida/manage/__init__.py
+++ b/aiida/manage/__init__.py
@@ -42,7 +42,6 @@
     'Option',
     'Postgres',
     'PostgresConnectionMode',
-    'ProcessLauncher',
     'Profile',
     'RabbitmqManagementClient',
     'check_and_migrate_config',
diff --git a/aiida/manage/configuration/config.py b/aiida/manage/configuration/config.py
index f4b3f1347..2ff0ebc22 100644
--- a/aiida/manage/configuration/config.py
+++ b/aiida/manage/configuration/config.py
@@ -17,8 +17,6 @@ import tempfile
 from typing import Any, Dict, Optional, Sequence, Tuple
 
-import jsonschema
-
 from aiida.common.exceptions import ConfigurationError
 
 from . import schema as schema_module
 
@@ -126,6 +124,7 @@ def _backup(cls, filepath):
     @staticmethod
     def validate(config: dict, filepath: Optional[str] = None):
         """Validate a configuration dictionary."""
+        import jsonschema
         try:
             jsonschema.validate(instance=config, schema=config_schema())
         except jsonschema.ValidationError as error:
diff --git a/aiida/manage/configuration/options.py b/aiida/manage/configuration/options.py
index f41f6f328..977964260 100644
--- a/aiida/manage/configuration/options.py
+++ b/aiida/manage/configuration/options.py
@@ -10,8 +10,6 @@
 """Definition of known configuration options and methods to parse and get option values."""
 from typing import Any, Dict, List, Tuple
 
-import jsonschema
-
 from aiida.common.exceptions import ConfigurationError
 
 __all__ = ('get_option', 'get_option_names', 'parse_option', 'Option')
@@ -64,6 +62,8 @@ def validate(self, value: Any, cast: bool = True) -> Any:
         """
         # pylint: disable=too-many-branches
+        import jsonschema
+
         from aiida.manage.caching import _validate_identifier_pattern
 
         from .config import ConfigValidationError
 
diff --git a/aiida/manage/external/__init__.py b/aiida/manage/external/__init__.py
index d5ebc58bb..c77310935 100644
--- a/aiida/manage/external/__init__.py
+++ b/aiida/manage/external/__init__.py
@@ -23,7 +23,6 @@
     'ManagementApiConnectionError',
     'Postgres',
     'PostgresConnectionMode',
-    'ProcessLauncher',
     'RabbitmqManagementClient',
     'get_launch_queue_name',
     'get_message_exchange_name',
diff --git a/aiida/manage/external/rmq/__init__.py b/aiida/manage/external/rmq/__init__.py
index 503173d81..1af41bff0 100644
--- a/aiida/manage/external/rmq/__init__.py
+++ b/aiida/manage/external/rmq/__init__.py
@@ -16,13 +16,11 @@
 
 from .client import *
 from .defaults import *
-from .launcher import *
 from .utils import *
 
 __all__ = (
     'BROKER_DEFAULTS',
     'ManagementApiConnectionError',
-    'ProcessLauncher',
     'RabbitmqManagementClient',
     'get_launch_queue_name',
     'get_message_exchange_name',
diff --git a/aiida/manage/external/rmq/client.py b/aiida/manage/external/rmq/client.py
index 3c0938b7a..e56fb5945 100644
--- a/aiida/manage/external/rmq/client.py
+++ b/aiida/manage/external/rmq/client.py
@@ -5,10 +5,11 @@ import typing as t
 from urllib.parse import quote
 
-import requests
-
 from aiida.common.exceptions import AiidaException
 
+if t.TYPE_CHECKING:
+    import requests
+
 __all__ = ('RabbitmqManagementClient', 'ManagementApiConnectionError')
 
@@ -31,6 +32,7 @@ def __init__(self, username: str, password: str, hostname: str, virtual_host: st
         :param hostname: The hostname of the RabbitMQ server.
         :param virtual_host: The virtual host.
         """
+        import requests
         self._username = username
         self._password = password
         self._hostname = hostname
@@ -58,7 +60,7 @@ def request(
         url_params: dict[str, str] | None = None,
         method: str = 'GET',
         params: dict[str, t.Any] | None = None,
-    ) -> requests.Response:
+    ) -> 'requests.Response':
         """Make a request.
 
         :param url: The resource path with placeholders, e.g., ``queues/{virtual_host}/{queue}``.
@@ -69,6 +71,7 @@
         :returns: The response of the request.
         :raises `ManagementApiConnectionError`: If connection to the API cannot be made.
         """
+        import requests
         url = self.format_url(url, url_params)
         try:
             return requests.request(method, url, auth=self._authentication, params=params or {}, timeout=5)
diff --git a/aiida/manage/external/rmq/launcher.py b/aiida/manage/external/rmq/launcher.py
index 29acd584a..aa89b016f 100644
--- a/aiida/manage/external/rmq/launcher.py
+++ b/aiida/manage/external/rmq/launcher.py
@@ -9,8 +9,6 @@
 
 LOGGER = logging.getLogger(__name__)
 
-__all__ = ('ProcessLauncher',)
-
 
 class ProcessLauncher(plumpy.ProcessLauncher):
     """A sub class of :class:`plumpy.ProcessLauncher` to launch a ``Process``.
diff --git a/aiida/manage/manager.py b/aiida/manage/manager.py
index 9bb862cd6..e63a6d3f8 100644
--- a/aiida/manage/manager.py
+++ b/aiida/manage/manager.py
@@ -9,11 +9,12 @@
 ###########################################################################
 # pylint: disable=cyclic-import
 """AiiDA manager for global settings"""
-import asyncio
 import functools
 from typing import TYPE_CHECKING, Any, Optional, Union
 
 if TYPE_CHECKING:
+    import asyncio
+
     from kiwipy.rmq import RmqThreadCommunicator
     from plumpy.process_comms import RemoteProcessThreadController
 
@@ -416,7 +417,7 @@ def create_runner(self, with_persistence: bool = True, **kwargs: Any) -> 'Runner
 
         return runners.Runner(**settings)
 
-    def create_daemon_runner(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> 'Runner':
+    def create_daemon_runner(self, loop: Optional['asyncio.AbstractEventLoop'] = None) -> 'Runner':
         """Create and return a new daemon runner.
 
         This is used by workers when the daemon is running and in testing.
@@ -429,13 +430,13 @@ def create_daemon_runner(self, loop: Optional[asyncio.AbstractEventLoop] = None)
         from plumpy.persistence import LoadSaveContext
 
         from aiida.engine import persistence
-        from aiida.manage.external import rmq
+        from aiida.manage.external.rmq.launcher import ProcessLauncher
 
         runner = self.create_runner(rmq_submit=True, loop=loop)
         runner_loop = runner.loop
 
         # Listen for incoming launch requests
-        task_receiver = rmq.ProcessLauncher(
+        task_receiver = ProcessLauncher(
             loop=runner_loop,
             persister=self.get_persister(),
             load_context=LoadSaveContext(runner=runner),
diff --git a/aiida/repository/backend/disk_object_store.py b/aiida/repository/backend/disk_object_store.py
index 121767405..dc7325275 100644
--- a/aiida/repository/backend/disk_object_store.py
+++ b/aiida/repository/backend/disk_object_store.py
@@ -4,13 +4,14 @@
 import shutil
 import typing as t
 
-from disk_objectstore import Container
-
 from aiida.common.lang import type_check
 from aiida.storage.log import STORAGE_LOGGER
 
 from .abstract import AbstractRepositoryBackend
 
+if t.TYPE_CHECKING:
+    from disk_objectstore import Container  # pylint: disable=unused-import
+
 __all__ = ('DiskObjectStoreRepositoryBackend',)
 
 BYTES_TO_MB = 1 / 1024**2
@@ -30,7 +31,9 @@ class DiskObjectStoreRepositoryBackend(AbstractRepositoryBackend):
 
     """
 
-    def __init__(self, container: Container):
+    def __init__(self, container: 'Container'):
+        if not t.TYPE_CHECKING:
+            from disk_objectstore import Container  # pylint: disable=redefined-outer-name
         type_check(container, Container)
         self._container = container
diff --git a/aiida/storage/psql_dos/backend.py b/aiida/storage/psql_dos/backend.py
index 50b6af6ed..1af3fa56f 100644
--- a/aiida/storage/psql_dos/backend.py
+++ b/aiida/storage/psql_dos/backend.py
@@ -15,7 +15,6 @@
 import pathlib
 from typing import TYPE_CHECKING, Iterator, List, Optional, Sequence, Set, Union
 
-from disk_objectstore import Container
 from sqlalchemy.orm import Session, scoped_session, sessionmaker
 
 from aiida.common.exceptions import ClosedStorage, ConfigurationError, IntegrityError
@@ -29,6 +28,8 @@
 from .orm import authinfos, comments, computers, convert, groups, logs, nodes, querybuilder, users
 
 if TYPE_CHECKING:
+    from disk_objectstore import Container
+
     from aiida.repository.backend import DiskObjectStoreRepositoryBackend
 
 __all__ = ('PsqlDosBackend',)
@@ -195,7 +196,10 @@ def _clear(self) -> None:
         )
 
     def get_repository(self) -> 'DiskObjectStoreRepositoryBackend':
+        from disk_objectstore import Container
+
         from aiida.repository.backend import DiskObjectStoreRepositoryBackend
+
         container = Container(str(get_filepath_container(self.profile)))
         return DiskObjectStoreRepositoryBackend(container=container)
diff --git a/aiida/storage/psql_dos/migrator.py b/aiida/storage/psql_dos/migrator.py
index cc5e11efa..b15d8e6d5 100644
--- a/aiida/storage/psql_dos/migrator.py
+++ b/aiida/storage/psql_dos/migrator.py
@@ -19,14 +19,13 @@
 import contextlib
 import pathlib
-from typing import Any, Dict, Iterator, Optional
+from typing import TYPE_CHECKING, Any, Dict, Iterator, Optional
 
 from alembic.command import downgrade, upgrade
 from alembic.config import Config
 from alembic.runtime.environment import EnvironmentContext
 from alembic.runtime.migration import MigrationContext, MigrationInfo
 from alembic.script import ScriptDirectory
-from disk_objectstore import Container
 from sqlalchemy import MetaData, String, column, desc, insert, inspect, select, table
 from sqlalchemy.exc import OperationalError, ProgrammingError
 from sqlalchemy.ext.automap import automap_base
@@ -38,6 +37,9 @@
 from aiida.storage.psql_dos.models.settings import DbSetting
 from aiida.storage.psql_dos.utils import create_sqlalchemy_engine
 
+if TYPE_CHECKING:
+    from disk_objectstore import Container
+
 TEMPLATE_LEGACY_DJANGO_SCHEMA = """
 Database schema is using the legacy Django schema.
 To migrate the database schema version to the current one, run the following command:
@@ -175,12 +177,15 @@ def validate_storage(self) -> None:
             f'but the disk-objectstore\'s is {repository_uuid}.'
         )
 
-    def get_container(self) -> Container:
+    def get_container(self) -> 'Container':
         """Return the disk-object store container.
 
         :returns: The disk-object store container configured for the repository path of the current profile.
         """
+        from disk_objectstore import Container
+
         from .backend import get_filepath_container
+
         return Container(get_filepath_container(self.profile))
 
     def get_repository_uuid(self) -> str:
diff --git a/aiida/transports/plugins/ssh.py b/aiida/transports/plugins/ssh.py
index e8a3325a6..8a810fec3 100644
--- a/aiida/transports/plugins/ssh.py
+++ b/aiida/transports/plugins/ssh.py
@@ -16,7 +16,6 @@
 from stat import S_ISDIR, S_ISREG
 
 import click
-import paramiko
 
 from aiida.cmdline.params import options
 from aiida.cmdline.params.types.path import AbsolutePathOrEmptyParamType
@@ -36,6 +35,8 @@ def parse_sshconfig(computername):
 
     :param computername: the computer name for which we want the configuration.
     """
+    import paramiko
+
     config = paramiko.SSHConfig()
     try:
         with open(os.path.expanduser('~/.ssh/config'), encoding='utf8') as fhandle:
@@ -397,6 +398,8 @@ def __init__(self, *args, **kwargs):
         function (as port, username, password, ...); taken from the
         accepted paramiko.SSHClient.connect() params.
         """
+        import paramiko
+
         super().__init__(*args, **kwargs)
 
         self._sftp = None
@@ -440,6 +443,7 @@ def open(self):  # pylint: disable=too-many-branches,too-many-statements
 
         :raise aiida.common.InvalidOperation: if the channel is already open
         """
+        import paramiko
         from paramiko.ssh_exception import SSHException
 
         from aiida.common.exceptions import InvalidOperation