Pre-commit: Add ruff as the new linter and formatter (#6233)
Reduces pre-commit run time by a factor of ~10.
sphuber committed Dec 21, 2023
1 parent d1c3922 commit 64c5e6a
Showing 829 changed files with 11,908 additions and 14,914 deletions.
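For context: ruff replaces the previous pylint/yapf-based hooks with a single fast linter and formatter, which is where the speedup comes from. The `.pre-commit-config.yaml` change itself is not shown in this excerpt, but a minimal sketch of a typical ruff setup, using the publicly documented astral-sh/ruff-pre-commit hooks (the `rev` pin below is a placeholder, not the one from this commit), looks like:

repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.1.9  # placeholder pin; use the release matching your ruff version
    hooks:
      - id: ruff  # linter (subsumes most of the pylint checks used before)
        args: [--fix]
      - id: ruff-format  # formatter (replaces the previous formatter)

With such a config, `pre-commit run --all-files` reformats the whole tree in one pass, which is what produces the bulk of the diff below.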
8 changes: 2 additions & 6 deletions .docker/tests/conftest.py
@@ -1,8 +1,7 @@
 # -*- coding: utf-8 -*-
-# pylint: disable=missing-docstring, redefined-outer-name
 import json
-from pathlib import Path
 import time
+from pathlib import Path
 
 import pytest
 
@@ -13,13 +12,12 @@ def variant(request):
 
 
 @pytest.fixture(scope='session')
-def docker_compose_file(pytestconfig, variant):  # pylint: disable=unused-argument
+def docker_compose_file(pytestconfig, variant):
     return f'docker-compose.{variant}.yml'
 
 
 @pytest.fixture(scope='session')
 def docker_compose(docker_services):
-    # pylint: disable=protected-access
     return docker_services._docker_compose
 
 
@@ -31,7 +29,6 @@ def is_container_ready(docker_compose):
 @pytest.fixture(scope='session', autouse=True)
 def _docker_service_wait(docker_services):
     """Container startup wait."""
-
     time.sleep(30)
 
 
@@ -42,7 +39,6 @@ def container_user():
 
 @pytest.fixture
 def aiida_exec(docker_compose):
-
     def execute(command, user=None, **kwargs):
         if user:
             command = f'exec -T --user={user} aiida {command}'
3 changes: 1 addition & 2 deletions .docker/tests/test_aiida.py
@@ -1,9 +1,8 @@
 # -*- coding: utf-8 -*-
-# pylint: disable=missing-docstring
 import json
 
-from packaging.version import parse
 import pytest
+from packaging.version import parse
 
 
 def test_correct_python_version_installed(aiida_exec, python_version):
123 changes: 58 additions & 65 deletions .github/system_tests/test_daemon.py
@@ -7,7 +7,6 @@
 # For further information on the license, see the LICENSE.txt file #
 # For further information please visit http://www.aiida.net #
 ###########################################################################
-# pylint: disable=no-name-in-module
 """Tests to run with a running daemon."""
 import os
 import re
@@ -40,7 +39,7 @@
 from aiida.orm.nodes.caching import NodeCaching
 from aiida.plugins import CalculationFactory, WorkflowFactory
 from aiida.workflows.arithmetic.add_multiply import add, add_multiply
-from tests.utils.memory import get_instances  # pylint: disable=import-error
+from tests.utils.memory import get_instances
 
 CODENAME_ADD = 'add@localhost'
 CODENAME_DOUBLER = 'doubler@localhost'
@@ -56,10 +55,12 @@ def print_daemon_log():
 
     print(f"Output of 'cat {daemon_log}':")
     try:
-        print(subprocess.check_output(
-            ['cat', f'{daemon_log}'],
-            stderr=subprocess.STDOUT,
-        ))
+        print(
+            subprocess.check_output(
+                ['cat', f'{daemon_log}'],
+                stderr=subprocess.STDOUT,
+            )
+        )
     except subprocess.CalledProcessError as exception:
         print(f'Note: the command failed, message: {exception}')
 
@@ -81,10 +82,12 @@ def print_report(pk):
     """Print the process report for given pk."""
     print(f"Output of 'verdi process report {pk}':")
     try:
-        print(subprocess.check_output(
-            ['verdi', 'process', 'report', f'{pk}'],
-            stderr=subprocess.STDOUT,
-        ))
+        print(
+            subprocess.check_output(
+                ['verdi', 'process', 'report', f'{pk}'],
+                stderr=subprocess.STDOUT,
+            )
+        )
     except subprocess.CalledProcessError as exception:
         print(f'Note: the command failed, message: {exception}')
 
@@ -193,12 +196,9 @@ def validate_workchains(expected_results):
 
 
 def validate_cached(cached_calcs):
-    """
-    Check that the calculations with created with caching are indeed cached.
-    """
+    """Check that the calculations with created with caching are indeed cached."""
     valid = True
     for calc in cached_calcs:
-
         if not calc.is_finished_ok:
             print(
                 'Cached calculation<{}> not finished ok: process_state<{}> exit_status<{}>'.format(
@@ -208,8 +208,9 @@ def validate_cached(cached_calcs):
             print_report(calc.pk)
             valid = False
 
-        if NodeCaching.CACHED_FROM_KEY not in calc.base.extras or calc.base.caching.get_hash(
-        ) != calc.base.extras.get('_aiida_hash'):
+        if NodeCaching.CACHED_FROM_KEY not in calc.base.extras or calc.base.caching.get_hash() != calc.base.extras.get(
+            '_aiida_hash'
+        ):
             print(f'Cached calculation<{calc.pk}> has invalid hash')
             print_report(calc.pk)
             valid = False
@@ -270,48 +271,41 @@ def launch_workfunction(inputval):
 
 
 def launch_calculation(code, counter, inputval):
-    """
-    Launch calculations to the daemon through the Process layer
-    """
+    """Launch calculations to the daemon through the Process layer"""
     process, inputs, expected_result = create_calculation_process(code=code, inputval=inputval)
     calc = submit(process, **inputs)
     print(f'[{counter}] launched calculation {calc.uuid}, pk={calc.pk}')
     return calc, expected_result
 
 
 def run_calculation(code, counter, inputval):
-    """
-    Run a calculation through the Process layer.
-    """
+    """Run a calculation through the Process layer."""
     process, inputs, expected_result = create_calculation_process(code=code, inputval=inputval)
     _, calc = run.get_node(process, **inputs)
     print(f'[{counter}] ran calculation {calc.uuid}, pk={calc.pk}')
     return calc, expected_result
 
 
 def create_calculation_process(code, inputval):
-    """
-    Create the process and inputs for a submitting / running a calculation.
-    """
-    TemplatereplacerCalculation = CalculationFactory('core.templatereplacer')
+    """Create the process and inputs for a submitting / running a calculation."""
     parameters = Dict({'value': inputval})
-    template = Dict({
-        # The following line adds a significant sleep time.
-        # I set it to 1 second to speed up tests
-        # I keep it to a non-zero value because I want
-        # To test the case when AiiDA finds some calcs
-        # in a queued state
-        # 'cmdline_params': ["{}".format(counter % 3)],  # Sleep time
-        'cmdline_params': ['1'],
-        'input_file_template': '{value}',  # File just contains the value to double
-        'input_file_name': 'value_to_double.txt',
-        'output_file_name': 'output.txt',
-        'retrieve_temporary_files': ['triple_value.tmp']
-    })
+    template = Dict(
+        {
+            # The following line adds a significant sleep time.
+            # I set it to 1 second to speed up tests
+            # I keep it to a non-zero value because I want
+            # To test the case when AiiDA finds some calcs
+            # in a queued state
+            # 'cmdline_params': ["{}".format(counter % 3)],  # Sleep time
+            'cmdline_params': ['1'],
+            'input_file_template': '{value}',  # File just contains the value to double
+            'input_file_name': 'value_to_double.txt',
+            'output_file_name': 'output.txt',
+            'retrieve_temporary_files': ['triple_value.tmp'],
+        }
+    )
     options = {
-        'resources': {
-            'num_machines': 1
-        },
+        'resources': {'num_machines': 1},
         'max_wallclock_seconds': 5 * 60,
         'withmpi': False,
         'parser_name': 'core.templatereplacer',
@@ -325,15 +319,13 @@ def create_calculation_process(code, inputval):
         'template': template,
         'metadata': {
             'options': options,
-        }
+        },
     }
-    return TemplatereplacerCalculation, inputs, expected_result
+    return CalculationFactory('core.templatereplacer'), inputs, expected_result
 
 
 def run_arithmetic_add():
     """Run the `ArithmeticAddCalculation`."""
-    ArithmeticAddCalculation = CalculationFactory('core.arithmetic.add')
-
     code = load_code(CODENAME_ADD)
     inputs = {
         'x': Int(1),
@@ -342,7 +334,7 @@ def run_arithmetic_add():
     }
 
     # Normal inputs should run just fine
-    results, node = run.get_node(ArithmeticAddCalculation, **inputs)
+    results, node = run.get_node(CalculationFactory('core.arithmetic.add'), **inputs)
     assert node.is_finished_ok, node.exit_status
     assert results['sum'] == 3
 
@@ -378,22 +370,20 @@ def run_base_restart_workchain():
     inputs['add']['y'] = Int(10)
     results, node = run.get_node(ArithmeticAddBaseWorkChain, **inputs)
     assert not node.is_finished_ok, node.process_state
-    assert node.exit_status == ArithmeticAddBaseWorkChain.exit_codes.ERROR_TOO_BIG.status, node.exit_status  # pylint: disable=no-member
+    assert node.exit_status == ArithmeticAddBaseWorkChain.exit_codes.ERROR_TOO_BIG.status, node.exit_status
     assert len(node.called) == 1
 
     # Check that overriding default handler enabled status works
     inputs['add']['y'] = Int(1)
     inputs['handler_overrides'] = Dict({'disabled_handler': True})
     results, node = run.get_node(ArithmeticAddBaseWorkChain, **inputs)
     assert not node.is_finished_ok, node.process_state
-    assert node.exit_status == ArithmeticAddBaseWorkChain.exit_codes.ERROR_ENABLED_DOOM.status, node.exit_status  # pylint: disable=no-member
+    assert node.exit_status == ArithmeticAddBaseWorkChain.exit_codes.ERROR_ENABLED_DOOM.status, node.exit_status
     assert len(node.called) == 1
 
 
 def run_multiply_add_workchain():
     """Run the `MultiplyAddWorkChain`."""
-    MultiplyAddWorkChain = WorkflowFactory('core.arithmetic.multiply_add')
-
     code = load_code(CODENAME_ADD)
     inputs = {
         'x': Int(1),
@@ -403,7 +393,7 @@ def run_multiply_add_workchain():
     }
 
     # Normal inputs should run just fine
-    results, node = run.get_node(MultiplyAddWorkChain, **inputs)
+    results, node = run.get_node(WorkflowFactory('core.arithmetic.multiply_add'), **inputs)
     assert node.is_finished_ok, node.exit_status
     assert len(node.called) == 2
     assert 'result' in results
@@ -429,7 +419,6 @@ def launch_all():
 
     :returns: dictionary with expected results and pks of all launched calculations and workchains
     """
-    # pylint: disable=too-many-locals,too-many-statements
    expected_results_process_functions = {}
     expected_results_calculations = {}
     expected_results_workchains = {}
@@ -451,7 +440,6 @@ def launch_all():
     print('Testing the stashing functionality')
     process, inputs, expected_result = create_calculation_process(code=code_doubler, inputval=1)
     with tempfile.TemporaryDirectory() as tmpdir:
-
         # Delete the temporary directory to test that the stashing functionality will create it if necessary
         shutil.rmtree(tmpdir, ignore_errors=True)
 
@@ -571,8 +559,10 @@ def relaunch_cached(results):
         results['calculations'][calc.pk] = expected_result
 
     if not (
-        validate_calculations(results['calculations']) and validate_workchains(results['workchains']) and
-        validate_cached(cached_calcs) and validate_process_functions(results['process_functions'])
+        validate_calculations(results['calculations'])
+        and validate_workchains(results['workchains'])
+        and validate_cached(cached_calcs)
+        and validate_process_functions(results['process_functions'])
     ):
         print_daemon_log()
         print('')
@@ -586,7 +576,6 @@ def relaunch_cached(results):
 
 def main():
     """Launch a bunch of calculation jobs and workchains."""
-
     results = launch_all()
 
     print('Waiting for end of execution...')
@@ -603,19 +592,23 @@ def main():
     print('#' * 78)
     print("Output of 'verdi process list -a':")
     try:
-        print(subprocess.check_output(
-            ['verdi', 'process', 'list', '-a'],
-            stderr=subprocess.STDOUT,
-        ))
+        print(
+            subprocess.check_output(
+                ['verdi', 'process', 'list', '-a'],
+                stderr=subprocess.STDOUT,
+            )
+        )
     except subprocess.CalledProcessError as exception:
         print(f'Note: the command failed, message: {exception}')
 
     print("Output of 'verdi daemon status':")
     try:
-        print(subprocess.check_output(
-            ['verdi', 'daemon', 'status'],
-            stderr=subprocess.STDOUT,
-        ))
+        print(
+            subprocess.check_output(
+                ['verdi', 'daemon', 'status'],
+                stderr=subprocess.STDOUT,
+            )
+        )
     except subprocess.CalledProcessError as exception:
         print(f'Note: the command failed, message: {exception}')
 
21 changes: 7 additions & 14 deletions .github/system_tests/workchains.py
@@ -7,7 +7,6 @@
 # For further information on the license, see the LICENSE.txt file #
 # For further information please visit http://www.aiida.net #
 ###########################################################################
-# pylint: disable=invalid-name
 """Work chain implementations for testing purposes."""
 from aiida.common import AttributeDict
 from aiida.engine import (
@@ -64,15 +63,15 @@ def setup(self):
     def sanity_check_not_too_big(self, node):
         """My puny brain cannot deal with numbers that I cannot count on my hand."""
         if node.is_finished_ok and node.outputs.sum > 10:
-            return ProcessHandlerReport(True, self.exit_codes.ERROR_TOO_BIG)  # pylint: disable=no-member
+            return ProcessHandlerReport(True, self.exit_codes.ERROR_TOO_BIG)
 
     @process_handler(priority=460, enabled=False)
-    def disabled_handler(self, node):  # pylint: disable=unused-argument
+    def disabled_handler(self, node):
         """By default this is not enabled and so should never be called, irrespective of exit codes of sub process."""
-        return ProcessHandlerReport(True, self.exit_codes.ERROR_ENABLED_DOOM)  # pylint: disable=no-member
+        return ProcessHandlerReport(True, self.exit_codes.ERROR_ENABLED_DOOM)
 
     @process_handler(priority=450, exit_codes=ExitCode(1000, 'Unicorn encountered'))
-    def a_magic_unicorn_appeared(self, node):  # pylint: disable=no-self-argument
+    def a_magic_unicorn_appeared(self, node):
         """As we all know unicorns do not exist so we should never have to deal with it."""
         raise RuntimeError('this handler should never even have been called')
 
@@ -85,9 +84,7 @@ def error_negative_sum(self, node):
 
 
 class NestedWorkChain(WorkChain):
-    """
-    Nested workchain which creates a workflow where the nesting level is equal to its input.
-    """
+    """Nested workchain which creates a workflow where the nesting level is equal to its input."""
 
     @classmethod
     def define(cls, spec):
@@ -216,9 +213,7 @@ def do_test(self):
 
 
 class CalcFunctionRunnerWorkChain(WorkChain):
-    """
-    WorkChain which calls an InlineCalculation in its step.
-    """
+    """WorkChain which calls an InlineCalculation in its step."""
 
     @classmethod
     def define(cls, spec):
@@ -234,9 +229,7 @@ def do_run(self):
 
 
 class WorkFunctionRunnerWorkChain(WorkChain):
-    """
-    WorkChain which calls a workfunction in its step
-    """
+    """WorkChain which calls a workfunction in its step"""
 
     @classmethod
     def define(cls, spec):
