From a979d5d28b59786203fd771d6281d6e16963fd57 Mon Sep 17 00:00:00 2001 From: Marcel Stimberg Date: Fri, 2 Dec 2022 15:37:18 +0100 Subject: [PATCH 1/4] ci: add pyupgrade for Python >= 3.8 --- .devcontainer/dev-requirements.txt | 1 + .pre-commit-config.yaml | 7 +++++++ 2 files changed, 8 insertions(+) diff --git a/.devcontainer/dev-requirements.txt b/.devcontainer/dev-requirements.txt index 6d3ebfa3d..ae50d6251 100644 --- a/.devcontainer/dev-requirements.txt +++ b/.devcontainer/dev-requirements.txt @@ -6,3 +6,4 @@ ipympl pre-commit == 2.20.* black == 22.10.0 isort == 5.10.1 +pyupgrade == 3.2.3 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3661da38a..7b0881926 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,6 +3,13 @@ repos: hooks: - id: check-hooks-apply - id: check-useless-excludes + - repo: https://github.com/asottile/pyupgrade + rev: v3.2.3 + hooks: + - id: pyupgrade + args: [--py38-plus] + exclude: '^brian2/_version.py$' + files: '^brian2/.*\.pyi?$' - repo: https://github.com/pycqa/isort rev: 5.10.1 hooks: From 40e4a51e4e190a7516b5ce4cd2a193176e5731ff Mon Sep 17 00:00:00 2001 From: Marcel Stimberg Date: Fri, 2 Dec 2022 15:40:59 +0100 Subject: [PATCH 2/4] modernize unit template syntax --- brian2/units/allunits.py | 3 +-- dev/tools/static_codegen/units_template.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/brian2/units/allunits.py b/brian2/units/allunits.py index 5370a8e3c..c24adb112 100644 --- a/brian2/units/allunits.py +++ b/brian2/units/allunits.py @@ -1,4 +1,3 @@ -# coding=utf-8 """ THIS FILE IS AUTOMATICALLY GENERATED BY A STATIC CODE GENERATION TOOL DO NOT EDIT BY HAND @@ -7910,7 +7909,7 @@ ] -class _Celsius(object): +class _Celsius: """ A dummy object to raise errors when ``celsius`` is used. The use of `celsius` can lead to ambiguities when mixed with temperatures in `kelvin`, diff --git a/dev/tools/static_codegen/units_template.py b/dev/tools/static_codegen/units_template.py index 22b6ce28e..037414acb 100644 --- a/dev/tools/static_codegen/units_template.py +++ b/dev/tools/static_codegen/units_template.py @@ -1,4 +1,3 @@ -# coding=utf-8 """ THIS FILE IS AUTOMATICALLY GENERATED BY A STATIC CODE GENERATION TOOL DO NOT EDIT BY HAND @@ -48,7 +47,7 @@ {additional_units} {all_units} -class _Celsius(object): +class _Celsius: """ A dummy object to raise errors when ``celsius`` is used. 
The use of `celsius` can lead to ambiguities when mixed with temperatures in `kelvin`, From 28b02c51545298cb9a76d8295e64a5df391b9207 Mon Sep 17 00:00:00 2001 From: Marcel Stimberg Date: Fri, 2 Dec 2022 15:45:14 +0100 Subject: [PATCH 3/4] Modernize syntax via pyupgrade --- brian2/__init__.py | 4 +- brian2/codegen/codeobject.py | 2 +- brian2/codegen/cpp_prefs.py | 18 ++--- brian2/codegen/generators/GSL_generator.py | 30 ++++---- brian2/codegen/generators/base.py | 40 +++++----- brian2/codegen/generators/cpp_generator.py | 2 +- brian2/codegen/generators/cython_generator.py | 8 +- brian2/codegen/optimisation.py | 24 +++--- .../runtime/GSLcython_rt/GSLcython_rt.py | 2 +- brian2/codegen/runtime/cython_rt/cython_rt.py | 2 +- .../runtime/cython_rt/extension_manager.py | 6 +- brian2/codegen/statements.py | 2 +- brian2/codegen/templates.py | 12 +-- brian2/codegen/translation.py | 28 ++++--- brian2/core/clocks.py | 2 +- brian2/core/functions.py | 6 +- brian2/core/magic.py | 18 ++--- brian2/core/names.py | 2 +- brian2/core/namespace.py | 6 +- brian2/core/network.py | 22 +++--- brian2/core/operations.py | 2 +- brian2/core/preferences.py | 10 +-- brian2/core/spikesource.py | 2 +- brian2/core/tracking.py | 4 +- brian2/core/variables.py | 30 ++++---- brian2/devices/cpp_standalone/codeobject.py | 2 +- brian2/devices/cpp_standalone/device.py | 26 +++---- brian2/devices/device.py | 16 ++-- brian2/equations/codestrings.py | 2 +- brian2/equations/equations.py | 12 ++- brian2/groups/group.py | 6 +- brian2/groups/neurongroup.py | 2 +- brian2/importexport/importexport.py | 2 +- brian2/input/binomial.py | 2 +- brian2/input/poissongroup.py | 2 +- brian2/input/spikegeneratorgroup.py | 6 +- brian2/memory/__init__.py | 1 - brian2/memory/dynamicarray.py | 2 +- brian2/monitors/spikemonitor.py | 6 +- brian2/monitors/statemonitor.py | 8 +- brian2/numpy_.py | 2 +- brian2/parsing/bast.py | 2 +- brian2/parsing/dependencies.py | 16 ++-- brian2/parsing/functions.py | 2 +- brian2/parsing/rendering.py | 2 +- brian2/parsing/sympytools.py | 12 +-- brian2/spatialneuron/morphology.py | 12 +-- brian2/spatialneuron/spatialneuron.py | 4 +- brian2/sphinxext/briandoc.py | 4 +- brian2/sphinxext/docscrape.py | 6 +- brian2/sphinxext/docscrape_sphinx.py | 2 +- brian2/sphinxext/examplefinder.py | 2 +- brian2/sphinxext/generate_reference.py | 1 - brian2/stateupdaters/GSL.py | 2 +- brian2/stateupdaters/base.py | 2 +- brian2/stateupdaters/explicit.py | 75 ++++++++----------- brian2/synapses/spikequeue.py | 2 +- brian2/synapses/synapses.py | 14 ++-- brian2/tests/__init__.py | 6 +- brian2/tests/features/base.py | 8 +- brian2/tests/features/speed.py | 2 +- brian2/tests/test_base.py | 2 +- brian2/tests/test_codegen.py | 2 +- brian2/tests/test_cpp_standalone.py | 4 +- brian2/tests/test_devices.py | 2 +- brian2/tests/test_equations.py | 14 ++-- brian2/tests/test_functions.py | 2 +- brian2/tests/test_logger.py | 8 +- brian2/tests/test_network.py | 18 ++--- brian2/tests/test_neurongroup.py | 10 +-- brian2/tests/test_parsing.py | 6 +- brian2/tests/test_synapses.py | 6 +- brian2/units/constants.py | 1 - brian2/units/fundamentalunits.py | 37 +++++---- brian2/utils/caching.py | 4 +- brian2/utils/filelock.py | 10 +-- brian2/utils/filetools.py | 2 +- brian2/utils/logger.py | 28 +++---- brian2/utils/stringtools.py | 8 +- brian2/utils/topsort.py | 2 +- 80 files changed, 341 insertions(+), 382 deletions(-) diff --git a/brian2/__init__.py b/brian2/__init__.py index bc662f8ce..85ca1250e 100644 --- a/brian2/__init__.py +++ b/brian2/__init__.py @@ -128,7 +128,7 @@ 
def _get_size_recursively(dirname): try: size = os.path.getsize(os.path.join(dirpath, fname)) total_size += size - except (OSError, IOError): + except OSError: pass # ignore the file return total_size @@ -185,7 +185,7 @@ def clear_cache(target): if f.endswith(ext): break else: - raise IOError( + raise OSError( f"The cache directory for target '{target}' contains " f"the file '{os.path.join(folder, f)}' of an unexpected type and " "will therefore not be removed. Delete files in " diff --git a/brian2/codegen/codeobject.py b/brian2/codegen/codeobject.py index f4c4e5181..864acd53b 100644 --- a/brian2/codegen/codeobject.py +++ b/brian2/codegen/codeobject.py @@ -342,7 +342,7 @@ def create_runner_codeobj( device = get_device() if override_conditional_write is None: - override_conditional_write = set([]) + override_conditional_write = set() else: override_conditional_write = set(override_conditional_write) diff --git a/brian2/codegen/cpp_prefs.py b/brian2/codegen/cpp_prefs.py index a412fb3c6..0eb6b9099 100644 --- a/brian2/codegen/cpp_prefs.py +++ b/brian2/codegen/cpp_prefs.py @@ -46,13 +46,13 @@ hostname = socket.gethostname() if os.path.isfile(flag_file): try: - with open(flag_file, "r", encoding="utf-8") as f: + with open(flag_file, encoding="utf-8") as f: previously_stored_flags = json.load(f) if hostname not in previously_stored_flags: logger.debug("Ignoring stored CPU flags for a different host") else: flags = previously_stored_flags[hostname] - except (IOError, OSError) as ex: + except OSError as ex: logger.debug( f'Opening file "{flag_file}" to get CPU flags failed with error' f' "{str(ex)}".' @@ -66,7 +66,7 @@ try: output = subprocess.check_output( [sys.executable, get_cpu_flags_script], - universal_newlines=True, + text=True, encoding="utf-8", ) flags = json.loads(output) @@ -79,7 +79,7 @@ to_store = {hostname: flags} with open(flag_file, "w", encoding="utf-8") as f: json.dump(to_store, f) - except (IOError, OSError) as ex: + except OSError as ex: logger.debug( f'Writing file "{flag_file}" to store CPU flags failed with error' f' "{str(ex)}".' @@ -269,7 +269,7 @@ def _determine_flag_compatibility(compiler, flagname): prefix="brian_flag_test_" ) as temp_dir, std_silent(): fname = os.path.join(temp_dir, "flag_test.cpp") - with open(fname, "wt") as f: + with open(fname, "w") as f: f.write("int main (int argc, char **argv) { return 0; }") try: compiler.compile([fname], output_dir=temp_dir, extra_postargs=[flagname]) @@ -351,7 +351,7 @@ def get_msvc_env(): try: _msvc_env = msvc.msvc14_get_vc_env(arch_name) except distutils.errors.DistutilsPlatformError: - raise IOError( + raise OSError( "Cannot find Microsoft Visual Studio, You " "can try to set the path to vcvarsall.bat " "via the codegen.cpp.msvc_vars_location " @@ -370,11 +370,11 @@ def compiler_supports_c99(): fd, tmp_file = tempfile.mkstemp(suffix=".cpp") os.write( fd, - """ + b""" #if _MSC_VER < 1800 #error #endif - """.encode(), + """, ) os.close(fd) msvc_env, vcvars_cmd = get_msvc_env() @@ -396,7 +396,7 @@ def compiler_supports_c99(): return _compiler_supports_c99 -class C99Check(object): +class C99Check: """ Helper class to create objects that can be passed as an ``availability_check`` to a `FunctionImplementation`. 
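Note: the cpp_prefs.py hunks above show one of the mechanical rewrites pyupgrade applies: since Python 3.3, IOError is just an alias of OSError, so catching both is redundant (likewise, "r" is the default open() mode and subprocess's text= replaces universal_newlines=). A minimal sketch of the pattern, not taken from the Brian2 sources (the helper and file name are made up):

    import json

    def load_flags(flag_file="cpu_flags.json"):  # hypothetical helper, illustrative only
        try:
            with open(flag_file, encoding="utf-8") as f:   # "r" mode is the default
                return json.load(f)
        except OSError:   # replaces "except (IOError, OSError)"
            return {}
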
diff --git a/brian2/codegen/generators/GSL_generator.py b/brian2/codegen/generators/GSL_generator.py index 04608278e..c634cf400 100644 --- a/brian2/codegen/generators/GSL_generator.py +++ b/brian2/codegen/generators/GSL_generator.py @@ -64,7 +64,7 @@ def valid_gsl_dir(val): ) -class GSLCodeGenerator(object): +class GSLCodeGenerator: """ GSL code generator. @@ -427,9 +427,9 @@ def write_dataholder_single(self, var_obj): restrict = "" if var_obj.scalar or var_obj.size == 1: restrict = "" - return "%s* %s %s{end_statement}" % (dtype, restrict, pointer_name) + return f"{dtype}* {restrict} {pointer_name}{{end_statement}}" else: - return "%s %s{end_statement}" % (dtype, var_obj.name) + return f"{dtype} {var_obj.name}{{end_statement}}" def write_dataholder(self, variables_in_vector): """ @@ -530,7 +530,7 @@ def scale_array_code(self, diff_vars, method_options): ) def find_undefined_variables(self, statements): - """ + r""" Find identifiers that are not in ``self.variables`` dictionary. Brian does not save the ``_lio_`` variables it uses anywhere. This is @@ -737,9 +737,9 @@ def translate_vector_code(self, code_lines, to_replace): # special substitute because of limitations of regex word boundaries with # variable[_idx] for from_sub, to_sub in list(to_replace.items()): - m = re.search("\[(\w+)\];?$", from_sub) + m = re.search(r"\[(\w+)\];?$", from_sub) if m: - code = re.sub(re.sub("\[", "\[", from_sub), to_sub, code) + code = re.sub(re.sub(r"\[", r"\[", from_sub), to_sub, code) if "_gsl" in code: raise AssertionError( @@ -775,7 +775,7 @@ def translate_scalar_code( """ code = [] for line in code_lines: - m = re.search("(\w+ = .*)", line) + m = re.search(r"(\w+ = .*)", line) try: new_line = m.group(1) var, op, expr, comment = parse_statement(new_line) @@ -935,14 +935,12 @@ def translate( f"{len(vs)} lines of abstract code, first line is: '{vs[0]}'\n" ) logger.warn( - ( - "Came across an abstract code block that may not be " - "well-defined: the outcome may depend on the " - "order of execution. You can ignore this warning if " - "you are sure that the order of operations does not " - "matter. " - + error_msg - ) + "Came across an abstract code block that may not be " + "well-defined: the outcome may depend on the " + "order of execution. You can ignore this warning if " + "you are sure that the order of operations does not " + "matter. " + + error_msg ) # save function names because self.generator.translate_statement_sequence @@ -960,7 +958,7 @@ def translate( # first check if any indexing other than '_idx' is used (currently not supported) for code_list in list(scalar_code.values()) + list(vector_code.values()): for code in code_list: - m = re.search("\[(\w+)\]", code) + m = re.search(r"\[(\w+)\]", code) if m is not None: if m.group(1) != "0" and m.group(1) != "_idx": from brian2.stateupdaters.base import ( diff --git a/brian2/codegen/generators/base.py b/brian2/codegen/generators/base.py index 0dff37493..769b807fc 100644 --- a/brian2/codegen/generators/base.py +++ b/brian2/codegen/generators/base.py @@ -19,7 +19,7 @@ logger = get_logger(__name__) -class CodeGenerator(object): +class CodeGenerator: """ Base class for all languages. 
@@ -187,30 +187,30 @@ def array_read_write(self, statements): f"referring to vector variable '{name}'" ) write.add(stmt.var) - read = set( + read = { varname for varname, var in list(variables.items()) if isinstance(var, ArrayVariable) and varname in read - ) - write = set( + } + write = { varname for varname, var in list(variables.items()) if isinstance(var, ArrayVariable) and varname in write - ) + } # Gather the indices stored as arrays (ignore _idx which is special) indices = set() - indices |= set( + indices |= { variable_indices[varname] for varname in read if not variable_indices[varname] in ("_idx", "0") and isinstance(variables[variable_indices[varname]], ArrayVariable) - ) - indices |= set( + } + indices |= { variable_indices[varname] for varname in write if not variable_indices[varname] in ("_idx", "0") and isinstance(variables[variable_indices[varname]], ArrayVariable) - ) + } # don't list arrays that are read explicitly and used as indices twice read -= indices return read, write, indices @@ -236,12 +236,12 @@ def arrays_helper(self, statements): """ read, write, indices = self.array_read_write(statements) conditional_write_vars = self.get_conditional_write_vars() - read |= set(var for var in write if var in conditional_write_vars) - read |= set( + read |= {var for var in write if var in conditional_write_vars} + read |= { conditional_write_vars[var] for var in write if var in conditional_write_vars - ) + } return read, write, indices, conditional_write_vars def has_repeated_indices(self, statements): @@ -255,7 +255,7 @@ def has_repeated_indices(self, statements): # Check whether we potentially deal with repeated indices (which will # be the case most importantly when we write to pre- or post-synaptic # variables in synaptic code) - used_indices = set(variable_indices[var] for var in write) + used_indices = {variable_indices[var] for var in write} all_unique = all( variables[index].unique for index in used_indices @@ -293,14 +293,12 @@ def translate(self, code, dtype): f"{len(vs)} lines of abstract code, first line is: '{vs[0]}'\n" ) logger.warn( - ( - "Came across an abstract code block that may not be " - "well-defined: the outcome may depend on the " - "order of execution. You can ignore this warning if " - "you are sure that the order of operations does not " - "matter. " - + error_msg - ) + "Came across an abstract code block that may not be " + "well-defined: the outcome may depend on the " + "order of execution. You can ignore this warning if " + "you are sure that the order of operations does not " + "matter. 
" + + error_msg ) translated = self.translate_statement_sequence( diff --git a/brian2/codegen/generators/cpp_generator.py b/brian2/codegen/generators/cpp_generator.py index 3c4720f7f..918a95aaf 100644 --- a/brian2/codegen/generators/cpp_generator.py +++ b/brian2/codegen/generators/cpp_generator.py @@ -160,7 +160,7 @@ class CPPCodeGenerator(CodeGenerator): universal_support_code = _universal_support_code def __init__(self, *args, **kwds): - super(CPPCodeGenerator, self).__init__(*args, **kwds) + super().__init__(*args, **kwds) self.c_data_type = c_data_type @property diff --git a/brian2/codegen/generators/cython_generator.py b/brian2/codegen/generators/cython_generator.py index 059693f44..3b4455c8d 100644 --- a/brian2/codegen/generators/cython_generator.py +++ b/brian2/codegen/generators/cython_generator.py @@ -32,8 +32,8 @@ ] # fmt: on -cpp_dtype = dict((canonical, cpp) for canonical, cpp, np in data_type_conversion_table) -numpy_dtype = dict((canonical, np) for canonical, cpp, np in data_type_conversion_table) +cpp_dtype = {canonical: cpp for canonical, cpp, np in data_type_conversion_table} +numpy_dtype = {canonical: np for canonical, cpp, np in data_type_conversion_table} def get_cpp_dtype(obj): @@ -57,7 +57,7 @@ def render_BinOp(self, node): right = self.render_node(node.right) return f"((({left})%({right}))+({right}))%({right})" else: - return super(CythonNodeRenderer, self).render_BinOp(node) + return super().render_BinOp(node) class CythonCodeGenerator(CodeGenerator): @@ -69,7 +69,7 @@ class CythonCodeGenerator(CodeGenerator): def __init__(self, *args, **kwds): self.temporary_vars = set() - super(CythonCodeGenerator, self).__init__(*args, **kwds) + super().__init__(*args, **kwds) def translate_expression(self, expr): expr = word_substitute(expr, self.func_name_replacements) diff --git a/brian2/codegen/optimisation.py b/brian2/codegen/optimisation.py index 364a753a0..b10cb2037 100644 --- a/brian2/codegen/optimisation.py +++ b/brian2/codegen/optimisation.py @@ -23,8 +23,8 @@ from .statements import Statement # Default namespace has all the standard functions and constants in it -defaults_ns = dict((k, v.pyfunc) for k, v in DEFAULT_FUNCTIONS.items()) -defaults_ns.update(dict((k, v.value) for k, v in DEFAULT_CONSTANTS.items())) +defaults_ns = {k: v.pyfunc for k, v in DEFAULT_FUNCTIONS.items()} +defaults_ns.update({k: v.value for k, v in DEFAULT_CONSTANTS.items()}) __all__ = ["optimise_statements", "ArithmeticSimplifier", "Simplifier"] @@ -91,11 +91,11 @@ def optimise_statements(scalar_statements, vector_statements, variables, blockna new_vector_statements : sequence of Statement Simplified/optimised versions of statements """ - boolvars = dict( - (k, v) + boolvars = { + k: v for k, v in variables.items() if hasattr(v, "dtype") and brian_dtype_from_dtype(v.dtype) == "boolean" - ) + } # We use the Simplifier class by rendering each expression, which generates new scalar statements # stored in the Simplifier object, and these are then added to the scalar statements. 
simplifier = Simplifier(variables, scalar_statements, extra_lio_prefix=blockname) @@ -125,9 +125,7 @@ def optimise_statements(scalar_statements, vector_statements, variables, blockna for bool_vals in itertools.product(*bool_space): # substitute those values into the expr and simplify (including potentially pulling out new # loop invariants) - subs = dict( - (var, str(val)) for var, val in zip(used_boolvars, bool_vals) - ) + subs = {var: str(val) for var, val in zip(used_boolvars, bool_vals)} curexpr = word_substitute(new_expr, subs) curexpr = simplifier.render_expr(curexpr) key = tuple((var, val) for var, val in zip(used_boolvars, bool_vals)) @@ -217,7 +215,7 @@ def render_node(self, node): Assumes that the node has already been fully processed by BrianASTRenderer """ if not hasattr(node, "simplified"): - node = super(ArithmeticSimplifier, self).render_node(node) + node = super().render_node(node) node.simplified = True # can't evaluate vector expressions, so abandon in this case if not node.scalar: @@ -270,7 +268,7 @@ def render_BinOp(self, node): return self.render_node(newnode) left = node.left = self.render_node(node.left) right = node.right = self.render_node(node.right) - node = super(ArithmeticSimplifier, self).render_BinOp(node) + node = super().render_BinOp(node) op = node.op # Handle multiplication by 0 or 1 if op.__class__.__name__ == "Mult": @@ -437,7 +435,7 @@ def render_node(self, node): newnode.stateless = node.stateless return newnode # otherwise, render node as usual - return super(Simplifier, self).render_node(node) + return super().render_node(node) def reduced_node(terms, op): @@ -494,8 +492,8 @@ def cancel_identical_terms(primary, inverted): Inverted nodes after cancellation """ nr = NodeRenderer() - expressions = dict((node, nr.render_node(node)) for node in primary) - expressions.update(dict((node, nr.render_node(node)) for node in inverted)) + expressions = {node: nr.render_node(node) for node in primary} + expressions.update({node: nr.render_node(node) for node in inverted}) new_primary = [] inverted_expressions = [expressions[term] for term in inverted] for term in primary: diff --git a/brian2/codegen/runtime/GSLcython_rt/GSLcython_rt.py b/brian2/codegen/runtime/GSLcython_rt/GSLcython_rt.py index 1b7f8a848..219daee3c 100644 --- a/brian2/codegen/runtime/GSLcython_rt/GSLcython_rt.py +++ b/brian2/codegen/runtime/GSLcython_rt/GSLcython_rt.py @@ -48,7 +48,7 @@ def compile(self): if prefs.GSL.directory is not None: self.include_dirs += [prefs.GSL.directory] try: - super(GSLCythonCodeObject, self).compile() + super().compile() except CompileError as err: raise GSLCompileError( "\nCompilation of files generated for integration with GSL has failed." 
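Note: several hunks above (cython_generator.py, optimisation.py) replace dict() and set() calls wrapping generator expressions with literal comprehensions, another standard pyupgrade rewrite. A minimal before/after sketch with made-up data:

    pairs = [("canonical", "cpp"), ("other", "np")]   # hypothetical example data
    old_d = dict((k, v) for k, v in pairs)            # before
    new_d = {k: v for k, v in pairs}                  # after
    old_s = set(k for k, v in pairs)                  # before
    new_s = {k for k, v in pairs}                     # after
    assert old_d == new_d and old_s == new_s
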
diff --git a/brian2/codegen/runtime/cython_rt/cython_rt.py b/brian2/codegen/runtime/cython_rt/cython_rt.py index 30f15ce67..145235ee5 100644 --- a/brian2/codegen/runtime/cython_rt/cython_rt.py +++ b/brian2/codegen/runtime/cython_rt/cython_rt.py @@ -105,7 +105,7 @@ def __init__( ], "Cython", ) - super(CythonCodeObject, self).__init__( + super().__init__( owner, code, variables, diff --git a/brian2/codegen/runtime/cython_rt/extension_manager.py b/brian2/codegen/runtime/cython_rt/extension_manager.py index 630295162..59e04317b 100644 --- a/brian2/codegen/runtime/cython_rt/extension_manager.py +++ b/brian2/codegen/runtime/cython_rt/extension_manager.py @@ -61,7 +61,7 @@ def get_cython_extensions(): } -class CythonExtensionManager(object): +class CythonExtensionManager: def __init__(self): self._code_cache = {} @@ -97,7 +97,7 @@ def create_extension( os.makedirs(lib_dir) except OSError: if not os.path.exists(lib_dir): - raise IOError( + raise OSError( f"Couldn't create Cython cache directory '{lib_dir}', try setting" " the cache directly with prefs.codegen.runtime.cython.cache_dir." ) @@ -325,7 +325,7 @@ def _load_module( ) for fname in glob.glob(temp_dir): os.remove(fname) - except (OSError, IOError) as ex: + except OSError as ex: logger.debug( "Deleting Cython source files failed with error:" f" {str(ex)}" diff --git a/brian2/codegen/statements.py b/brian2/codegen/statements.py index 47dbd0558..3c4abf612 100644 --- a/brian2/codegen/statements.py +++ b/brian2/codegen/statements.py @@ -3,7 +3,7 @@ """ -class Statement(object): +class Statement: """ A single line mathematical statement. diff --git a/brian2/codegen/templates.py b/brian2/codegen/templates.py index 0c86377ff..32f562c31 100644 --- a/brian2/codegen/templates.py +++ b/brian2/codegen/templates.py @@ -55,7 +55,7 @@ def variables_to_array_names(variables, access_data=True): return names -class LazyTemplateLoader(object): +class LazyTemplateLoader: """ Helper object to load templates only when they are needed. """ @@ -85,7 +85,7 @@ def get_template(self, name): return self._templates[name] -class Templater(object): +class Templater: """ Class to load and return all the templates a `CodeObject` defines. @@ -171,7 +171,7 @@ def derive( ) -class CodeObjectTemplate(object): +class CodeObjectTemplate: """ Single template object returned by `Templater` and used for final code generation @@ -187,11 +187,11 @@ def __init__(self, template, template_source): self.template = template self.template_source = template_source #: The set of variables in this template - self.variables = set([]) + self.variables = set() #: The indices over which the template iterates completely - self.iterate_all = set([]) + self.iterate_all = set() #: Read-only variables that are changed by this template - self.writes_read_only = set([]) + self.writes_read_only = set() # This is the bit inside {} for USES_VARIABLES { list of words } specifier_blocks = re.findall( r"\bUSES_VARIABLES\b\s*\{(.*?)\}", template_source, re.M | re.S diff --git a/brian2/codegen/translation.py b/brian2/codegen/translation.py index 35abb8cf8..20bfa6620 100644 --- a/brian2/codegen/translation.py +++ b/brian2/codegen/translation.py @@ -37,7 +37,7 @@ __all__ = ["analyse_identifiers", "get_identifiers_recursively"] -class LineInfo(object): +class LineInfo: """ A helper class, just used to store attributes. """ @@ -86,19 +86,19 @@ def analyse_identifiers(code, variables, recursive=False): external namespace. 
""" if isinstance(variables, Mapping): - known = set( + known = { k for k, v in variables.items() if not isinstance(k, AuxiliaryVariable) - ) + } else: known = set(variables) - variables = dict((k, Variable(name=k, dtype=np.float64)) for k in known) + variables = {k: Variable(name=k, dtype=np.float64) for k in known} known |= STANDARD_IDENTIFIERS scalar_stmts, vector_stmts = make_statements( code, variables, np.float64, optimise=False ) stmts = scalar_stmts + vector_stmts - defined = set(stmt.var for stmt in stmts if stmt.op == ":=") + defined = {stmt.var for stmt in stmts if stmt.op == ":="} if len(stmts) == 0: allids = set() elif recursive: @@ -230,9 +230,7 @@ def make_statements(code, variables, dtype, optimise=True, blockname=""): # Do a copy so we can add stuff without altering the original dict variables = dict(variables) # we will do inference to work out which lines are := and which are = - defined = set( - k for k, v in variables.items() if not isinstance(v, AuxiliaryVariable) - ) + defined = {k for k, v in variables.items() if not isinstance(v, AuxiliaryVariable)} for line in lines: statement = None # parse statement into "var op expr" @@ -338,9 +336,9 @@ def make_statements(code, variables, dtype, optimise=True, blockname=""): line.will_write = will_write.copy() will_write.add(line.write) - subexpressions = dict( - (name, val) for name, val in variables.items() if isinstance(val, Subexpression) - ) + subexpressions = { + name: val for name, val in variables.items() if isinstance(val, Subexpression) + } # Check that no scalar subexpression refers to a vectorised function # (e.g. rand()) -- otherwise it would be differently interpreted depending # on whether it is used in a scalar or a vector context (i.e., even though @@ -365,16 +363,16 @@ def make_statements(code, variables, dtype, optimise=True, blockname=""): # sort subexpressions into an order so that subexpressions that don't depend # on other subexpressions are first - subexpr_deps = dict( - (name, [dep for dep in subexpr.identifiers if dep in subexpressions]) + subexpr_deps = { + name: [dep for dep in subexpr.identifiers if dep in subexpressions] for name, subexpr in subexpressions.items() - ) + } sorted_subexpr_vars = topsort(subexpr_deps) statements = [] # none are yet defined (or declared) - subdefined = dict((name, None) for name in subexpressions) + subdefined = {name: None for name in subexpressions} for line in lines: # update/define all subexpressions needed by this statement for var in sorted_subexpr_vars: diff --git a/brian2/core/clocks.py b/brian2/core/clocks.py index b2dd2673f..2749c4796 100644 --- a/brian2/core/clocks.py +++ b/brian2/core/clocks.py @@ -212,7 +212,7 @@ def set_interval(self, start, end): epsilon_dt = 1e-4 -class DefaultClockProxy(object): +class DefaultClockProxy: """ Method proxy to access the defaultclock of the currently active device """ diff --git a/brian2/core/functions.py b/brian2/core/functions.py index c68ce9821..30c897a34 100644 --- a/brian2/core/functions.py +++ b/brian2/core/functions.py @@ -78,7 +78,7 @@ def annotate_function_with_types(f): return annotate_function_with_types -class Function(object): +class Function: """ An abstract specification of a function that can be used as part of model equations, etc. @@ -271,7 +271,7 @@ def __call__(self, *args): return self.pyfunc(*args) -class FunctionImplementation(object): +class FunctionImplementation: """ A simple container object for function implementations. 
@@ -698,7 +698,7 @@ class SymbolicConstant(Constant): """ def __init__(self, name, sympy_obj, value): - super(SymbolicConstant, self).__init__(name, value=value) + super().__init__(name, value=value) self.sympy_obj = sympy_obj diff --git a/brian2/core/magic.py b/brian2/core/magic.py index 7056ec40a..ff73a9a50 100644 --- a/brian2/core/magic.py +++ b/brian2/core/magic.py @@ -50,7 +50,7 @@ def _get_contained_objects(obj): def get_objects_in_namespace(level): - """ + r""" Get all the objects in the current namespace that derive from `BrianObject`. Used to determine the objects for the `MagicNetwork`. @@ -148,7 +148,7 @@ def __init__(self): raise ValueError("There can be only one MagicNetwork.") MagicNetwork._already_created = True - super(MagicNetwork, self).__init__(name="magicnetwork*") + super().__init__(name="magicnetwork*") self._previous_refs = set() @@ -231,7 +231,7 @@ def check_dependencies(self): break def after_run(self): - super(MagicNetwork, self).after_run() + super().after_run() self.objects.clear() gc.collect() # Make sure that all unused objects are cleared @@ -260,7 +260,7 @@ def store(self, name="default", filename=None, level=0): See `Network.store`. """ self._update_magic_objects(level=level + 1) - super(MagicNetwork, self).store(name=name, filename=filename) + super().store(name=name, filename=filename) self.objects.clear() def restore( @@ -270,7 +270,7 @@ def restore( See `Network.restore`. """ self._update_magic_objects(level=level + 1) - super(MagicNetwork, self).restore( + super().restore( name=name, filename=filename, restore_random_state=restore_random_state ) self.objects.clear() @@ -280,9 +280,7 @@ def get_states(self, units=True, format="dict", subexpressions=False, level=0): See `Network.get_states`. """ self._update_magic_objects(level=level + 1) - states = super(MagicNetwork, self).get_states( - units, format, subexpressions, level=level + 1 - ) + states = super().get_states(units, format, subexpressions, level=level + 1) self.objects.clear() return states @@ -291,7 +289,7 @@ def set_states(self, values, units=True, format="dict", level=0): See `Network.set_states`. """ self._update_magic_objects(level=level + 1) - super(MagicNetwork, self).set_states(values, units, format, level=level + 1) + super().set_states(values, units, format, level=level + 1) self.objects.clear() def __str__(self): @@ -305,7 +303,7 @@ def __str__(self): def collect(level=0): - """ + r""" Return the list of `BrianObject`\ s that will be simulated if `run` is called. diff --git a/brian2/core/names.py b/brian2/core/names.py index 73d8d7b98..e81e3e022 100644 --- a/brian2/core/names.py +++ b/brian2/core/names.py @@ -37,7 +37,7 @@ def find_name(name, names=None): if names is None: instances = set(Nameable.__instances__()) - allnames = set(obj().name for obj in instances if hasattr(obj(), "name")) + allnames = {obj().name for obj in instances if hasattr(obj(), "name")} else: allnames = names diff --git a/brian2/core/namespace.py b/brian2/core/namespace.py index 0aefbdbc3..fb4416cfc 100644 --- a/brian2/core/namespace.py +++ b/brian2/core/namespace.py @@ -68,11 +68,11 @@ def _get_default_unit_namespace(): # Include all "simple" units from additional_units, i.e. 
units like mliter # but not "newton * metre" namespace.update( - dict( - (name, unit) + { + name: unit for name, unit in additional_unit_register.units.items() if not unit.iscompound - ) + } ) return namespace diff --git a/brian2/core/network.py b/brian2/core/network.py index f96354d23..466ab0ec0 100644 --- a/brian2/core/network.py +++ b/brian2/core/network.py @@ -92,7 +92,7 @@ def _format_time(time_in_s): return text -class TextReport(object): +class TextReport: """ Helper object to report simulation progress in `Network.run`. @@ -152,7 +152,7 @@ def _format_table(header, values, cell_formats): return "\n".join([formatted_header, line] + content) -class SchedulingSummary(object): +class SchedulingSummary: """ Object representing the schedule that is used to simulate the objects in a network. Objects of this type are returned by `scheduling_summary`, they @@ -166,10 +166,10 @@ class SchedulingSummary(object): def __init__(self, objects): # Map each dt to a rank (i.e. smallest dt=0, second smallest=1, etc.) - self.dts = dict( - (dt, rank) + self.dts = { + dt: rank for rank, dt in enumerate(sorted({float(obj.clock.dt) for obj in objects})) - ) + } ScheduleEntry = namedtuple( "ScheduleEntry", field_names=[ @@ -224,7 +224,7 @@ def __repr__(self): str(entry.dt), "step" if self.steps[float(entry.dt)] == 1 - else "{} steps".format(self.steps[float(entry.dt)]), + else f"{self.steps[float(entry.dt)]} steps", ), entry.when, entry.order, @@ -247,8 +247,8 @@ def _repr_html_(self): {} """.format( - "{} ({})".format(entry.name, entry.type), - "{} ({})".format(entry.owner_name, entry.owner_type) + f"{entry.name} ({entry.type})", + f"{entry.owner_name} ({entry.owner_type})" if entry.owner_name is not None else "–", "{} (every {})".format( @@ -887,7 +887,7 @@ def sorted_objects(self): # before_... names are assigned positions 0, 3, 6, ... # after_... names are assigned positions 2, 5, 8, ... all_objects = _get_all_objects(self.objects) - when_to_int = dict((when, 1 + i * 3) for i, when in enumerate(self.schedule)) + when_to_int = {when: 1 + i * 3 for i, when in enumerate(self.schedule)} when_to_int.update( (f"before_{when}", i * 3) for i, when in enumerate(self.schedule) ) @@ -1285,14 +1285,14 @@ def stop(self): self._stopped = True def __repr__(self): - objects = ", ".join((obj.__repr__() for obj in _get_all_objects(self.objects))) + objects = ", ".join(obj.__repr__() for obj in _get_all_objects(self.objects)) return ( f"<{self.__class__.__name__} at time t={self.t!s}, containing " f"objects: {objects}>" ) -class ProfilingSummary(object): +class ProfilingSummary: """ Class to nicely display the results of profiling. Objects of this class are returned by `profiling_summary`. diff --git a/brian2/core/operations.py b/brian2/core/operations.py index 1500dd175..83a4f880b 100644 --- a/brian2/core/operations.py +++ b/brian2/core/operations.py @@ -203,7 +203,7 @@ def f(): # Here, the 'function factory' is the locally defined class # do_network_operation, which is a callable object that takes a function # as argument and returns a NetworkOperation object. 
- class do_network_operation(object): + class do_network_operation: def __init__(self, **kwds): self.kwds = kwds diff --git a/brian2/core/preferences.py b/brian2/core/preferences.py index 95b897a74..ccaaccffe 100644 --- a/brian2/core/preferences.py +++ b/brian2/core/preferences.py @@ -80,7 +80,7 @@ class PreferenceError(Exception): pass -class DefaultValidator(object): +class DefaultValidator: """ Default preference validator @@ -102,7 +102,7 @@ def __call__(self, value): return True -class BrianPreference(object): +class BrianPreference: """ Used for defining a Brian preference. @@ -442,7 +442,7 @@ def read_preference_file(self, file): """ if isinstance(file, str): filename = file - file = open(file, "r") + file = open(file) else: filename = repr(file) lines = file.readlines() @@ -494,7 +494,7 @@ def load_preferences(self): for file in files: try: self.read_preference_file(file) - except IOError: + except OSError: pass # The "default_preferences" file is no longer used, but we raise a @@ -720,7 +720,7 @@ def __repr__(self): # Simple class to give a useful error message when using `brian_prefs` -class ErrorRaiser(object): +class ErrorRaiser: def __getattr__(self, item): raise AttributeError( "The global preferences object has been renamed " diff --git a/brian2/core/spikesource.py b/brian2/core/spikesource.py index 0867c1b97..63f7bd24e 100644 --- a/brian2/core/spikesource.py +++ b/brian2/core/spikesource.py @@ -1,7 +1,7 @@ __all__ = ["SpikeSource"] -class SpikeSource(object): +class SpikeSource: """ A source of spikes. diff --git a/brian2/core/tracking.py b/brian2/core/tracking.py index ba3773248..966f7ac1d 100644 --- a/brian2/core/tracking.py +++ b/brian2/core/tracking.py @@ -34,7 +34,7 @@ def remove(self, value): pass -class InstanceFollower(object): +class InstanceFollower: """ Keep track of all instances of classes derived from `Trackable` @@ -58,7 +58,7 @@ def get(self, cls): return self.instance_sets[cls] -class Trackable(object): +class Trackable: """ Classes derived from this will have their instances tracked. diff --git a/brian2/core/variables.py b/brian2/core/variables.py index 8ce33be56..e7871b659 100644 --- a/brian2/core/variables.py +++ b/brian2/core/variables.py @@ -89,17 +89,15 @@ def get_dtype_str(val): def variables_by_owner(variables, owner): owner_name = getattr(owner, "name", None) - return dict( - [ - (varname, var) - for varname, var in variables.items() - if getattr(var.owner, "name", None) is owner_name - ] - ) + return { + varname: var + for varname, var in variables.items() + if getattr(var.owner, "name", None) is owner_name + } class Variable(CacheKey): - """ + r""" An object providing information about model variables (including implicit variables such as ``t`` or ``xi``). This class should never be instantiated outside of testing code, use one of its subclasses instead. 
@@ -360,7 +358,7 @@ def __init__(self, name, value, dimensions=DIMENSIONLESS, owner=None): #: The constant's value self.value = value - super(Constant, self).__init__( + super().__init__( dimensions=dimensions, name=name, owner=owner, @@ -397,9 +395,7 @@ class AuxiliaryVariable(Variable): """ def __init__(self, name, dimensions=DIMENSIONLESS, dtype=None, scalar=False): - super(AuxiliaryVariable, self).__init__( - dimensions=dimensions, name=name, dtype=dtype, scalar=scalar - ) + super().__init__(dimensions=dimensions, name=name, dtype=dtype, scalar=scalar) def get_value(self): raise TypeError( @@ -466,7 +462,7 @@ def __init__( dynamic=False, unique=False, ): - super(ArrayVariable, self).__init__( + super().__init__( dimensions=dimensions, name=name, owner=owner, @@ -605,7 +601,7 @@ def __init__( #: Whether this array will be only resized along the first dimension self.resize_along_first = resize_along_first - super(DynamicArrayVariable, self).__init__( + super().__init__( dimensions=dimensions, owner=owner, name=name, @@ -683,7 +679,7 @@ def __init__( dtype=None, scalar=False, ): - super(Subexpression, self).__init__( + super().__init__( dimensions=dimensions, owner=owner, name=name, @@ -731,7 +727,7 @@ def __repr__(self): # ------------------------------------------------------------------------------ # Classes providing views on variables and storing variables information # ------------------------------------------------------------------------------ -class LinkedVariable(object): +class LinkedVariable: """ A simple helper class to make linking variables explicit. Users should use `linked_var` instead. @@ -802,7 +798,7 @@ def linked_var(group_or_variable, name=None, index=None): ) -class VariableView(object): +class VariableView: """ A view on a variable that allows to treat it as an numpy array while allowing special indexing (e.g. with strings) in the context of a `Group`. 
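Note: the variables.py hunks above drop the explicit class and instance arguments from super() calls; in Python 3 the zero-argument form resolves them automatically inside a method. A minimal sketch (the class names are illustrative, not Brian2 classes):

    class Base:
        def __init__(self, name):
            self.name = name

    class Derived(Base):
        def __init__(self, name):
            super().__init__(name)   # replaces super(Derived, self).__init__(name)

    assert Derived("v").name == "v"
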
diff --git a/brian2/devices/cpp_standalone/codeobject.py b/brian2/devices/cpp_standalone/codeobject.py index 4ac03f91f..9778c538d 100644 --- a/brian2/devices/cpp_standalone/codeobject.py +++ b/brian2/devices/cpp_standalone/codeobject.py @@ -105,7 +105,7 @@ class CPPStandaloneCodeObject(CodeObject): generator_class = CPPCodeGenerator def __init__(self, *args, **kwds): - super(CPPStandaloneCodeObject, self).__init__(*args, **kwds) + super().__init__(*args, **kwds) #: Store whether this code object defines before/after blocks self.before_after_blocks = [] diff --git a/brian2/devices/cpp_standalone/device.py b/brian2/devices/cpp_standalone/device.py index 6c645dc8f..15cd097de 100644 --- a/brian2/devices/cpp_standalone/device.py +++ b/brian2/devices/cpp_standalone/device.py @@ -102,7 +102,7 @@ ) -class CPPWriter(object): +class CPPWriter: def __init__(self, project_dir): self.project_dir = project_dir self.source_files = set() @@ -120,7 +120,7 @@ def write(self, filename, contents): return fullfilename = os.path.join(self.project_dir, filename) if os.path.exists(fullfilename): - with open(fullfilename, "r") as f: + with open(fullfilename) as f: if f.read() == contents: return with open(fullfilename, "w") as f: @@ -128,7 +128,7 @@ def write(self, filename, contents): def invert_dict(x): - return dict((v, k) for k, v in x.items()) + return {v: k for k, v in x.items()} class CPPStandaloneDevice(Device): @@ -137,7 +137,7 @@ class CPPStandaloneDevice(Device): """ def __init__(self): - super(CPPStandaloneDevice, self).__init__() + super().__init__() #: Dictionary mapping `ArrayVariable` objects to their globally #: unique name self.arrays = {} @@ -205,7 +205,7 @@ def __init__(self): #: Dictionary storing compile and binary execution times self.timers = {"run_binary": None, "compile": {"clean": None, "make": None}} - self.clocks = set([]) + self.clocks = set() self.extra_compile_args = [] self.define_macros = [] @@ -234,7 +234,7 @@ def reinit(self): build_on_run = self.build_on_run build_options = self.build_options self.__init__() - super(CPPStandaloneDevice, self).reinit() + super().reinit() self.build_on_run = build_on_run self.build_options = build_options @@ -676,7 +676,7 @@ def code_object( ) do_not_invalidate.add(var) - codeobj = super(CPPStandaloneDevice, self).code_object( + codeobj = super().code_object( owner, name, abstract_code, @@ -993,7 +993,7 @@ def generate_makefile( source_list = " ".join(source_bases) source_list_fname = os.path.join(self.project_dir, "sourcefiles.txt") if os.path.exists(source_list_fname): - with open(source_list_fname, "r") as f: + with open(source_list_fname) as f: if f.read() == source_list: return with open(source_list_fname, "w") as f: @@ -1142,7 +1142,7 @@ def compile_source(self, directory, compiler, debug, clean): if x != 0: if os.path.exists("winmake.log"): - with open("winmake.log", "r") as f: + with open("winmake.log") as f: print(f.read()) error_message = ( "Project compilation failed (error code: %u)." 
% x @@ -1225,7 +1225,7 @@ def run(self, directory, with_output, run_args): stdout.close() if x: if os.path.exists("results/stdout.txt"): - with open("results/stdout.txt", "r") as f: + with open("results/stdout.txt") as f: print(f.read()) raise RuntimeError( "Project run failed (project directory:" @@ -1233,7 +1233,7 @@ def run(self, directory, with_output, run_args): ) self.has_been_run = True if os.path.isfile("results/last_run_info.txt"): - with open("results/last_run_info.txt", "r") as f: + with open("results/last_run_info.txt") as f: last_run_info = f.read() run_time, completed_fraction = last_run_info.split() self._last_run_time = float(run_time) @@ -1563,7 +1563,7 @@ def delete(self, code=True, data=True, directory=True, force=False): full_fname = os.path.join(self.project_dir, fname) try: os.remove(full_fname) - except (OSError, IOError) as ex: + except OSError as ex: logger.debug(f'File "{full_fname}" could not be deleted: {str(ex)}') # Delete directories @@ -1845,7 +1845,7 @@ def run_function(self, name, include_in_parent=True): return RunFunctionContext(name, include_in_parent) -class RunFunctionContext(object): +class RunFunctionContext: def __init__(self, name, include_in_parent): self.name = name self.include_in_parent = include_in_parent diff --git a/brian2/devices/device.py b/brian2/devices/device.py index 9b8a24d2a..70703da4d 100644 --- a/brian2/devices/device.py +++ b/brian2/devices/device.py @@ -55,11 +55,9 @@ def auto_target(): """ global _auto_target if _auto_target is None: - target_dict = dict( - (target.class_name, target) - for target in codegen_targets - if target.class_name - ) + target_dict = { + target.class_name: target for target in codegen_targets if target.class_name + } using_fallback = False if "cython" in target_dict and target_dict["cython"].is_available(): _auto_target = target_dict["cython"] @@ -85,7 +83,7 @@ def auto_target(): return _auto_target -class Device(object): +class Device: """ Base Device object. """ @@ -473,7 +471,7 @@ class RuntimeDevice(Device): """ def __init__(self): - super(RuntimeDevice, self).__init__() + super().__init__() #: Mapping from `Variable` objects to numpy arrays (or `DynamicArray` #: objects). 
Arrays in this dictionary will disappear as soon as the #: last reference to the `Variable` object used as a key is gone @@ -581,7 +579,7 @@ def set_random_state(self, state): self.randn_buffer[:] = state["randn_buffer"] -class Dummy(object): +class Dummy: """ Dummy object """ @@ -605,7 +603,7 @@ def __setitem__(self, i, val): pass -class CurrentDeviceProxy(object): +class CurrentDeviceProxy: """ Method proxy for access to the currently active device """ diff --git a/brian2/equations/codestrings.py b/brian2/equations/codestrings.py index 0bbbae0bc..d76e67f31 100644 --- a/brian2/equations/codestrings.py +++ b/brian2/equations/codestrings.py @@ -106,7 +106,7 @@ def __init__(self, code=None, sympy_expression=None): # Just try to convert it to a sympy expression to get syntax errors # for incorrect expressions str_to_sympy(code) - super(Expression, self).__init__(code=code) + super().__init__(code=code) stochastic_variables = property( lambda self: { diff --git a/brian2/equations/equations.py b/brian2/equations/equations.py index 67f4651d2..88c66e174 100644 --- a/brian2/equations/equations.py +++ b/brian2/equations/equations.py @@ -318,9 +318,7 @@ def dimensions_and_type_from_string(unit_string): alternatives = sorted( [tuple(values) for values in base_units_for_dims.values()] ) - _base_units = dict( - [(v, DEFAULT_UNITS[v]) for values in alternatives for v in values] - ) + _base_units = {v: DEFAULT_UNITS[v] for values in alternatives for v in values} # Create a string that lists all allowed base units alternative_strings = [] for units in alternatives: @@ -521,7 +519,7 @@ def __init__( unit = property(lambda self: get_unit(self.dim), doc="The `Unit` of this equation.") identifiers = property( - lambda self: self.expr.identifiers if not self.expr is None else set([]), + lambda self: self.expr.identifiers if not self.expr is None else set(), doc="All identifiers in the RHS of this equation.", ) @@ -1021,7 +1019,7 @@ def _get_stochastic_type(self): ) dimensions = property( - lambda self: dict([(var, eq.dim) for var, eq in self._equations.items()]), + lambda self: {var: eq.dim for var, eq in self._equations.items()}, doc=( "Dictionary of all internal variables and their " "corresponding physical dimensions." @@ -1310,13 +1308,13 @@ def _latex(self, *args): else: flag_str = "" if eq.type == PARAMETER: - eq_latex = r"%s &&& \text{(unit: $%s$%s)}" % ( + eq_latex = r"{} &&& \text{{(unit: ${}${})}}".format( sympy.latex(lhs), sympy.latex(get_unit(eq.dim)), flag_str, ) else: - eq_latex = r"%s &= %s && \text{(unit of $%s$: $%s$%s)}" % ( + eq_latex = r"{} &= {} && \text{{(unit of ${}$: ${}${})}}".format( lhs, # already a string sympy.latex(rhs), sympy.latex(varname), diff --git a/brian2/groups/group.py b/brian2/groups/group.py index 59bc90a09..7616a9dd0 100644 --- a/brian2/groups/group.py +++ b/brian2/groups/group.py @@ -212,7 +212,7 @@ def _same_function(func1, func2): return func1 is func2 -class Indexing(object): +class Indexing: """ Object responsible for calculating flat index arrays from arbitrary group- specific indices. Stores strong references to the necessary variables so @@ -293,7 +293,7 @@ def __call__(self, item=slice(None), index_var=None): return index_array -class IndexWrapper(object): +class IndexWrapper: """ Convenience class to allow access to the indices via indexing syntax. 
This allows for example to get all indices for synapses originating from neuron @@ -1254,4 +1254,4 @@ def create_code_objects(self, run_namespace): def before_run(self, run_namespace): self.create_code_objects(run_namespace) - super(CodeRunner, self).before_run(run_namespace) + super().before_run(run_namespace) diff --git a/brian2/groups/neurongroup.py b/brian2/groups/neurongroup.py index 9aa46e8a4..cca738c59 100644 --- a/brian2/groups/neurongroup.py +++ b/brian2/groups/neurongroup.py @@ -991,7 +991,7 @@ def before_run(self, run_namespace=None): # Check that subexpressions that refer to stateful functions are labeled # as "constant over dt" check_subexpressions(self, self.equations, run_namespace) - super(NeuronGroup, self).before_run(run_namespace=run_namespace) + super().before_run(run_namespace=run_namespace) def _repr_html_(self): text = [rf"NeuronGroup '{self.name}' with {self._N} neurons.
"] diff --git a/brian2/importexport/importexport.py b/brian2/importexport/importexport.py index 32848173b..9537e6995 100644 --- a/brian2/importexport/importexport.py +++ b/brian2/importexport/importexport.py @@ -8,7 +8,7 @@ from abc import abstractmethod, abstractproperty -class ImportExport(object, metaclass=abc.ABCMeta): +class ImportExport(metaclass=abc.ABCMeta): """ Class for registering new import/export methods (via static methods). Also the base class that should be extended for such methods diff --git a/brian2/input/binomial.py b/brian2/input/binomial.py index 84e8a56c4..9129d9028 100644 --- a/brian2/input/binomial.py +++ b/brian2/input/binomial.py @@ -144,7 +144,7 @@ def _generate_cpp_code(n, p, use_normal, name): class BinomialFunction(Function, Nameable): - """ + r""" BinomialFunction(n, p, approximate=True, name='_binomial*') A function that generates samples from a binomial distribution. diff --git a/brian2/input/poissongroup.py b/brian2/input/poissongroup.py index 749bb04a3..8267f316e 100644 --- a/brian2/input/poissongroup.py +++ b/brian2/input/poissongroup.py @@ -140,7 +140,7 @@ def before_run(self, run_namespace=None): "argument, has to have units " "of Hz", ) - super(PoissonGroup, self).before_run(run_namespace) + super().before_run(run_namespace) @property def spikes(self): diff --git a/brian2/input/spikegeneratorgroup.py b/brian2/input/spikegeneratorgroup.py index a0e4de051..cf76668d0 100644 --- a/brian2/input/spikegeneratorgroup.py +++ b/brian2/input/spikegeneratorgroup.py @@ -191,7 +191,7 @@ def __init__( self.variables["period"].set_value(period) def _full_state(self): - state = super(SpikeGeneratorGroup, self)._full_state() + state = super()._full_state() # Store the internal information we use to decide whether to rebuild # the time bins state["_previous_dt"] = self._previous_dt @@ -202,7 +202,7 @@ def _restore_from_full_state(self, state): state = state.copy() # copy to avoid errors for multiple restores self._previous_dt = state.pop("_previous_dt") self._spikes_changed = state.pop("_spikes_changed") - super(SpikeGeneratorGroup, self)._restore_from_full_state(state) + super()._restore_from_full_state(state) def before_run(self, run_namespace): # Do some checks on the period vs. 
dt @@ -280,7 +280,7 @@ def before_run(self, run_namespace): self._previous_dt = dt self._spikes_changed = False - super(SpikeGeneratorGroup, self).before_run(run_namespace=run_namespace) + super().before_run(run_namespace=run_namespace) @check_units(indices=1, times=second, period=second) def set_spikes(self, indices, times, period=0 * second, sorted=False): diff --git a/brian2/memory/__init__.py b/brian2/memory/__init__.py index 8b1378917..e69de29bb 100644 --- a/brian2/memory/__init__.py +++ b/brian2/memory/__init__.py @@ -1 +0,0 @@ - diff --git a/brian2/memory/dynamicarray.py b/brian2/memory/dynamicarray.py index 3cc865a48..13a42568a 100644 --- a/brian2/memory/dynamicarray.py +++ b/brian2/memory/dynamicarray.py @@ -14,7 +14,7 @@ def getslices(shape, from_start=True): return tuple(slice(x, None) for x in shape) -class DynamicArray(object): +class DynamicArray: """ An N-dimensional dynamic array class diff --git a/brian2/monitors/spikemonitor.py b/brian2/monitors/spikemonitor.py index 9671ca360..1344d36b5 100644 --- a/brian2/monitors/spikemonitor.py +++ b/brian2/monitors/spikemonitor.py @@ -473,7 +473,7 @@ def __init__( #: The array of spike counts (length = size of target group) self.count = None del self.count # this is handled by the Variable mechanism - super(SpikeMonitor, self).__init__( + super().__init__( source, event="spike", variables=variables, @@ -552,7 +552,7 @@ def values(self, var): >>> print(counter2_values[1]) [100] """ - return super(SpikeMonitor, self).values(var) + return super().values(var) def all_values(self): """ @@ -588,7 +588,7 @@ def all_values(self): >>> print(all_values['t'][1]) [ 9.9] ms """ - return super(SpikeMonitor, self).all_values() + return super().all_values() def __repr__(self): classname = self.__class__.__name__ diff --git a/brian2/monitors/statemonitor.py b/brian2/monitors/statemonitor.py index e1bf2bc52..94f29f94c 100644 --- a/brian2/monitors/statemonitor.py +++ b/brian2/monitors/statemonitor.py @@ -14,7 +14,7 @@ logger = get_logger(__name__) -class StateMonitorView(object): +class StateMonitorView: def __init__(self, monitor, item): self.monitor = monitor self.item = item @@ -322,9 +322,9 @@ def __init__( scalar=var.scalar, ) - self.recorded_variables = dict( - [(varname, self.variables[varname]) for varname in variables] - ) + self.recorded_variables = { + varname: self.variables[varname] for varname in variables + } recorded_names = [varname for varname in variables] self.needed_variables = recorded_names diff --git a/brian2/numpy_.py b/brian2/numpy_.py index 61c0aa64b..870cff7d5 100644 --- a/brian2/numpy_.py +++ b/brian2/numpy_.py @@ -12,7 +12,7 @@ # These will not be imported with a wildcard import to not overwrite the # builtin names (mimicking the numpy behaviour) -from builtins import bool, int, float, complex, object, bytes, str +from builtins import bool, float, complex from numpy.core import round, abs, max, min diff --git a/brian2/parsing/bast.py b/brian2/parsing/bast.py index 0de1d745b..959382024 100644 --- a/brian2/parsing/bast.py +++ b/brian2/parsing/bast.py @@ -121,7 +121,7 @@ def brian_ast(expr, variables): return renderer.render_node(node) -class BrianASTRenderer(object): +class BrianASTRenderer: """ This class is modelled after `NodeRenderer` - see there for details. 
""" diff --git a/brian2/parsing/dependencies.py b/brian2/parsing/dependencies.py index 6498f7fea..4b0065864 100644 --- a/brian2/parsing/dependencies.py +++ b/brian2/parsing/dependencies.py @@ -7,10 +7,10 @@ def get_read_write_funcs(parsed_code): - allids = set([]) - read = set([]) - write = set([]) - funcs = set([]) + allids = set() + read = set() + write = set() + funcs = set() for node in ast.walk(parsed_code): if node.__class__ is ast.Name: allids.add(node.id) @@ -88,9 +88,9 @@ def abstract_code_dependencies(code, known_vars=None, known_funcs=None): abstract code block. """ if known_vars is None: - known_vars = set([]) + known_vars = set() if known_funcs is None: - known_funcs = set([]) + known_funcs = set() if not isinstance(known_vars, set): known_vars = set(known_vars) if not isinstance(known_funcs, set): @@ -106,8 +106,8 @@ def abstract_code_dependencies(code, known_vars=None, known_funcs=None): # Now check if there are any values that are unknown and read before # they are written to defined = known_vars.copy() - newly_defined = set([]) - undefined_read = set([]) + newly_defined = set() + undefined_read = set() for line in parsed_code.body: _, cur_read, cur_write, _ = get_read_write_funcs(line) undef = cur_read - defined diff --git a/brian2/parsing/functions.py b/brian2/parsing/functions.py index 5c581a0d4..29504b47c 100644 --- a/brian2/parsing/functions.py +++ b/brian2/parsing/functions.py @@ -13,7 +13,7 @@ ] -class AbstractCodeFunction(object): +class AbstractCodeFunction: """ The information defining an abstract code function diff --git a/brian2/parsing/rendering.py b/brian2/parsing/rendering.py index 69217475e..d289123f7 100644 --- a/brian2/parsing/rendering.py +++ b/brian2/parsing/rendering.py @@ -22,7 +22,7 @@ def get_node_value(node): return value -class NodeRenderer(object): +class NodeRenderer: expression_ops = { # BinOp "Add": "+", diff --git a/brian2/parsing/sympytools.py b/brian2/parsing/sympytools.py index 245438cb4..341e05681 100644 --- a/brian2/parsing/sympytools.py +++ b/brian2/parsing/sympytools.py @@ -145,20 +145,20 @@ def sympy_to_str(sympy_expr): A string representing the sympy expression. """ # replace the standard functions by our names if necessary - replacements = dict( - (f.sympy_func, sympy.Function(name)) + replacements = { + f.sympy_func: sympy.Function(name) for name, f in DEFAULT_FUNCTIONS.items() if f.sympy_func is not None and isinstance(f.sympy_func, sympy.FunctionClass) and str(f.sympy_func) != name - ) + } # replace constants with our names as well replacements.update( - dict( - (c.sympy_obj, sympy.Symbol(name)) + { + c.sympy_obj: sympy.Symbol(name) for name, c in DEFAULT_CONSTANTS.items() if str(c.sympy_obj) != name - ) + } ) # Replace the placeholder argument by an empty symbol diff --git a/brian2/spatialneuron/morphology.py b/brian2/spatialneuron/morphology.py index dd7191f1c..f1331625c 100644 --- a/brian2/spatialneuron/morphology.py +++ b/brian2/spatialneuron/morphology.py @@ -43,7 +43,7 @@ def _from_morphology(variable, i, j): return variable[i:j] -class MorphologyIndexWrapper(object): +class MorphologyIndexWrapper: """ A simpler version of `~brian2.groups.group.IndexWrapper`, not allowing for string indexing (`Morphology` is not a `Group`). It allows to use @@ -91,7 +91,7 @@ def _find_start_index(current, target_section, index=0): return index, False -class Topology(object): +class Topology: """ A representation of the topology of a `Morphology`. Has a useful string representation, inspired by NEURON's ``topology`` function. 
@@ -312,7 +312,7 @@ def _add_coordinates( return section -class Children(object): +class Children: """ Helper class to represent the children (sub trees) of a section. Can be used like a dictionary (mapping names to `Morphology` objects), but iterates @@ -418,7 +418,7 @@ def __repr__(self): return f"{s}>" -class Morphology(object, metaclass=abc.ABCMeta): +class Morphology(metaclass=abc.ABCMeta): """ Neuronal morphology (tree structure). @@ -1306,7 +1306,7 @@ def from_swc_file(filename, spherical_soma=True): # ignored swc_types.update({"1": "soma", "2": "axon", "3": "dend", "4": "apic"}) - with open(filename, "r") as f: + with open(filename) as f: points = [] for line_no, line in enumerate(f): line = line.strip() @@ -1362,7 +1362,7 @@ def from_file(filename, spherical_soma=True): ) -class SubMorphology(object): +class SubMorphology: """ A view on a subset of a section in a morphology. """ diff --git a/brian2/spatialneuron/spatialneuron.py b/brian2/spatialneuron/spatialneuron.py index a371cbb74..6ef06bdd4 100644 --- a/brian2/spatialneuron/spatialneuron.py +++ b/brian2/spatialneuron/spatialneuron.py @@ -38,7 +38,7 @@ logger = get_logger(__name__) -class FlatMorphology(object): +class FlatMorphology: """ Container object to store the flattened representation of a morphology. Note that all values are stored as numpy arrays without unit information @@ -708,7 +708,7 @@ def __init__(self, group, method, clock, order=0): self._ends = group.flat_morphology.ends def before_run(self, run_namespace): - super(SpatialStateUpdater, self).before_run(run_namespace) + super().before_run(run_namespace) # Raise a warning if the slow pure numpy version is used from brian2.codegen.runtime.numpy_rt.numpy_rt import NumpyCodeObject diff --git a/brian2/sphinxext/briandoc.py b/brian2/sphinxext/briandoc.py index 5c7ae4819..7bb0ad0cc 100644 --- a/brian2/sphinxext/briandoc.py +++ b/brian2/sphinxext/briandoc.py @@ -214,11 +214,11 @@ def setup(app, get_doc_object_=get_doc_object): from sphinx.domains.python import PythonDomain -class ManglingDomainBase(object): +class ManglingDomainBase: directive_mangling_map = {} def __init__(self, *a, **kw): - super(ManglingDomainBase, self).__init__(*a, **kw) + super().__init__(*a, **kw) self.wrap_mangling_directives() def wrap_mangling_directives(self): diff --git a/brian2/sphinxext/docscrape.py b/brian2/sphinxext/docscrape.py index c461c6a34..8fbe6f7fb 100644 --- a/brian2/sphinxext/docscrape.py +++ b/brian2/sphinxext/docscrape.py @@ -12,7 +12,7 @@ from sphinx.pycode import ModuleAnalyzer -class Reader(object): +class Reader: """A line-based string reader.""" def __init__(self, data): @@ -88,7 +88,7 @@ def is_empty(self): return not "".join(self._str).strip() -class NumpyDocString(object): +class NumpyDocString: def __init__(self, docstring, config={}): docstring = textwrap.dedent(docstring).split("\n") @@ -478,7 +478,7 @@ def __str__(self): print(f"Warning: invalid role {self._role}") out += f".. 
{roles.get(self._role, '')}:: {func_name}\n \n\n" - out += super(FunctionDoc, self).__str__(func_role=self._role) + out += super().__str__(func_role=self._role) return out diff --git a/brian2/sphinxext/docscrape_sphinx.py b/brian2/sphinxext/docscrape_sphinx.py index 9638d8eb9..684cb7012 100644 --- a/brian2/sphinxext/docscrape_sphinx.py +++ b/brian2/sphinxext/docscrape_sphinx.py @@ -145,7 +145,7 @@ def _str_section(self, name): def _str_see_also(self, func_role): out = [] if self["See Also"]: - see_also = super(SphinxDocString, self)._str_see_also(func_role) + see_also = super()._str_see_also(func_role) out = [".. seealso::", ""] out += self._str_indent(see_also[2:]) return out diff --git a/brian2/sphinxext/examplefinder.py b/brian2/sphinxext/examplefinder.py index 86b0c4fdc..a5c30d0a3 100644 --- a/brian2/sphinxext/examplefinder.py +++ b/brian2/sphinxext/examplefinder.py @@ -34,7 +34,7 @@ def get_map(environ_var, relrootdir, pattern, the_map, path_exclusions=[]): for fname in shortfnames ] for fname, shortfname, exname in zip(fnames, shortfnames, exnames): - with open(fname, "r") as f: + with open(fname) as f: ex = f.read() ids = get_identifiers(ex) for id in ids: diff --git a/brian2/sphinxext/generate_reference.py b/brian2/sphinxext/generate_reference.py index 933123f35..a814150f9 100644 --- a/brian2/sphinxext/generate_reference.py +++ b/brian2/sphinxext/generate_reference.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Automatically generate Brian's reference documentation. diff --git a/brian2/stateupdaters/GSL.py b/brian2/stateupdaters/GSL.py index ac03ad2f1..1eb91222c 100644 --- a/brian2/stateupdaters/GSL.py +++ b/brian2/stateupdaters/GSL.py @@ -30,7 +30,7 @@ } -class GSLContainer(object): +class GSLContainer: """ Class that contains information (equation- or integrator-related) required for later code generation diff --git a/brian2/stateupdaters/base.py b/brian2/stateupdaters/base.py index 142a46e04..0674d0542 100644 --- a/brian2/stateupdaters/base.py +++ b/brian2/stateupdaters/base.py @@ -83,7 +83,7 @@ def extract_method_options(method_options, default_options): return filled_options -class StateUpdateMethod(object, metaclass=ABCMeta): +class StateUpdateMethod(metaclass=ABCMeta): stateupdaters = dict() @abstractmethod diff --git a/brian2/stateupdaters/explicit.py b/brian2/stateupdaters/explicit.py index ef2b3fe41..f5be7a40e 100644 --- a/brian2/stateupdaters/explicit.py +++ b/brian2/stateupdaters/explicit.py @@ -281,9 +281,7 @@ def __init__(self, description, stochastic=None, custom_check=None): for symbol, unique_symbol in zip(symbols, unique_symbols): expression = expression.subs(symbol, unique_symbol) - self.symbols.update( - dict(((symbol.name, symbol) for symbol in unique_symbols)) - ) + self.symbols.update({symbol.name: symbol for symbol in unique_symbols}) if element.getName() == "statement": self.statements.append((f"__{element.identifier}", expression)) elif element.getName() == "output": @@ -357,22 +355,17 @@ def replace_func(self, x, t, expr, temp_vars, eq_symbols, stochastic_variable=No # e.g. '_k_v' for the state variable 'v' and the temporary # variable 'k'. 
if stochastic_variable is None: - temp_var_replacements = dict( - ( - (self.symbols[temp_var], _symbol(f"{temp_var}_{var}")) - for temp_var in temp_vars - ) - ) + temp_var_replacements = { + self.symbols[temp_var]: _symbol(f"{temp_var}_{var}") + for temp_var in temp_vars + } else: - temp_var_replacements = dict( - ( - ( - self.symbols[temp_var], - _symbol(f"{temp_var}_{var}_{stochastic_variable}"), - ) - for temp_var in temp_vars + temp_var_replacements = { + self.symbols[temp_var]: _symbol( + f"{temp_var}_{var}_{stochastic_variable}" ) - ) + for temp_var in temp_vars + } # In the expression given as 'x', replace 'x' by the variable # 'var' and all the temporary variables by their # variable-specific counterparts. @@ -415,10 +408,10 @@ def _non_stochastic_part( self.symbols["__x"], eq_symbols[var] ) # Replace intermediate variables - temp_var_replacements = dict( - (self.symbols[temp_var], _symbol(f"{temp_var}_{var}")) + temp_var_replacements = { + self.symbols[temp_var]: _symbol(f"{temp_var}_{var}") for temp_var in temp_vars - ) + } non_stochastic_result = non_stochastic_result.subs(temp_var_replacements) non_stochastic_results.append(non_stochastic_result) elif isinstance(stochastic_variable, str): @@ -434,13 +427,12 @@ def _non_stochastic_part( self.symbols["__x"], eq_symbols[var] ) # Replace intermediate variables - temp_var_replacements = dict( - ( - self.symbols[temp_var], - _symbol(f"{temp_var}_{var}_{stochastic_variable}"), + temp_var_replacements = { + self.symbols[temp_var]: _symbol( + f"{temp_var}_{var}_{stochastic_variable}" ) for temp_var in temp_vars - ) + } non_stochastic_result = non_stochastic_result.subs(temp_var_replacements) non_stochastic_results.append(non_stochastic_result) @@ -457,19 +449,13 @@ def _non_stochastic_part( self.symbols["__x"], eq_symbols[var] ) # Replace intermediate variables - temp_var_replacements = dict( - ( - self.symbols[temp_var], - reduce( - operator.add, - [ - _symbol(f"{temp_var}_{var}_{xi}") - for xi in stochastic_variable - ], - ), + temp_var_replacements = { + self.symbols[temp_var]: reduce( + operator.add, + [_symbol(f"{temp_var}_{var}_{xi}") for xi in stochastic_variable], ) for temp_var in temp_vars - ) + } non_stochastic_result = non_stochastic_result.subs(temp_var_replacements) non_stochastic_results.append(non_stochastic_result) @@ -506,13 +492,12 @@ def _stochastic_part( self.symbols["__dW"], stochastic_variable ) # Replace intermediate variables - temp_var_replacements = dict( - ( - self.symbols[temp_var], - _symbol(f"{temp_var}_{var}_{stochastic_variable}"), + temp_var_replacements = { + self.symbols[temp_var]: _symbol( + f"{temp_var}_{var}_{stochastic_variable}" ) for temp_var in temp_vars - ) + } stochastic_result = stochastic_result.subs(temp_var_replacements) stochastic_results.append(stochastic_result) @@ -534,10 +519,10 @@ def _stochastic_part( stochastic_result = stochastic_result.subs(self.symbols["__dW"], xi) # Replace intermediate variables - temp_var_replacements = dict( - (self.symbols[temp_var], _symbol(f"{temp_var}_{var}_{xi}")) + temp_var_replacements = { + self.symbols[temp_var]: _symbol(f"{temp_var}_{var}_{xi}") for temp_var in temp_vars - ) + } stochastic_result = stochastic_result.subs(temp_var_replacements) stochastic_results.append(stochastic_result) @@ -670,7 +655,7 @@ def __call__(self, eqs, variables=None, method_options=None): # A dictionary mapping all the variables in the equations to their # sympy representations - eq_variables = dict(((var, _symbol(var)) for var in eqs.eq_names)) + eq_variables = {var: 
_symbol(var) for var in eqs.eq_names} # Generate the random numbers for the stochastic variables for stochastic_variable in stochastic_variables: diff --git a/brian2/synapses/spikequeue.py b/brian2/synapses/spikequeue.py index fdd98386c..f36175364 100644 --- a/brian2/synapses/spikequeue.py +++ b/brian2/synapses/spikequeue.py @@ -18,7 +18,7 @@ INITIAL_MAXSPIKESPER_DT = 1 -class SpikeQueue(object): +class SpikeQueue: """ Data structure saving the spikes and taking care of delays. diff --git a/brian2/synapses/synapses.py b/brian2/synapses/synapses.py index 72d071b79..2909d8af7 100644 --- a/brian2/synapses/synapses.py +++ b/brian2/synapses/synapses.py @@ -170,7 +170,7 @@ def before_run(self, run_namespace): "the same dimensions as the right-hand " f"side expression '{self.expression}'.", ) - super(SummedVariableUpdater, self).before_run(run_namespace) + super().before_run(run_namespace) class SynapticPathway(CodeRunner, Group): @@ -362,7 +362,7 @@ def update_abstract_code(self, run_namespace=None, level=0): @device_override("synaptic_pathway_before_run") def before_run(self, run_namespace): - super(SynapticPathway, self).before_run(run_namespace) + super().before_run(run_namespace) def create_code_objects(self, run_namespace): if self._pushspikes_codeobj is None: @@ -433,7 +433,7 @@ def initialise_queue(self): ) def _full_state(self): - state = super(SynapticPathway, self)._full_state() + state = super()._full_state() if self.queue is not None: state["_spikequeue"] = self.queue._full_state() else: @@ -446,7 +446,7 @@ def _restore_from_full_state(self, state): # get treated as a state variable by the standard mechanism in # `VariableOwner` queue_state = state.pop("_spikequeue") - super(SynapticPathway, self)._restore_from_full_state(state) + super()._restore_from_full_state(state) if self.queue is None: self.queue = get_device().spike_queue(self.source.start, self.source.stop) self.queue._restore_from_full_state(queue_state) @@ -528,7 +528,7 @@ def find_synapses(index, synaptic_neuron): return synapses -class SynapticSubgroup(object): +class SynapticSubgroup: """ A simple subgroup of `Synapses` that can be used for indexing. 
@@ -567,7 +567,7 @@ def __repr__(self): ) -class SynapticIndexing(object): +class SynapticIndexing: def __init__(self, synapses): self.synapses = weakref.proxy(synapses) self.source = weakproxy_with_fallback(self.synapses.source) @@ -1498,7 +1498,7 @@ def before_run(self, run_namespace): # Check that subexpressions that refer to stateful functions are labeled # as "constant over dt" check_subexpressions(self, self.equations, run_namespace) - super(Synapses, self).before_run(run_namespace=run_namespace) + super().before_run(run_namespace=run_namespace) @device_override("synapses_connect") def connect( diff --git a/brian2/tests/__init__.py b/brian2/tests/__init__.py index 1959b7afb..6e094f470 100644 --- a/brian2/tests/__init__.py +++ b/brian2/tests/__init__.py @@ -20,7 +20,7 @@ class OurDoctestModule(pytest_doctest.DoctestModule): def collect(self): - for item in super(OurDoctestModule, self).collect(): + for item in super().collect(): # Check the object for exclusion from doctests full_name = item.name.split(".") test_name = [] @@ -40,7 +40,7 @@ def collect(self): pytest = None -class PreferencePlugin(object): +class PreferencePlugin: def __init__(self, prefs, fail_for_not_implemented=True): self.prefs = prefs self.device = "runtime" @@ -57,7 +57,7 @@ def pytest_configure(self, config): config.pluginmanager.register(xdist_plugin) -class XDistPreferencePlugin(object): +class XDistPreferencePlugin: def __init__(self, pref_plugin): self._pref_plugin = pref_plugin diff --git a/brian2/tests/features/base.py b/brian2/tests/features/base.py index 52705a147..d8a830419 100644 --- a/brian2/tests/features/base.py +++ b/brian2/tests/features/base.py @@ -36,7 +36,7 @@ def __init__(self, error, *args): AssertionError.__init__(self, *args) -class BaseTest(object): +class BaseTest: """ """ category = None # a string with the category of features @@ -133,7 +133,7 @@ def __call__(self): return self -class Configuration(object): +class Configuration: """ """ name = None # The name of this configuration @@ -388,7 +388,7 @@ def run_feature_tests( return FeatureTestResults(full_results, tag_results, configurations, feature_tests) -class FeatureTestResults(object): +class FeatureTestResults: def __init__(self, full_results, tag_results, configurations, feature_tests): self.full_results = full_results self.tag_results = tag_results @@ -574,7 +574,7 @@ def run_speed_tests( return SpeedTestResults(full_results, configurations, speed_tests) -class SpeedTestResults(object): +class SpeedTestResults: def __init__(self, full_results, configurations, speed_tests): self.full_results = full_results self.configurations = configurations diff --git a/brian2/tests/features/speed.py b/brian2/tests/features/speed.py index 302d1c288..0bdc3ca22 100644 --- a/brian2/tests/features/speed.py +++ b/brian2/tests/features/speed.py @@ -260,7 +260,7 @@ def run(self): self.timed_run(self.duration) -class SynapsesOnly(object): +class SynapsesOnly: category = "Synapses only" tags = ["Synapses"] n_range = [10, 100, 1000, 10000] diff --git a/brian2/tests/test_base.py b/brian2/tests/test_base.py index 5e1063a3a..0ce009e27 100644 --- a/brian2/tests/test_base.py +++ b/brian2/tests/test_base.py @@ -8,7 +8,7 @@ class DerivedBrianObject(BrianObject): def __init__(self, name="derivedbrianobject*"): - super(DerivedBrianObject, self).__init__(name=name) + super().__init__(name=name) def __str__(self): return self.name diff --git a/brian2/tests/test_codegen.py b/brian2/tests/test_codegen.py index 680506230..cf94fd49d 100644 --- 
a/brian2/tests/test_codegen.py +++ b/brian2/tests/test_codegen.py @@ -592,7 +592,7 @@ def test_msvc_flags(): flag_file = os.path.join(user_dir, "cpu_flags.txt") assert len(cpp_prefs.msvc_arch_flag) assert os.path.exists(flag_file) - with open(flag_file, "r", encoding="utf-8") as f: + with open(flag_file, encoding="utf-8") as f: previously_stored_flags = json.load(f) hostname = socket.gethostname() assert hostname in previously_stored_flags diff --git a/brian2/tests/test_cpp_standalone.py b/brian2/tests/test_cpp_standalone.py index 9079a93f5..52a7f18fc 100644 --- a/brian2/tests/test_cpp_standalone.py +++ b/brian2/tests/test_cpp_standalone.py @@ -665,7 +665,7 @@ def test_constant_replacement(): def test_header_file_inclusion(): set_device("cpp_standalone", directory=None, debug=True) with tempfile.TemporaryDirectory() as tmpdir: - with open(os.path.join(tmpdir, "foo.h"), "wt") as f: + with open(os.path.join(tmpdir, "foo.h"), "w") as f: f.write( """ namespace brian_test_namespace { @@ -673,7 +673,7 @@ def test_header_file_inclusion(): } """ ) - with open(os.path.join(tmpdir, "foo.cpp"), "wt") as f: + with open(os.path.join(tmpdir, "foo.cpp"), "w") as f: f.write( """ namespace brian_test_namespace { diff --git a/brian2/tests/test_devices.py b/brian2/tests/test_devices.py index 450293a1b..38f68e20b 100644 --- a/brian2/tests/test_devices.py +++ b/brian2/tests/test_devices.py @@ -18,7 +18,7 @@ class ATestDevice(Device): def activate(self, build_on_run, **kwargs): - super(ATestDevice, self).activate(build_on_run, **kwargs) + super().activate(build_on_run, **kwargs) self.build_on_run = build_on_run self._options = kwargs diff --git a/brian2/tests/test_equations.py b/brian2/tests/test_equations.py index 1bfaf9d26..ed985c465 100644 --- a/brian2/tests/test_equations.py +++ b/brian2/tests/test_equations.py @@ -1,5 +1,3 @@ -# encoding: utf8 - import sys import numpy as np @@ -400,7 +398,7 @@ def test_construction_errors(): @pytest.mark.codegen_independent def test_unit_checking(): # dummy Variable class - class S(object): + class S: def __init__(self, dimensions): self.dim = get_dimensions(dimensions) @@ -469,11 +467,11 @@ def test_properties(): assert len(eqs.eq_names) == 3 and eqs.eq_names == {"v", "I", "f"} assert set(eqs.keys()) == {"v", "I", "f", "freq"} # test that the equations object is iterable itself - assert all((isinstance(eq, SingleEquation) for eq in eqs.values())) - assert all((isinstance(eq, str) for eq in eqs)) + assert all(isinstance(eq, SingleEquation) for eq in eqs.values()) + assert all(isinstance(eq, str) for eq in eqs) assert ( len(eqs.ordered) == 4 - and all((isinstance(eq, SingleEquation) for eq in eqs.ordered)) + and all(isinstance(eq, SingleEquation) for eq in eqs.ordered) and [eq.varname for eq in eqs.ordered] == ["f", "I", "v", "freq"] ) assert [eq.unit for eq in eqs.ordered] == [Hz, volt, volt, 1] @@ -619,7 +617,7 @@ def test_dependency_calculation(): # v depends directly on I_m, on I_ext and I_pas via I_m, and on v via I_m -> I_pas assert len(deps["v"]) == 4 - assert set(d.equation.varname for d in deps["v"]) == {"I_m", "I_ext", "I_pas", "v"} + assert {d.equation.varname for d in deps["v"]} == {"I_m", "I_ext", "I_pas", "v"} expected_via = { "I_m": (), "I_pas": ("I_m",), @@ -630,7 +628,7 @@ def test_dependency_calculation(): # I_m depends directly on I_ext and I_pas, and on v via I_pas assert len(deps["I_m"]) == 3 - assert set(d.equation.varname for d in deps["I_m"]) == {"I_ext", "I_pas", "v"} + assert {d.equation.varname for d in deps["I_m"]} == {"I_ext", "I_pas", "v"} 
expected_via = {"I_ext": (), "I_pas": (), "v": ("I_pas",)} assert all([d.via == expected_via[d.equation.varname] for d in deps["I_m"]]) diff --git a/brian2/tests/test_functions.py b/brian2/tests/test_functions.py index 4a9855eb9..3941add56 100644 --- a/brian2/tests/test_functions.py +++ b/brian2/tests/test_functions.py @@ -802,7 +802,7 @@ def foo(x): # some basic dictionary properties assert len(container) == 4 - assert set((key for key in container)) == { + assert {key for key in container} == { "A Language", "B", ACodeObject, diff --git a/brian2/tests/test_logger.py b/brian2/tests/test_logger.py index e6ad2a85b..86d307a6b 100644 --- a/brian2/tests/test_logger.py +++ b/brian2/tests/test_logger.py @@ -19,7 +19,7 @@ def test_file_logging(): BrianLogger.file_handler.flush() # By default, only >= debug messages should show up assert os.path.isfile(BrianLogger.tmp_log) - with open(BrianLogger.tmp_log, "r") as f: + with open(BrianLogger.tmp_log) as f: log_content = f.readlines() for level, line in zip(["error", "warning", "info", "debug"], log_content[-4:]): assert "brian2.tests.test_logger" in line @@ -48,7 +48,7 @@ def test_file_logging_multiprocessing(): BrianLogger.file_handler.flush() assert os.path.isfile(BrianLogger.tmp_log) - with open(BrianLogger.tmp_log, "r") as f: + with open(BrianLogger.tmp_log) as f: log_content = f.readlines() # The subprocesses should not have written to the log file assert "info message before multiprocessing" in log_content[-1] @@ -63,7 +63,7 @@ def test_file_logging_multiprocessing_with_loggers(): BrianLogger.file_handler.flush() assert os.path.isfile(BrianLogger.tmp_log) - with open(BrianLogger.tmp_log, "r") as f: + with open(BrianLogger.tmp_log) as f: log_content = f.readlines() # The subprocesses should not have written to the main log file assert "info message before multiprocessing" in log_content[-1] @@ -71,7 +71,7 @@ def test_file_logging_multiprocessing_with_loggers(): # Each subprocess should have their own log file for x, log_file in enumerate(log_files): assert os.path.isfile(log_file) - with open(log_file, "r") as f: + with open(log_file) as f: log_content = f.readlines() assert f"subprocess info message {x}" in log_content[-1] diff --git a/brian2/tests/test_network.py b/brian2/tests/test_network.py index 6d9c5cefd..897732a69 100644 --- a/brian2/tests/test_network.py +++ b/brian2/tests/test_network.py @@ -101,7 +101,7 @@ class Counter(BrianObject): add_to_magic_network = True def __init__(self, **kwds): - super(Counter, self).__init__(**kwds) + super().__init__(**kwds) self.count = 0 self.state = {"state": 0} @@ -120,7 +120,7 @@ class CounterWithContained(Counter): add_to_magic_network = True def __init__(self, **kwds): - super(CounterWithContained, self).__init__(**kwds) + super().__init__(**kwds) self.sub_counter = Counter() self.contained_objects.append(self.sub_counter) @@ -166,7 +166,7 @@ class NameLister(BrianObject): updates = [] def __init__(self, **kwds): - super(NameLister, self).__init__(**kwds) + super().__init__(**kwds) def run(self): NameLister.updates.append(self.name) @@ -309,7 +309,7 @@ def fill_with_array(self, var, arr): class TestDevice2(TestDevice1): def __init__(self): - super(TestDevice2, self).__init__() + super().__init__() self.network_schedule = [ "start", "groups", @@ -478,7 +478,7 @@ class Preparer(BrianObject): add_to_magic_network = True def __init__(self, **kwds): - super(Preparer, self).__init__(**kwds) + super().__init__(**kwds) self.did_reinit = False self.did_pre_run = False self.did_post_run = False @@ -510,7 
+510,7 @@ class Stopper(BrianObject): add_to_magic_network = True def __init__(self, stoptime, stopfunc, **kwds): - super(Stopper, self).__init__(**kwds) + super().__init__(**kwds) self.stoptime = stoptime self.stopfunc = stopfunc @@ -555,7 +555,7 @@ def f3(): # In complex frameworks, network operations might be object methods that # access some common data - class Container(object): + class Container: def __init__(self): self.g1_data = "B" self.g2_data = "C" @@ -581,7 +581,7 @@ def test_incorrect_network_operations(): def func(x, y): pass - class Container(object): + class Container: def func(self, x, y): pass @@ -596,7 +596,7 @@ def func(self, x, y): # method try: - class Container(object): + class Container: @network_operation def func(self): pass diff --git a/brian2/tests/test_neurongroup.py b/brian2/tests/test_neurongroup.py index 7e8c864d6..7304556c2 100644 --- a/brian2/tests/test_neurongroup.py +++ b/brian2/tests/test_neurongroup.py @@ -572,8 +572,8 @@ def test_linked_subexpression(): # Due to the linking, the first 5 and the second 5 recorded I vectors should # be identical - assert all((all(mon[i].I == mon[0].I) for i in range(5))) - assert all((all(mon[i + 5].I == mon[5].I) for i in range(5))) + assert all(all(mon[i].I == mon[0].I) for i in range(5)) + assert all(all(mon[i + 5].I == mon[5].I) for i in range(5)) @pytest.mark.standalone_compatible @@ -626,8 +626,8 @@ def test_linked_subexpression_3(): # Due to the linking, the first 5 and the second 5 recorded I vectors should # refer to the - assert all((all(mon[i].I_l == mon1[0].I) for i in range(5))) - assert all((all(mon[i + 5].I_l == mon1[1].I) for i in range(5))) + assert all(all(mon[i].I_l == mon1[0].I) for i in range(5)) + assert all(all(mon[i + 5].I_l == mon1[1].I) for i in range(5)) def test_linked_subexpression_synapse(): @@ -1981,7 +1981,7 @@ def test_random_values_fixed_and_random(): second_run_values = np.array(mon.v[:, [2, 3]]) # First time step should be identical (same seed) - assert all(abs((first_run_values[:, 0] - second_run_values[:, 0])) < 0.0001) + assert all(abs(first_run_values[:, 0] - second_run_values[:, 0]) < 0.0001) # Increase in second time step should be different (random seed) assert all( abs( diff --git a/brian2/tests/test_parsing.py b/brian2/tests/test_parsing.py index fb915a916..4ef38d4b4 100644 --- a/brian2/tests/test_parsing.py +++ b/brian2/tests/test_parsing.py @@ -155,7 +155,7 @@ def test_parse_expressions_sympy(): # sympy expression via str_to_sympy (uses the SympyNodeRenderer internally), # then convert it back to a string via sympy_to_str and evaluate it - class SympyRenderer(object): + class SympyRenderer: def render_expr(self, expr): return str_to_sympy(expr) @@ -217,7 +217,7 @@ def test_is_boolean_expression(): Var = namedtuple("Var", ["is_boolean"]) # dummy function object - class Func(object): + class Func: def __init__(self, returns_bool=False): self._returns_bool = returns_bool @@ -401,7 +401,7 @@ def test_value_from_expression(): constants = {"c": 3} # dummy class - class C(object): + class C: pass variables = {"s_constant_scalar": C(), "s_non_constant": C(), "s_non_scalar": C()} diff --git a/brian2/tests/test_synapses.py b/brian2/tests/test_synapses.py index 54cfaaa72..c63232cdd 100644 --- a/brian2/tests/test_synapses.py +++ b/brian2/tests/test_synapses.py @@ -2119,11 +2119,11 @@ def numerically_check_permutation_code(code): elif var.endswith("_const"): indices[var] = "0" vals[var] = 42 - subs = dict( - (var, var + "[" + idx + "]") + subs = { + var: var + "[" + idx + "]" for var, 
idx in indices.items() if not var.endswith("_const") - ) + } code = word_substitute(code, subs) code = f""" from numpy import * diff --git a/brian2/units/constants.py b/brian2/units/constants.py index 8f28edd48..ebb398b6f 100644 --- a/brian2/units/constants.py +++ b/brian2/units/constants.py @@ -1,4 +1,3 @@ -# coding=utf-8 r""" A module providing some physical units as `Quantity` objects. Note that these units are not imported by wildcard imports (e.g. `from brian2 import *`), they diff --git a/brian2/units/fundamentalunits.py b/brian2/units/fundamentalunits.py index 3dd2bad10..b2cb01f87 100644 --- a/brian2/units/fundamentalunits.py +++ b/brian2/units/fundamentalunits.py @@ -1,5 +1,3 @@ -# coding=utf-8 - """ Defines physical units and quantities @@ -54,8 +52,7 @@ def _flatten(iterable): """ for e in iterable: if isinstance(e, list): - for f in _flatten(e): - yield f + yield from _flatten(e) else: yield e @@ -443,7 +440,7 @@ def f(x, *args, **kwds): # pylint: disable=C0111 } -class Dimension(object): +class Dimension: """ Stores the indices of the 7 basic SI unit dimension (length, mass, etc.). @@ -976,7 +973,7 @@ def quantity_with_dimensions(floatval, dims): return Quantity(floatval, get_or_create_dimension(dims._dims)) -class Quantity(np.ndarray, object): +class Quantity(np.ndarray): """ A number with an associated physical dimension. In most cases, it is not necessary to create a Quantity object by hand, instead use multiplication @@ -1480,7 +1477,7 @@ def __getitem__(self, key): def __setitem__(self, key, value): fail_for_dimension_mismatch(self, value, "Inconsistent units in assignment") - return super(Quantity, self).__setitem__(key, value) + return super().__setitem__(key, value) #### ARITHMETIC #### def _binary_operation( @@ -1689,7 +1686,7 @@ def __ipow__(self, other): exponent=other, ) other = np.array(other, copy=False) - super(Quantity, self).__ipow__(other) + super().__ipow__(other) self.dim = self.dim**other return self else: @@ -1853,7 +1850,7 @@ def __format__(self, format_spec): if format_spec == "": return str(self) else: - return super(Quantity, self).__format__(format_spec) + return super().__format__(format_spec) #### Mathematic methods #### @@ -1912,14 +1909,14 @@ def ravel(self, *args, **kwds): def fill(self, values): # pylint: disable=C0111 fail_for_dimension_mismatch(self, values, "fill") - super(Quantity, self).fill(values) + super().fill(values) fill.__doc__ = np.ndarray.fill.__doc__ fill._do_not_run_doctests = True def put(self, indices, values, *args, **kwds): # pylint: disable=C0111 fail_for_dimension_mismatch(self, values, "fill") - super(Quantity, self).put(indices, values, *args, **kwds) + super().put(indices, values, *args, **kwds) put.__doc__ = np.ndarray.put.__doc__ put._do_not_run_doctests = True @@ -1952,13 +1949,13 @@ def dot(self, other, **kwds): # pylint: disable=C0111 def searchsorted(self, v, **kwds): # pylint: disable=C0111 fail_for_dimension_mismatch(self, v, "searchsorted") - return super(Quantity, self).searchsorted(np.array(v, copy=False), **kwds) + return super().searchsorted(np.array(v, copy=False), **kwds) searchsorted.__doc__ = np.ndarray.searchsorted.__doc__ searchsorted._do_not_run_doctests = True def prod(self, *args, **kwds): # pylint: disable=C0111 - prod_result = super(Quantity, self).prod(*args, **kwds) + prod_result = super().prod(*args, **kwds) # Calculating the correct dimensions is not completly trivial (e.g. # like doing self.dim**self.size) because prod can be called on # multidimensional arrays along a certain axis. 
@@ -2104,7 +2101,7 @@ def __new__( ): if dim is None: dim = DIMENSIONLESS - obj = super(Unit, cls).__new__( + obj = super().__new__( cls, arr, dim=dim, dtype=dtype, copy=copy, force_quantity=True ) return obj @@ -2323,7 +2320,7 @@ def __mul__(self, other): ) return u else: - return super(Unit, self).__mul__(other) + return super().__mul__(other) def __rmul__(self, other): return self.__mul__(other) @@ -2345,7 +2342,7 @@ def __div__(self, other): dispname += other.dispname name += other.name - latexname = r"\frac{%s}{%s}" % (self.latexname, other.latexname) + latexname = rf"\frac{{{self.latexname}}}{{{other.latexname}}}" scale = self.scale - other.scale u = Unit( 10.0**scale, @@ -2358,7 +2355,7 @@ def __div__(self, other): ) return u else: - return super(Unit, self).__div__(other) + return super().__div__(other) def __rdiv__(self, other): if isinstance(other, Unit): @@ -2369,7 +2366,7 @@ def __rdiv__(self, other): return self**-1 except (ValueError, TypeError, DimensionMismatchError): pass - return super(Unit, self).__rdiv__(other) + return super().__rdiv__(other) def __pow__(self, other): if is_scalar_type(other): @@ -2396,7 +2393,7 @@ def __pow__(self, other): ) # To avoid issues with units like (second ** -1) ** -1 return u else: - return super(Unit, self).__pow__(other) + return super().__pow__(other) def __iadd__(self, other): raise TypeError("Units cannot be modified in-place") @@ -2435,7 +2432,7 @@ def __hash__(self): return hash((self.dim, self.scale)) -class UnitRegistry(object): +class UnitRegistry: """ Stores known units for printing in best units. diff --git a/brian2/utils/caching.py b/brian2/utils/caching.py index af4818cb9..692dc356e 100644 --- a/brian2/utils/caching.py +++ b/brian2/utils/caching.py @@ -9,7 +9,7 @@ from collections.abc import Mapping -class CacheKey(object): +class CacheKey: """ Mixin class for objects that will be used as keys for caching (e.g. `Variable` objects) and have to define a certain "identity" with respect @@ -42,7 +42,7 @@ def _state_tuple(self): ) -class _CacheStatistics(object): +class _CacheStatistics: """ Helper class to store cache statistics """ diff --git a/brian2/utils/filelock.py b/brian2/utils/filelock.py index 06b185cad..0306ad2a9 100644 --- a/brian2/utils/filelock.py +++ b/brian2/utils/filelock.py @@ -113,7 +113,7 @@ def __str__(self): # automatically. # # :seealso: issue #37 (memory leak) -class _Acquire_ReturnProxy(object): +class _Acquire_ReturnProxy: def __init__(self, lock): self.lock = lock return None @@ -126,7 +126,7 @@ def __exit__(self, exc_type, exc_value, traceback): return None -class BaseFileLock(object): +class BaseFileLock: """ Implements the base class of a file lock. 
""" @@ -362,7 +362,7 @@ def _acquire(self): else: try: msvcrt.locking(fd, msvcrt.LK_NBLCK, 1) - except (IOError, OSError): + except OSError: os.close(fd) else: self._lock_file_fd = fd @@ -398,7 +398,7 @@ def _acquire(self): try: fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB) - except (IOError, OSError): + except OSError: os.close(fd) else: self._lock_file_fd = fd @@ -429,7 +429,7 @@ def _acquire(self): open_mode = os.O_WRONLY | os.O_CREAT | os.O_EXCL | os.O_TRUNC try: fd = os.open(self._lock_file, open_mode) - except (IOError, OSError): + except OSError: pass else: self._lock_file_fd = fd diff --git a/brian2/utils/filetools.py b/brian2/utils/filetools.py index cdd5c6486..21180c5cb 100644 --- a/brian2/utils/filetools.py +++ b/brian2/utils/filetools.py @@ -33,7 +33,7 @@ def ensure_directory(d): return d -class in_directory(object): +class in_directory: """ Safely temporarily work in a subdirectory diff --git a/brian2/utils/logger.py b/brian2/utils/logger.py index 41ce6e30a..a256d7175 100644 --- a/brian2/utils/logger.py +++ b/brian2/utils/logger.py @@ -245,19 +245,19 @@ def clean_up_logging(): if BrianLogger.tmp_log is not None: try: os.remove(BrianLogger.tmp_log) - except (IOError, OSError) as exc: + except OSError as exc: warn(f"Could not delete log file: {exc}") # Remove log files that have been rotated (currently only one) rotated_log = f"{BrianLogger.tmp_log}.1" if os.path.exists(rotated_log): try: os.remove(rotated_log) - except (IOError, OSError) as exc: + except OSError as exc: warn(f"Could not delete log file: {exc}") if BrianLogger.tmp_script is not None: try: os.remove(BrianLogger.tmp_script) - except (IOError, OSError) as exc: + except OSError as exc: warn(f"Could not delete copy of script file: {exc}") std_silent.close() @@ -265,7 +265,7 @@ def clean_up_logging(): atexit.register(clean_up_logging) -class HierarchyFilter(object): +class HierarchyFilter: """ A class for suppressing all log messages in a subtree of the name hierarchy. Does exactly the opposite as the `logging.Filter` class, which allows @@ -289,7 +289,7 @@ def filter(self, record): return not self.orig_filter.filter(record) -class NameFilter(object): +class NameFilter: """ A class for suppressing log messages ending with a certain name. @@ -311,7 +311,7 @@ def filter(self, record): return self.name != record_name -class BrianLogger(object): +class BrianLogger: """ Convenience object for logging. Call `get_logger` to get an instance of this class. @@ -603,7 +603,7 @@ def initialize(): ) logger.addHandler(BrianLogger.file_handler) BrianLogger._pid = os.getpid() - except IOError as ex: + except OSError as ex: warn(f"Could not create log file: {ex}") # Save a copy of the script @@ -629,7 +629,7 @@ def initialize(): with open(os.path.abspath(sys.argv[0]), "rb") as script_file: shutil.copyfileobj(script_file, tmp_file) BrianLogger.tmp_script = tmp_file.name - except IOError as ex: + except OSError as ex: warn(f"Could not copy script file to temp directory: {ex}") if BrianLogger.console_handler is not None: @@ -698,7 +698,7 @@ def get_logger(module_name="brian2"): return BrianLogger(module_name) -class catch_logs(object): +class catch_logs: """ A context manager for catching log messages. Use this for testing the messages that are logged. Defaults to catching warning/error messages and @@ -791,7 +791,7 @@ def uninstall(self): # See http://stackoverflow.com/questions/26126160/redirecting-standard-out-in-err-back-after-os-dup2 # for an explanation of how this function works. 
Note that 1 and 2 are the file # numbers for stdout and stderr -class std_silent(object): +class std_silent: """ Context manager that temporarily silences stdout and stderr but keeps the output saved in a temporary file and writes it if an exception is raised. @@ -831,9 +831,9 @@ def __exit__(self, exc_type, exc_value, traceback): std_silent.dest_stdout.flush() std_silent.dest_stderr.flush() if exc_type is not None: - with open(std_silent.dest_fname_stdout, "r") as f: + with open(std_silent.dest_fname_stdout) as f: out = f.read() - with open(std_silent.dest_fname_stderr, "r") as f: + with open(std_silent.dest_fname_stderr) as f: err = f.read() os.dup2(self.orig_out_fd, 1) os.dup2(self.orig_err_fd, 2) @@ -850,7 +850,7 @@ def close(cls): if prefs["logging.delete_log_on_exit"]: try: os.remove(std_silent.dest_fname_stdout) - except (IOError, OSError): + except OSError: # TODO: this happens quite frequently - why? # The file objects are closed as far as Python is concerned, # but maybe Windows is still hanging on to them? @@ -860,5 +860,5 @@ def close(cls): if prefs["logging.delete_log_on_exit"]: try: os.remove(std_silent.dest_fname_stderr) - except (IOError, OSError): + except OSError: pass diff --git a/brian2/utils/stringtools.py b/brian2/utils/stringtools.py index e98ede0b9..0034cd0f7 100644 --- a/brian2/utils/stringtools.py +++ b/brian2/utils/stringtools.py @@ -274,7 +274,7 @@ def code_representation(code): # The below is adapted from Peter Norvig's spelling corrector # http://norvig.com/spell.py (MIT licensed) -class SpellChecker(object): +class SpellChecker: """ A simple spell checker that will be used to suggest the correct name if the user made a typo (e.g. for state variable names). @@ -301,12 +301,12 @@ def edits1(self, word): return set(deletes + transposes + replaces + inserts) def known_edits2(self, word): - return set( + return { e2 for e1 in self.edits1(word) for e2 in self.edits1(e1) if e2 in self.words - ) + } def known(self, words): - return set(w for w in words if w in self.words) + return {w for w in words if w in self.words} def suggest(self, word): return self.known(self.edits1(word)) or self.known_edits2(word) or set() diff --git a/brian2/utils/topsort.py b/brian2/utils/topsort.py index c25dba38c..75b293151 100644 --- a/brian2/utils/topsort.py +++ b/brian2/utils/topsort.py @@ -17,7 +17,7 @@ def topsort(graph): except ImportError: # TODO: Can be removed when we depend on Python >= 3.9 # make a copy so as not to destroy original - graph = dict((k, copy(v)) for k, v in graph.items()) + graph = {k: copy(v) for k, v in graph.items()} # Use the standard algorithm for topological sorting: # http://en.wikipedia.org/wiki/Topological_sorting # List that will contain the sorted elements From 880cd2e1c2ec5c111d067e5eaaf7f9f20fc94d86 Mon Sep 17 00:00:00 2001 From: Marcel Stimberg Date: Fri, 2 Dec 2022 15:47:11 +0100 Subject: [PATCH 4/4] chore: exclude pyupgrade modernization from git blame --- .git-blame-ignore-revs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index e94c17e1f..dc10ba951 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -10,3 +10,5 @@ d3ae59251c753ae0737d6ae6242b7e85b60908c4 1e9ea598491444fe7c4ee9ece2ec94ad7c5020ec # Reformatting with isort 67bf6d3760fa3fb8b3aa121b1b972d6cf36ec048 +# Update syntax to Python 3.8 with pyupgrade +28b02c51545298cb9a76d8295e64a5df391b9207