diff --git a/.flake8 b/.flake8 new file mode 100644 index 000000000000..bc0554384af7 --- /dev/null +++ b/.flake8 @@ -0,0 +1,8 @@ +[flake8] +exclude = venv, doc/_build, src/ansys/api/fluent/v0/*, src/ansys/fluent/core/meshing/tui.py, src/ansys/fluent/core/solver/tui.py, src/ansys/fluent/core/solver/settings/*, src/ansys/fluent/core/datamodel/* +max-line-length = 88 +count = True +max-complexity = 10 +statistics = True +select = W191 W291 W293 W391 E115 E117 E122 E124 E125 E225 E231 E301 E303 F401 F403 N801 N802 N803 N804 N805 N806 +extend-ignore = E203, E501 \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d5423aee96b4..af31620549d4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -5,8 +5,7 @@ repos: hooks: - id: black args: [ - --line-length, "79", - --force-exclude, src/ansys/api/fluent/v0/|src/ansys/fluent/core/meshing/tui.py|src/ansys/fluent/core/solver/tui.py|src/ansys/fluent/core/solver/settings.py|src/ansys/fluent/core/datamodel, + --force-exclude, src/ansys/api/fluent/v0/|src/ansys/fluent/core/meshing/tui.py|src/ansys/fluent/core/solver/tui.py|src/ansys/fluent/core/solver/settings/|src/ansys/fluent/core/datamodel, src/ansys, codegen, doc, examples, tests ] @@ -25,11 +24,11 @@ repos: --profile, black, --skip, src/ansys/fluent/core/meshing/tui.py, --skip, src/ansys/fluent/core/solver/tui.py, - --skip, src/ansys/fluent/core/solver/settings.py, --skip-glob, src/ansys/api/fluent/v0/*, --skip-glob, src/ansys/fluent/core/datamodel/*, + --skip-glob, src/ansys/fluent/core/solver/settings/*, --force-sort-within-sections, - --line-length, "79", + --line-length, "88", --section-default, THIRDPARTY, --filter-files, --project, ansys, @@ -50,7 +49,8 @@ repos: --count, --statistics, --max-complexity, "10", - --max-line-length, "79", + --max-line-length, "88", + --extend-ignore, E203 E501, ansys, codegen, doc, examples, tests ] diff --git a/codegen/datamodelgen.py b/codegen/datamodelgen.py index 29457e96a269..56bd453255a8 100644 --- a/codegen/datamodelgen.py +++ b/codegen/datamodelgen.py @@ -42,9 +42,7 @@ def _build_parameter_docstring(name: str, t: str): def _build_command_docstring(name: str, info: Any): return_type = _PY_TYPE_BY_DM_TYPE[info.returntype] - arg_strings = [ - arg.name + ": " + _PY_TYPE_BY_DM_TYPE[arg.type] for arg in info.args - ] + arg_strings = [arg.name + ": " + _PY_TYPE_BY_DM_TYPE[arg.type] for arg in info.args] arg_string = ", ".join(arg_strings) return name + "(" + arg_string + ") -> " + return_type @@ -72,9 +70,7 @@ def __init__(self): "workflow": DataModelStaticInfo("workflow", "meshing"), "meshing": DataModelStaticInfo("meshing", "meshing"), "PartManagement": DataModelStaticInfo("PartManagement", "meshing"), - "PMFileManagement": DataModelStaticInfo( - "PMFileManagement", "meshing" - ), + "PMFileManagement": DataModelStaticInfo("PMFileManagement", "meshing"), } self._delete_generated_files() self._populate_static_info() @@ -98,23 +94,17 @@ def _populate_static_info(self): session = pyfluent.launch_fluent(meshing_mode=True) for _, info in self._static_info.items(): if info.mode == "meshing": - info.static_info = self._get_static_info( - info.rules, session - ) + info.static_info = self._get_static_info(info.rules, session) session.exit() if run_solver_mode: session = pyfluent.launch_fluent() for _, info in self._static_info.items(): if info.mode == "solver": - info.static_info = self._get_static_info( - info.rules, session - ) + info.static_info = self._get_static_info(info.rules, session) session.exit() - 
def _write_static_info( - self, name: str, info: Any, f: FileIO, level: int = 0 - ): + def _write_static_info(self, name: str, info: Any, f: FileIO, level: int = 0): indent = " " * level * 4 f.write(f"{indent}class {name}(PyMenu):\n") f.write(f'{indent} """\n') @@ -144,14 +134,9 @@ def _write_static_info( f.write(f"{indent} super().__init__(service, rules, path)\n\n") for k in info.namedobjects: f.write(f"{indent} class {k}(PyNamedObjectContainer):\n") - self._write_static_info( - f"_{k}", info.namedobjects[k], f, level + 2 - ) + self._write_static_info(f"_{k}", info.namedobjects[k], f, level + 2) # Specify the concrete named object type for __getitem__ - f.write( - f"{indent} def __getitem__(self, key: str) -> " - f"_{k}:\n" - ) + f.write(f"{indent} def __getitem__(self, key: str) -> " f"_{k}:\n") f.write(f"{indent} return super().__getitem__(key)\n\n") for k in info.singletons: self._write_static_info(k, info.singletons[k], f, level + 1) @@ -182,9 +167,7 @@ def write_static_info(self) -> None: f.write("# This is an auto-generated file. DO NOT EDIT!\n") f.write("#\n") f.write("# pylint: disable=line-too-long\n\n") - f.write( - "from ansys.fluent.core.services.datamodel_se import (\n" - ) + f.write("from ansys.fluent.core.services.datamodel_se import (\n") f.write(" PyMenu,\n") f.write(" PyNamedObjectContainer,\n") f.write(" PyCommand\n") diff --git a/codegen/pyprotogen.py b/codegen/pyprotogen.py index 898a0d79c1fb..a1b58b1b890c 100644 --- a/codegen/pyprotogen.py +++ b/codegen/pyprotogen.py @@ -44,9 +44,7 @@ def build_python_grpc( proto_glob = os.path.join(protos_path, "*.proto") files = glob.glob(proto_glob, recursive=True) if not files: - raise FileNotFoundError( - f"Unable locate any *.proto files at {protos_path}" - ) + raise FileNotFoundError(f"Unable locate any *.proto files at {protos_path}") shutil.rmtree(out_path, ignore_errors=True) Path.mkdir(Path(out_path), parents=True, exist_ok=True) diff --git a/codegen/settingsgen.py b/codegen/settingsgen.py index 90b869966120..5081a372d1c2 100644 --- a/codegen/settingsgen.py +++ b/codegen/settingsgen.py @@ -55,9 +55,7 @@ def _populate_hash_dict(name, info, cls): for child in getattr(cls, "child_names", None): child_cls = getattr(cls, child) if cname == child_cls.fluent_name: - children_hash.append( - _populate_hash_dict(cname, cinfo, child_cls) - ) + children_hash.append(_populate_hash_dict(cname, cinfo, child_cls)) break else: children_hash = None @@ -69,9 +67,7 @@ def _populate_hash_dict(name, info, cls): for command in getattr(cls, "command_names", None): command_cls = getattr(cls, command) if cname == command_cls.fluent_name: - commands_hash.append( - _populate_hash_dict(cname, cinfo, command_cls) - ) + commands_hash.append(_populate_hash_dict(cname, cinfo, command_cls)) break else: commands_hash = None @@ -176,9 +172,7 @@ def _populate_classes(parent_dir): ) in hash_dict.items(): file_name = files_dict.get(key) cls_name = cls.__name__ - filepath = os.path.normpath( - os.path.join(parent_dir, file_name + ".py") - ) + filepath = os.path.normpath(os.path.join(parent_dir, file_name + ".py")) with open(filepath, "w") as f: # disclaimer to py file f.write("#\n") @@ -191,29 +185,21 @@ def _populate_classes(parent_dir): if children_hash: for child in children_hash: pchild_name = hash_dict.get(child)[0].__name__ - f.write( - f"from .{files_dict.get(child)} import {pchild_name}\n" - ) + f.write(f"from .{files_dict.get(child)} import {pchild_name}\n") if commands_hash: for child in commands_hash: pchild_name = hash_dict.get(child)[0].__name__ - 
f.write( - f"from .{files_dict.get(child)} import {pchild_name}\n" - ) + f.write(f"from .{files_dict.get(child)} import {pchild_name}\n") if arguments_hash: for child in arguments_hash: pchild_name = hash_dict.get(child)[0].__name__ - f.write( - f"from .{files_dict.get(child)} import {pchild_name}\n" - ) + f.write(f"from .{files_dict.get(child)} import {pchild_name}\n") if object_hash: pchild_name = hash_dict.get(object_hash)[0].__name__ - f.write( - f"from .{files_dict.get(object_hash)} import {pchild_name}\n\n" - ) + f.write(f"from .{files_dict.get(object_hash)} import {pchild_name}\n\n") # class name f.write( @@ -237,9 +223,7 @@ def _populate_classes(parent_dir): if child_names: f.write(f"{istr1}child_names = \\\n") strout = io.StringIO() - pprint.pprint( - child_names, stream=strout, compact=True, width=70 - ) + pprint.pprint(child_names, stream=strout, compact=True, width=70) mn = ("\n" + istr2).join(strout.getvalue().strip().split("\n")) f.write(f"{istr2}{mn}\n\n") @@ -254,9 +238,7 @@ def _populate_classes(parent_dir): if command_names: f.write(f"{istr1}command_names = \\\n") strout = io.StringIO() - pprint.pprint( - command_names, stream=strout, compact=True, width=70 - ) + pprint.pprint(command_names, stream=strout, compact=True, width=70) mn = ("\n" + istr2).join(strout.getvalue().strip().split("\n")) f.write(f"{istr2}{mn}\n\n") @@ -284,9 +266,7 @@ def _populate_classes(parent_dir): # write object type child_object_type = getattr(cls, "child_object_type", None) if child_object_type: - f.write( - f"{istr1}child_object_type: {pchild_name} = {pchild_name}\n" - ) + f.write(f"{istr1}child_object_type: {pchild_name} = {pchild_name}\n") f.write(f'{istr1}"""\n') f.write(f"{istr1}child_object_type of {cls_name}.") f.write(f'\n{istr1}"""\n') diff --git a/codegen/tuigen.py b/codegen/tuigen.py index 0fd0afa19cf5..017b2bba2eee 100644 --- a/codegen/tuigen.py +++ b/codegen/tuigen.py @@ -63,9 +63,7 @@ def _populate_xml_helpstrings(): tree = ET.parse(_XML_HELP_FILE) root = tree.getroot() help_contents_node = root.find(".//*[@id='flu_tui_help_contents']") - field_help_node = help_contents_node.find( - ".//*[@id='fluent_tui_field_help']" - ) + field_help_node = help_contents_node.find(".//*[@id='fluent_tui_field_help']") for node in field_help_node.findall("sect2"): k = node.find("h3").text @@ -150,9 +148,7 @@ def _write_menu_to_tui_file(self, menu: _TUIMenu, indent: int = 0): for line in doc_lines: self._write_code_to_tui_file(f"{line}\n", indent) self._write_code_to_tui_file('"""\n', indent) - self._write_code_to_tui_file( - "def __init__(self, path, service):\n", indent - ) + self._write_code_to_tui_file("def __init__(self, path, service):\n", indent) indent += 1 self._write_code_to_tui_file("self.path = path\n", indent) self._write_code_to_tui_file("self.service = service\n", indent) @@ -163,14 +159,10 @@ def _write_menu_to_tui_file(self, menu: _TUIMenu, indent: int = 0): f'(path + [("{v.tui_name}", None)], service)\n', indent, ) - self._write_code_to_tui_file( - "super().__init__(path, service)\n", indent - ) + self._write_code_to_tui_file("super().__init__(path, service)\n", indent) indent -= 1 - command_names = [ - v.name for _, v in menu.children.items() if v.is_command - ] + command_names = [v.name for _, v in menu.children.items() if v.is_command] if command_names: for command in command_names: self._write_code_to_tui_file( @@ -197,14 +189,10 @@ def generate(self) -> None: with open(self._tui_file, "w", encoding="utf8") as self.__writer: self._populate_menu(self._main_menu) if 
self._tui_file == _SOLVER_TUI_FILE: - self._write_code_to_tui_file( - '"""Fluent Solver TUI Commands"""\n' - ) + self._write_code_to_tui_file('"""Fluent Solver TUI Commands"""\n') self._main_menu.doc = "Fluent solver main menu." else: - self._write_code_to_tui_file( - '"""Fluent Meshing TUI Commands"""\n' - ) + self._write_code_to_tui_file('"""Fluent Meshing TUI Commands"""\n') self._main_menu.doc = "Fluent meshing main menu." self._write_code_to_tui_file( "#\n" diff --git a/doc/settings_rstgen.py b/doc/settings_rstgen.py index b379f5b4b0f3..33abdda4ffe4 100644 --- a/doc/settings_rstgen.py +++ b/doc/settings_rstgen.py @@ -116,18 +116,14 @@ def _populate_rst_from_settings(rst_dir, cls): rstpath = os.path.normpath(os.path.join(rst_dir, file_name + ".rst")) has_children = hasattr(cls, "child_names") and len(cls.child_names) > 0 has_commands = hasattr(cls, "command_names") and len(cls.command_names) > 0 - has_arguments = ( - hasattr(cls, "argument_names") and len(cls.argument_names) > 0 - ) + has_arguments = hasattr(cls, "argument_names") and len(cls.argument_names) > 0 has_named_object = hasattr(cls, "child_object_type") with open(rstpath, "w") as r: # Populate initial rst r.write(f".. _{file_name}:\n\n") r.write(f"{cls_name}\n") r.write(f'{"="*(len(cls_name))}\n\n') - r.write( - f".. currentmodule:: ansys.fluent.core.solver.settings.{file_name}\n\n" - ) + r.write(f".. currentmodule:: ansys.fluent.core.solver.settings.{file_name}\n\n") r.write(f".. autoclass:: {cls_name}\n") r.write(f"{istr1}:show-inheritance:\n") r.write(f"{istr1}:undoc-members:\n") @@ -147,12 +143,8 @@ def _populate_rst_from_settings(rst_dir, cls): data_dict["Child"] = "Summary" for child in cls.child_names: child_cls = getattr(cls, child) - ref_string = ( - f":ref:`{child} <{child_cls.__module__.split('.')[-1]}>`" - ) - data_dict[ref_string] = child_cls.__doc__.strip("\n").split( - "\n" - )[0] + ref_string = f":ref:`{child} <{child_cls.__module__.split('.')[-1]}>`" + data_dict[ref_string] = child_cls.__doc__.strip("\n").split("\n")[0] _generate_table_for_rst(r, data_dict) if has_commands: @@ -161,12 +153,8 @@ def _populate_rst_from_settings(rst_dir, cls): data_dict["Command"] = "Summary" for child in cls.command_names: child_cls = getattr(cls, child) - ref_string = ( - f":ref:`{child} <{child_cls.__module__.split('.')[-1]}>`" - ) - data_dict[ref_string] = child_cls.__doc__.strip("\n").split( - "\n" - )[0] + ref_string = f":ref:`{child} <{child_cls.__module__.split('.')[-1]}>`" + data_dict[ref_string] = child_cls.__doc__.strip("\n").split("\n")[0] _generate_table_for_rst(r, data_dict) if has_arguments: @@ -175,21 +163,17 @@ def _populate_rst_from_settings(rst_dir, cls): data_dict["Argument"] = "Summary" for child in cls.argument_names: child_cls = getattr(cls, child) - ref_string = ( - f":ref:`{child} <{child_cls.__module__.split('.')[-1]}>`" - ) - data_dict[ref_string] = child_cls.__doc__.strip("\n").split( - "\n" - )[0] + ref_string = f":ref:`{child} <{child_cls.__module__.split('.')[-1]}>`" + data_dict[ref_string] = child_cls.__doc__.strip("\n").split("\n")[0] _generate_table_for_rst(r, data_dict) if has_named_object: child_cls = getattr(cls, "child_object_type") - ref_string = f":ref:`{child_cls.__name__} <{child_cls.__module__.split('.')[-1]}>`" + ref_string = ( + f":ref:`{child_cls.__name__} <{child_cls.__module__.split('.')[-1]}>`" + ) data_dict = {} - data_dict[ref_string] = child_cls.__doc__.strip("\n").split("\n")[ - 0 - ] + data_dict[ref_string] = child_cls.__doc__.strip("\n").split("\n")[0] r.write(f".. 
rubric:: Named object type\n\n") r.write(f"{ref_string}\n\n\n") @@ -200,9 +184,7 @@ def _populate_rst_from_settings(rst_dir, cls): for parent in parents_dict.get(file_name): parent_file = parent.__module__.split(".")[-1] ref_string = f":ref:`{parent.__name__} <{parent_file}>`" - data_dict[ref_string] = parent.__doc__.strip("\n").split("\n")[ - 0 - ] + data_dict[ref_string] = parent.__doc__.strip("\n").split("\n")[0] _generate_table_for_rst(r, data_dict) if not rstpath in rst_list: @@ -220,9 +202,7 @@ def _populate_rst_from_settings(rst_dir, cls): _populate_rst_from_settings(rst_dir, getattr(cls, child)) if has_named_object: - _populate_rst_from_settings( - rst_dir, getattr(cls, "child_object_type") - ) + _populate_rst_from_settings(rst_dir, getattr(cls, "child_object_type")) if __name__ == "__main__": diff --git a/doc/source/conf.py b/doc/source/conf.py index 135f8d40a187..58e2e96cceed 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -21,9 +21,7 @@ pyvista.rcParams["window_size"] = np.array([1024, 768]) # Save figures in specified directory -pyvista.FIGURE_PATH = os.path.join( - os.path.abspath("./images/"), "auto-generated/" -) +pyvista.FIGURE_PATH = os.path.join(os.path.abspath("./images/"), "auto-generated/") if not os.path.exists(pyvista.FIGURE_PATH): os.makedirs(pyvista.FIGURE_PATH) diff --git a/examples/00-fluent/exhaust_system.py b/examples/00-fluent/exhaust_system.py index ee0a6fb41d5e..2db50381ee35 100644 --- a/examples/00-fluent/exhaust_system.py +++ b/examples/00-fluent/exhaust_system.py @@ -68,24 +68,17 @@ session.PMFileManagement.FileManager.LoadFiles() session.part_management.Node["Meshing Model"].Copy( Paths=[ - "/dirty_manifold-for-wrapper," - + "1/dirty_manifold-for-wrapper,1/main,1", - "/dirty_manifold-for-wrapper," - + "1/dirty_manifold-for-wrapper,1/flow-pipe,1", - "/dirty_manifold-for-wrapper," - + "1/dirty_manifold-for-wrapper,1/outpipe3,1", - "/dirty_manifold-for-wrapper," - + "1/dirty_manifold-for-wrapper,1/object2,1", - "/dirty_manifold-for-wrapper," - + "1/dirty_manifold-for-wrapper,1/object1,1", + "/dirty_manifold-for-wrapper," + "1/dirty_manifold-for-wrapper,1/main,1", + "/dirty_manifold-for-wrapper," + "1/dirty_manifold-for-wrapper,1/flow-pipe,1", + "/dirty_manifold-for-wrapper," + "1/dirty_manifold-for-wrapper,1/outpipe3,1", + "/dirty_manifold-for-wrapper," + "1/dirty_manifold-for-wrapper,1/object2,1", + "/dirty_manifold-for-wrapper," + "1/dirty_manifold-for-wrapper,1/object1,1", ] ) -session.part_management.ObjectSetting[ - "DefaultObjectSetting" -].OneZonePer.setState("part") -session.workflow.TaskObject[ - "Import CAD and Part Management" -].Arguments.setState( +session.part_management.ObjectSetting["DefaultObjectSetting"].OneZonePer.setState( + "part" +) +session.workflow.TaskObject["Import CAD and Part Management"].Arguments.setState( { "Context": 0, "CreateObjectPer": "Custom", @@ -133,9 +126,7 @@ ############################################################################### # Cover any openings in your geometry. 
-session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].Arguments.setState( +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].Arguments.setState( { "CreatePatchPreferences": { "ShowCreatePatchPreferences": False, @@ -145,9 +136,7 @@ "ZoneSelectionList": ["inlet.1"], } ) -session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].Arguments.setState( +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].Arguments.setState( { "CreatePatchPreferences": { "ShowCreatePatchPreferences": False, @@ -169,25 +158,17 @@ ) session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].AddChildToTask() -session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].InsertCompoundChildTask() -session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].Arguments.setState({}) +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].InsertCompoundChildTask() +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].Arguments.setState({}) session.workflow.TaskObject["inlet-1"].Execute() -session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].Arguments.setState( +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].Arguments.setState( { "PatchName": "inlet-2", "SelectionType": "zone", "ZoneSelectionList": ["inlet.2"], } ) -session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].Arguments.setState( +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].Arguments.setState( { "PatchName": "inlet-2", "SelectionType": "zone", @@ -206,25 +187,17 @@ ) session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].AddChildToTask() -session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].InsertCompoundChildTask() -session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].Arguments.setState({}) +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].InsertCompoundChildTask() +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].Arguments.setState({}) session.workflow.TaskObject["inlet-2"].Execute() -session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].Arguments.setState( +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].Arguments.setState( { "PatchName": "inlet-3", "SelectionType": "zone", "ZoneSelectionList": ["inlet"], } ) -session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].Arguments.setState( +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].Arguments.setState( { "PatchName": "inlet-3", "SelectionType": "zone", @@ -243,16 +216,10 @@ ) session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].AddChildToTask() -session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].InsertCompoundChildTask() -session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].Arguments.setState({}) +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].InsertCompoundChildTask() +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].Arguments.setState({}) session.workflow.TaskObject["inlet-3"].Execute() -session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].Arguments.setState( +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].Arguments.setState( { "PatchName": "outlet-1", "SelectionType": "zone", @@ -260,9 +227,7 @@ "ZoneType": "pressure-outlet", } ) -session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].Arguments.setState( +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].Arguments.setState( { "PatchName": "outlet-1", 
"SelectionType": "zone", @@ -282,12 +247,8 @@ ) session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].AddChildToTask() -session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].InsertCompoundChildTask() -session.workflow.TaskObject[ - "Enclose Fluid Regions (Capping)" -].Arguments.setState({}) +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].InsertCompoundChildTask() +session.workflow.TaskObject["Enclose Fluid Regions (Capping)"].Arguments.setState({}) session.workflow.TaskObject["outlet-1"].Execute() ############################################################################### @@ -403,9 +364,7 @@ ) session.workflow.TaskObject["Define Leakage Threshold"].AddChildToTask() -session.workflow.TaskObject[ - "Define Leakage Threshold" -].InsertCompoundChildTask() +session.workflow.TaskObject["Define Leakage Threshold"].InsertCompoundChildTask() session.workflow.TaskObject["leakage-1"].Arguments.setState( { "AddChild": "yes", diff --git a/examples/00-fluent/mixing_elbow.py b/examples/00-fluent/mixing_elbow.py index a5ef0885206d..81ef44e1f5b3 100644 --- a/examples/00-fluent/mixing_elbow.py +++ b/examples/00-fluent/mixing_elbow.py @@ -46,9 +46,7 @@ set_config(blocking=True) -import_filename = examples.download_file( - "mixing_elbow.pmdb", "pyfluent/mixing_elbow" -) +import_filename = examples.download_file("mixing_elbow.pmdb", "pyfluent/mixing_elbow") session = pyfluent.launch_fluent( meshing_mode=True, precision="double", processor_count=2 @@ -104,9 +102,7 @@ session.workflow.TaskObject["Describe Geometry"].Arguments = dict( SetupType="The geometry consists of only fluid regions with no voids" ) -session.workflow.TaskObject["Describe Geometry"].UpdateChildTasks( - SetupTypeChanged=True -) +session.workflow.TaskObject["Describe Geometry"].UpdateChildTasks(SetupTypeChanged=True) session.workflow.TaskObject["Describe Geometry"].Execute() ############################################################################### diff --git a/examples/01-parametric/parametric_static_mixer_1.py b/examples/01-parametric/parametric_static_mixer_1.py index ad568bef1d9d..614b2d6fc43f 100755 --- a/examples/01-parametric/parametric_static_mixer_1.py +++ b/examples/01-parametric/parametric_static_mixer_1.py @@ -78,23 +78,15 @@ root.solution.report_definitions.surface[ "outlet-temp-avg" ].report_type = "surface-areaavg" -root.solution.report_definitions.surface[ - "outlet-temp-avg" -].field = "temperature" -root.solution.report_definitions.surface["outlet-temp-avg"].surface_names = [ - "outlet" -] +root.solution.report_definitions.surface["outlet-temp-avg"].field = "temperature" +root.solution.report_definitions.surface["outlet-temp-avg"].surface_names = ["outlet"] root.solution.report_definitions.surface["outlet-vel-avg"] = {} root.solution.report_definitions.surface[ "outlet-vel-avg" ].report_type = "surface-areaavg" -root.solution.report_definitions.surface[ - "outlet-vel-avg" -].field = "velocity-magnitude" -root.solution.report_definitions.surface["outlet-vel-avg"].surface_names = [ - "outlet" -] +root.solution.report_definitions.surface["outlet-vel-avg"].field = "velocity-magnitude" +root.solution.report_definitions.surface["outlet-vel-avg"].surface_names = ["outlet"] session.tui.solver.define.parameters.enable_in_TUI("yes") session.tui.solver.define.parameters.output_parameters.create( @@ -111,9 +103,7 @@ ########################################################################### # Write case with all the settings in place -case_path = str( - Path(pyfluent.EXAMPLES_PATH) / 
"Static_Mixer_Parameters.cas.h5" -) +case_path = str(Path(pyfluent.EXAMPLES_PATH) / "Static_Mixer_Parameters.cas.h5") session.tui.solver.file.write_case(case_path) ########################################################################### @@ -202,8 +192,6 @@ ######################################################################### # Save parametric project -project_filepath = str( - Path(pyfluent.EXAMPLES_PATH) / "static_mixer_study.flprj" -) +project_filepath = str(Path(pyfluent.EXAMPLES_PATH) / "static_mixer_study.flprj") session.tui.solver.file.parametric_project.save_as(project_filepath) diff --git a/examples/01-parametric/parametric_static_mixer_2.py b/examples/01-parametric/parametric_static_mixer_2.py index b129c0d41f47..d7393ceb9e7d 100755 --- a/examples/01-parametric/parametric_static_mixer_2.py +++ b/examples/01-parametric/parametric_static_mixer_2.py @@ -29,9 +29,7 @@ ######################################################################### # Read the previously saved project - static_mixer_study.flprj -project_filepath_read = str( - Path(pyfluent.EXAMPLES_PATH) / "static_mixer_study.flprj" -) +project_filepath_read = str(Path(pyfluent.EXAMPLES_PATH) / "static_mixer_study.flprj") proj = ParametricProject( root.file.parametric_project, diff --git a/examples/01-parametric/parametric_static_mixer_3.py b/examples/01-parametric/parametric_static_mixer_3.py index 38789cb23cd8..75f99fda3eb2 100755 --- a/examples/01-parametric/parametric_static_mixer_3.py +++ b/examples/01-parametric/parametric_static_mixer_3.py @@ -24,9 +24,7 @@ # Launch parametric session using the hopper/mixer case File # This case file contains pre-created input and output parameters -case_path = str( - Path(pyfluent.EXAMPLES_PATH) / "Static_Mixer_Parameters.cas.h5" -) +case_path = str(Path(pyfluent.EXAMPLES_PATH) / "Static_Mixer_Parameters.cas.h5") session = ParametricSession(case_filepath=case_path) diff --git a/examples/02-postprocessing/post_processing_exhaust_manifold.py b/examples/02-postprocessing/post_processing_exhaust_manifold.py index 28a292fa0cf0..3828701b992f 100644 --- a/examples/02-postprocessing/post_processing_exhaust_manifold.py +++ b/examples/02-postprocessing/post_processing_exhaust_manifold.py @@ -109,9 +109,7 @@ ############################################################################### # Contour plot of temperature on the manifold -temperature_contour_manifold = graphics.Contours[ - "contour-temperature-manifold" -] +temperature_contour_manifold = graphics.Contours["contour-temperature-manifold"] temperature_contour_manifold.field = "temperature" temperature_contour_manifold.surfaces_list = [ "in1", diff --git a/src/ansys/fluent/core/__init__.py b/src/ansys/fluent/core/__init__.py index c87087763894..55e049e5cf26 100644 --- a/src/ansys/fluent/core/__init__.py +++ b/src/ansys/fluent/core/__init__.py @@ -13,9 +13,7 @@ """Global variable indicating the version of the PyFluent package - Empty by default""" _THIS_DIRNAME = os.path.dirname(__file__) -_README_FILE = os.path.normpath( - os.path.join(_THIS_DIRNAME, "docs", "README.rst") -) +_README_FILE = os.path.normpath(os.path.join(_THIS_DIRNAME, "docs", "README.rst")) if os.path.exists(_README_FILE): with open(_README_FILE, encoding="utf8") as f: diff --git a/src/ansys/fluent/core/examples/downloads.py b/src/ansys/fluent/core/examples/downloads.py index 977f2796001f..335cc6b0eb45 100644 --- a/src/ansys/fluent/core/examples/downloads.py +++ b/src/ansys/fluent/core/examples/downloads.py @@ -47,9 +47,7 @@ def _get_file_url(filename: str, 
directory: Optional[str] = None) -> str: def _retrieve_file(url: str, filename: str): # First check if file has already been downloaded - local_path = os.path.join( - pyfluent.EXAMPLES_PATH, os.path.basename(filename) - ) + local_path = os.path.join(pyfluent.EXAMPLES_PATH, os.path.basename(filename)) local_path_no_zip = local_path.replace(".zip", "") if os.path.isfile(local_path_no_zip) or os.path.isdir(local_path_no_zip): return local_path_no_zip, None diff --git a/src/ansys/fluent/core/launcher/launcher.py b/src/ansys/fluent/core/launcher/launcher.py index c586094ce804..e211830e562f 100644 --- a/src/ansys/fluent/core/launcher/launcher.py +++ b/src/ansys/fluent/core/launcher/launcher.py @@ -42,18 +42,14 @@ def _get_fluent_exe_path(): def _get_server_info_filepath(): server_info_dir = os.getenv("SERVER_INFO_DIR") dir_ = Path(server_info_dir) if server_info_dir else tempfile.gettempdir() - fd, filepath = tempfile.mkstemp( - suffix=".txt", prefix="serverinfo-", dir=str(dir_) - ) + fd, filepath = tempfile.mkstemp(suffix=".txt", prefix="serverinfo-", dir=str(dir_)) os.close(fd) return filepath def _get_subprocess_kwargs_for_fluent(env: Dict[str, Any]) -> Dict[str, Any]: kwargs: Dict[str, Any] = {} - kwargs.update( - stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE - ) + kwargs.update(stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) if platform.system() == "Windows": kwargs.update( creationflags=subprocess.CREATE_NEW_PROCESS_GROUP @@ -201,10 +197,7 @@ def launch_fluent( kwargs = _get_subprocess_kwargs_for_fluent(env) subprocess.Popen(launch_string, **kwargs) while True: - if ( - Path(server_info_filepath).stat().st_mtime - > sifile_last_mtime - ): + if Path(server_info_filepath).stat().st_mtime > sifile_last_mtime: time.sleep(1) LOG.info("Fluent process is successfully launched.") break diff --git a/src/ansys/fluent/core/meta.py b/src/ansys/fluent/core/meta.py index 19788c3a6183..d06c3164b21c 100644 --- a/src/ansys/fluent/core/meta.py +++ b/src/ansys/fluent/core/meta.py @@ -15,9 +15,7 @@ class _SurfaceAPI: def __init__(self, obj): self.obj = obj - self._surface_name_on_server = self.surface_name_in_server( - obj._name - ) + self._surface_name_on_server = self.surface_name_in_server(obj._name) @staticmethod def surface_name_in_server(local_surface_name): @@ -51,11 +49,7 @@ def create_surface_on_server(self): self._delete_if_exist_on_server() self._get_api_handle().plane_surface( self._surface_name_on_server, - "xy-plane" - if xy_plane - else "yz-plane" - if yz_plane - else "zx-plane", + "xy-plane" if xy_plane else "yz-plane" if yz_plane else "zx-plane", xy_plane.z() if xy_plane else yz_plane.x() @@ -162,9 +156,7 @@ def wrapper(self, value): ) if attr == "allowed_values": if isinstance(value, list): - if not all( - v in self.allowed_values for v in value - ): + if not all(v in self.allowed_values for v in value): raise ValueError( f"Not all values in {value}, are in the " "list of allowed values " @@ -198,9 +190,7 @@ def wrapper(self, parent): ) if reset_on_change: for obj in reset_on_change: - obj._register_on_change_cb( - lambda: setattr(self, "_value", None) - ) + obj._register_on_change_cb(lambda: setattr(self, "_value", None)) return wrapper @@ -337,9 +327,7 @@ def update_state(clss): if cls.__class__.__name__ == "PyLocalPropertyMeta": state[name] = o() - attrs = show_attributes and getattr( - o, "attributes", None - ) + attrs = show_attributes and getattr(o, "attributes", None) if attrs: for attr in attrs: state[name + "." 
+ attr] = getattr(o, attr) @@ -358,10 +346,7 @@ def update_state(clss): def __create_setattr(cls): def wrapper(self, name, value): attr = getattr(self, name, None) - if ( - attr - and attr.__class__.__class__.__name__ == "PyLocalPropertyMeta" - ): + if attr and attr.__class__.__class__.__name__ == "PyLocalPropertyMeta": attr.set_state(value) else: object.__setattr__(self, name, value) @@ -421,9 +406,7 @@ def update(clss): def __new__(cls, name, bases, attrs): attrs["__init__"] = cls.__create_init() - return super(PyLocalNamedObjectMeta, cls).__new__( - cls, name, bases, attrs - ) + return super(PyLocalNamedObjectMeta, cls).__new__(cls, name, bases, attrs) class PyLocalNamedObjectMetaAbstract(ABCMeta, PyLocalNamedObjectMeta): diff --git a/src/ansys/fluent/core/services/datamodel_se.py b/src/ansys/fluent/core/services/datamodel_se.py index fdf4f3b9ae80..733826ad308e 100644 --- a/src/ansys/fluent/core/services/datamodel_se.py +++ b/src/ansys/fluent/core/services/datamodel_se.py @@ -57,9 +57,7 @@ class DatamodelService: def __init__(self, channel: grpc.Channel, metadata: List[Tuple[str, str]]): tracing_interceptor = TracingInterceptor() - intercept_channel = grpc.intercept_channel( - channel, tracing_interceptor - ) + intercept_channel = grpc.intercept_channel(channel, tracing_interceptor) self.__stub = DataModelGrpcModule.DataModelStub(intercept_channel) self.__metadata = metadata @@ -230,9 +228,7 @@ class PyMenu: docstring = None - def __init__( - self, service: DatamodelService, rules: str, path: Path = None - ): + def __init__(self, service: DatamodelService, rules: str, path: Path = None): self.service = service self.rules = rules if path is None: @@ -359,9 +355,7 @@ class PyNamedObjectContainer: Deletes the child object by name """ - def __init__( - self, service: DatamodelService, rules: str, path: Path = None - ): + def __init__(self, service: DatamodelService, rules: str, path: Path = None): self.service = service self.rules = rules if path is None: @@ -382,9 +376,7 @@ def __get_child_object_names(self): struct_field = getattr(response.member, struct_type) for member in struct_field.members: if member.startswith(child_type_suffix): - child_object_names.append( - member[len(child_type_suffix) :] - ) + child_object_names.append(member[len(child_type_suffix) :]) return child_object_names def __get_child_object_display_names(self): @@ -432,8 +424,7 @@ def __get_item(self, key: str): ) else: raise LookupError( - f"{key} is not found at path " - f"{_convert_path_to_se_path(self.path)}" + f"{key} is not found at path " f"{_convert_path_to_se_path(self.path)}" ) def __del_item(self, key: str): @@ -446,8 +437,7 @@ def __del_item(self, key: str): self.service.delete_object(request) else: raise LookupError( - f"{key} is not found at path " - f"{_convert_path_to_se_path(self.path)}" + f"{key} is not found at path " f"{_convert_path_to_se_path(self.path)}" ) def __getitem__(self, key: str) -> PyMenu: @@ -478,9 +468,7 @@ def __setitem__(self, key: str, value: Any): if isinstance(value, dict) and not value: value["_name_"] = key parent_state = {f"{self.__class__.__name__}:{key}": value} - PyMenu(self.service, self.rules, self.path[:-1]).set_state( - parent_state - ) + PyMenu(self.service, self.rules, self.path[:-1]).set_state(parent_state) def __delitem__(self, key: str): """Deletes the child object by name. 
diff --git a/src/ansys/fluent/core/services/datamodel_tui.py b/src/ansys/fluent/core/services/datamodel_tui.py index 2366db912cd0..6dca052c9724 100644 --- a/src/ansys/fluent/core/services/datamodel_tui.py +++ b/src/ansys/fluent/core/services/datamodel_tui.py @@ -22,9 +22,7 @@ class DatamodelService: def __init__(self, channel: grpc.Channel, metadata: List[Tuple[str, str]]): tracing_interceptor = TracingInterceptor() - intercept_channel = grpc.intercept_channel( - channel, tracing_interceptor - ) + intercept_channel = grpc.intercept_channel(channel, tracing_interceptor) self.__stub = DataModelGrpcModule.DataModelStub(intercept_channel) self.__metadata = metadata @@ -118,9 +116,7 @@ class PyMenu: def __init__(self, service: DatamodelService, path: Union[Path, str]): self._service = service - self._path = ( - path if isinstance(path, str) else convert_path_to_grpc_path(path) - ) + self._path = path if isinstance(path, str) else convert_path_to_grpc_path(path) def get_child_names(self, include_unavailable: bool = False) -> List[str]: """Get child menu names. diff --git a/src/ansys/fluent/core/services/events.py b/src/ansys/fluent/core/services/events.py index cf607546b24c..81227b621879 100644 --- a/src/ansys/fluent/core/services/events.py +++ b/src/ansys/fluent/core/services/events.py @@ -21,9 +21,7 @@ def begin_streaming(self): Event """ request = EventsProtoModule.BeginStreamingRequest() - self.__streams = self.__stub.BeginStreaming( - request, metadata=self.__metadata - ) + self.__streams = self.__stub.BeginStreaming(request, metadata=self.__metadata) while True: try: diff --git a/src/ansys/fluent/core/services/field_data.py b/src/ansys/fluent/core/services/field_data.py index 4e11530ebb18..af1308294fc9 100644 --- a/src/ansys/fluent/core/services/field_data.py +++ b/src/ansys/fluent/core/services/field_data.py @@ -15,9 +15,7 @@ class FieldDataService: def __init__(self, channel: grpc.Channel, metadata): tracing_interceptor = TracingInterceptor() - intercept_channel = grpc.intercept_channel( - channel, tracing_interceptor - ) + intercept_channel = grpc.intercept_channel(channel, tracing_interceptor) self.__stub = FieldGrpcModule.FieldDataStub(intercept_channel) self.__metadata = metadata @@ -31,9 +29,7 @@ def get_fields_info(self, request): @catch_grpc_error def get_vector_fields_info(self, request): - return self.__stub.GetVectorFieldsInfo( - request, metadata=self.__metadata - ) + return self.__stub.GetVectorFieldsInfo(request, metadata=self.__metadata) @catch_grpc_error def get_surfaces_info(self, request): @@ -220,8 +216,7 @@ def _extract_field(field_datatype, field_size, chunk_iterator): surface_id = payload_info.surfaceId payload_tag_id = reduce( lambda x, y: x | y, - [self._payloadTags[tag] for tag in payload_info.payloadTag] - or [0], + [self._payloadTags[tag] for tag in payload_info.payloadTag] or [0], ) payload_data = fields_data.get(payload_tag_id) if not payload_data: diff --git a/src/ansys/fluent/core/services/settings.py b/src/ansys/fluent/core/services/settings.py index 142e8bdaa372..7e669c169e88 100644 --- a/src/ansys/fluent/core/services/settings.py +++ b/src/ansys/fluent/core/services/settings.py @@ -12,9 +12,7 @@ class _SettingsServiceImpl: def __init__(self, channel: grpc.Channel, metadata): tracing_interceptor = TracingInterceptor() - intercept_channel = grpc.intercept_channel( - channel, tracing_interceptor - ) + intercept_channel = grpc.intercept_channel(channel, tracing_interceptor) self.__stub = SettingsGrpcModule.SettingsStub(intercept_channel) self.__metadata = 
metadata @@ -52,9 +50,7 @@ def resize_list_object(self, request): @catch_grpc_error def get_obj_static_info(self, request): - return self.__stub.GetObjectStaticInfo( - request, metadata=self.__metadata - ) + return self.__stub.GetObjectStaticInfo(request, metadata=self.__metadata) @catch_grpc_error def get_static_info(self, request): @@ -133,13 +129,10 @@ def _get_state_from_value(self, state): elif t == "string": return state.string elif t == "value_list": - return [ - self._get_state_from_value(v) for v in state.value_list.lst - ] + return [self._get_state_from_value(v) for v in state.value_list.lst] elif t == "value_map": return { - k: self._get_state_from_value(v) - for k, v in state.value_map.m.items() + k: self._get_state_from_value(v) for k, v in state.value_map.m.items() } else: return None @@ -147,27 +140,21 @@ def _get_state_from_value(self, state): @_trace def set_var(self, path: str, value: Any): """Set the value for the given path.""" - request = _get_request_instance_for_path( - SettingsModule.SetVarRequest, path - ) + request = _get_request_instance_for_path(SettingsModule.SetVarRequest, path) self._set_state_from_value(request.value, value) self.__service_impl.set_var(request) @_trace def get_var(self, path: str) -> Any: """Get the value for the given path.""" - request = _get_request_instance_for_path( - SettingsModule.GetVarRequest, path - ) + request = _get_request_instance_for_path(SettingsModule.GetVarRequest, path) response = self.__service_impl.get_var(request) return self._get_state_from_value(response.value) @_trace def rename(self, path: str, new: str, old: str): """Rename the object at the given path.""" - request = _get_request_instance_for_path( - SettingsModule.RenameRequest, path - ) + request = _get_request_instance_for_path(SettingsModule.RenameRequest, path) request.old_name = old request.new_name = new @@ -176,9 +163,7 @@ def rename(self, path: str, new: str, old: str): @_trace def create(self, path: str, name: str): """Create a new named object child for the given path.""" - request = _get_request_instance_for_path( - SettingsModule.CreateRequest, path - ) + request = _get_request_instance_for_path(SettingsModule.CreateRequest, path) request.name = name self.__service_impl.create(request) @@ -186,9 +171,7 @@ def create(self, path: str, name: str): @_trace def delete(self, path: str, name: str): """Delete the object with the given name at the give path.""" - request = _get_request_instance_for_path( - SettingsModule.DeleteRequest, path - ) + request = _get_request_instance_for_path(SettingsModule.DeleteRequest, path) request.name = name self.__service_impl.delete(request) @@ -317,8 +300,7 @@ def _parse_attrs(self, response): } if response.list_object_children: ret["list_object_children"] = [ - self._parse_attrs(child) - for child in response.list_object_children + self._parse_attrs(child) for child in response.list_object_children ] if response.arguments: ret["arguments"] = { @@ -330,9 +312,7 @@ def _parse_attrs(self, response): @_trace def get_attrs(self, path: str, attrs: List[str], recursive=False) -> Any: """Return values of given attributes.""" - request = _get_request_instance_for_path( - SettingsModule.GetAttrsRequest, path - ) + request = _get_request_instance_for_path(SettingsModule.GetAttrsRequest, path) request.attrs[:] = attrs request.recursive = recursive diff --git a/src/ansys/fluent/core/services/transcript.py b/src/ansys/fluent/core/services/transcript.py index 932660332ef2..726c79b16c6e 100644 --- 
a/src/ansys/fluent/core/services/transcript.py +++ b/src/ansys/fluent/core/services/transcript.py @@ -33,9 +33,7 @@ def begin_streaming( A transcript line """ request = TranscriptModule.TranscriptRequest() - self.__streams = self.__stub.BeginStreaming( - request, metadata=self.__metadata - ) + self.__streams = self.__stub.BeginStreaming(request, metadata=self.__metadata) while True: try: diff --git a/src/ansys/fluent/core/session.py b/src/ansys/fluent/core/session.py index 62e6c2f682cf..6f2b7ab94a03 100644 --- a/src/ansys/fluent/core/session.py +++ b/src/ansys/fluent/core/session.py @@ -26,25 +26,16 @@ from ansys.fluent.core.datamodel.PMFileManagement import ( Root as PMFileManagement_root, ) - from ansys.fluent.core.datamodel.PartManagement import ( - Root as PartManagement_root, - ) + from ansys.fluent.core.datamodel.PartManagement import Root as PartManagement_root from ansys.fluent.core.datamodel.meshing import Root as meshing_root from ansys.fluent.core.datamodel.workflow import Root as workflow_root except ImportError: pass from ansys.fluent.core.services.events import EventsService -from ansys.fluent.core.services.field_data import ( - FieldData, - FieldDataService, - FieldInfo, -) +from ansys.fluent.core.services.field_data import FieldData, FieldDataService, FieldInfo from ansys.fluent.core.services.health_check import HealthCheckService -from ansys.fluent.core.services.scheme_eval import ( - SchemeEval, - SchemeEvalService, -) +from ansys.fluent.core.services.scheme_eval import SchemeEval, SchemeEvalService from ansys.fluent.core.services.settings import SettingsService from ansys.fluent.core.services.transcript import TranscriptService from ansys.fluent.core.solver.events_manager import EventsManager @@ -187,9 +178,7 @@ def __init__( Session._monitor_thread = MonitorThread() Session._monitor_thread.start() - self._transcript_service = TranscriptService( - self._channel, self._metadata - ) + self._transcript_service = TranscriptService(self._channel, self._metadata) self._transcript_thread: Optional[threading.Thread] = None self._events_service = EventsService(self._channel, self._metadata) @@ -199,24 +188,16 @@ def __init__( self._channel, self._metadata ) - self._field_data_service = FieldDataService( - self._channel, self._metadata - ) + self._field_data_service = FieldDataService(self._channel, self._metadata) self.field_info = FieldInfo(self._field_data_service) self.field_data = FieldData(self._field_data_service) self.tui = Session.Tui(self._datamodel_service_tui) - self._datamodel_service_se = DatamodelService_SE( - self._channel, self._metadata - ) + self._datamodel_service_se = DatamodelService_SE(self._channel, self._metadata) if "meshing_root" in globals(): - self.meshing = meshing_root( - self._datamodel_service_se, "meshing", [] - ) + self.meshing = meshing_root(self._datamodel_service_se, "meshing", []) if "workflow_root" in globals(): - self.workflow = workflow_root( - self._datamodel_service_se, "workflow", [] - ) + self.workflow = workflow_root(self._datamodel_service_se, "workflow", []) if "PartManagement_root" in globals(): self.part_management = PartManagement_root( self._datamodel_service_se, "PartManagement", [] @@ -228,13 +209,9 @@ def __init__( ) self.PMFileManagement = self.pm_file_management - self._health_check_service = HealthCheckService( - self._channel, self._metadata - ) + self._health_check_service = HealthCheckService(self._channel, self._metadata) - self._scheme_eval_service = SchemeEvalService( - self._channel, self._metadata - ) + 
self._scheme_eval_service = SchemeEvalService(self._channel, self._metadata) self.scheme_eval = SchemeEval(self._scheme_eval_service) self._cleanup_on_exit = cleanup_on_exit @@ -282,9 +259,7 @@ def get_settings_root(self) -> root: """Return root settings object.""" if self._settings_root is None: LOG.warning("The settings API is currently experimental.") - self._settings_root = settings_get_root( - flproxy=self.get_settings_service() - ) + self._settings_root = settings_get_root(flproxy=self.get_settings_service()) return self._settings_root def _process_transcript(self): diff --git a/src/ansys/fluent/core/solver/events_manager.py b/src/ansys/fluent/core/solver/events_manager.py index c30b0659e25b..aab29ecf3dba 100644 --- a/src/ansys/fluent/core/solver/events_manager.py +++ b/src/ansys/fluent/core/solver/events_manager.py @@ -46,9 +46,7 @@ def __listen_events(self): response = next(responses) event_name = response.WhichOneof("as") with self.__lock: - callbacks_map = self.__events_to_callbacks_map.get( - event_name, {} - ) + callbacks_map = self.__events_to_callbacks_map.get(event_name, {}) for call_back in callbacks_map.values(): call_back( session_id=self.__session_id, diff --git a/src/ansys/fluent/core/solver/flobject.py b/src/ansys/fluent/core/solver/flobject.py index 246d10fd0f74..667b2b09c4c7 100644 --- a/src/ansys/fluent/core/solver/flobject.py +++ b/src/ansys/fluent/core/solver/flobject.py @@ -47,9 +47,7 @@ ListStateType = List["StateType"] StateType = Union[PrimitiveStateType, DictStateType, ListStateType] -_ttable = str.maketrans( - string.punctuation, "_" * len(string.punctuation), "?'" -) +_ttable = str.maketrans(string.punctuation, "_" * len(string.punctuation), "?'") def to_python_name(fluent_name: str) -> str: @@ -209,17 +207,13 @@ def _print_state_helper(state, out=sys.stdout, indent=0, indent_factor=2): out.write("\n") for index, value in enumerate(state): out.write(f'{indent*indent_factor*" "}{index} : ') - SettingsBase._print_state_helper( - value, out, indent + 1, indent_factor - ) + SettingsBase._print_state_helper(value, out, indent + 1, indent_factor) else: out.write(f"{state}\n") def print_state(self, out=sys.stdout, indent_factor=2): """Print the state of this object.""" - self._print_state_helper( - self.get_state(), out, indent_factor=indent_factor - ) + self._print_state_helper(self.get_state(), out, indent_factor=indent_factor) class Integer(SettingsBase[int]): @@ -634,9 +628,7 @@ def __call__(self, **kwds): newkwds[ccls.fluent_name] = ccls.to_scheme_keys(v) else: raise RuntimeError("Argument '" + str(k) + "' is invalid") - return self.flproxy.execute_cmd( - self._parent.path, self.obj_name, **newkwds - ) + return self.flproxy.execute_cmd(self._parent.path, self.obj_name, **newkwds) _baseTypes = { @@ -757,9 +749,7 @@ def get_cls(name, info, parent=None): cls.__doc__ = doc object_type = info.get("object-type") if object_type: - cls.child_object_type = get_cls( - "child-object-type", object_type, cls - ) + cls.child_object_type = get_cls("child-object-type", object_type, cls) except Exception: print( f"Unable to construct class for '{name}' of " diff --git a/src/ansys/fluent/core/utils/dump_session_data.py b/src/ansys/fluent/core/utils/dump_session_data.py index 23151331dba6..a70cff478639 100644 --- a/src/ansys/fluent/core/utils/dump_session_data.py +++ b/src/ansys/fluent/core/utils/dump_session_data.py @@ -2,9 +2,7 @@ import pickle -def dump_session_data( - session, file_path: str, fields: list = [], surfaces: list = [] -): +def dump_session_data(session, 
file_path: str, fields: list = [], surfaces: list = []): """Dump session data. Parameters @@ -29,17 +27,12 @@ def dump_session_data( for k, v in session.field_info.get_surfaces_info().items() if (not surfaces or k in surfaces) } - session_data[ - "vector_fields_info" - ] = session.field_info.get_vector_fields_info() + session_data["vector_fields_info"] = session.field_info.get_vector_fields_info() if not fields: fields = [ - v["solver_name"] - for k, v in session_data["scalar_fields_info"].items() + v["solver_name"] for k, v in session_data["scalar_fields_info"].items() ] - surfaces_id = [ - v["surface_id"][0] for k, v in session_data["surfaces_info"].items() - ] + surfaces_id = [v["surface_id"][0] for k, v in session_data["surfaces_info"].items()] session_data["range"] = {} for field in fields: session_data["range"][field] = {} diff --git a/src/ansys/fluent/core/utils/generic.py b/src/ansys/fluent/core/utils/generic.py index 8f92b5786fd5..6fc169a52274 100644 --- a/src/ansys/fluent/core/utils/generic.py +++ b/src/ansys/fluent/core/utils/generic.py @@ -15,9 +15,7 @@ class SingletonMeta(type): def __call__(cls, *args, **kwargs): if not cls._single_instance: - cls._single_instance = super(SingletonMeta, cls).__call__( - *args, **kwargs - ) + cls._single_instance = super(SingletonMeta, cls).__call__(*args, **kwargs) return cls._single_instance diff --git a/src/ansys/fluent/parametric/__init__.py b/src/ansys/fluent/parametric/__init__.py index d70b61e80929..56db514adc0b 100644 --- a/src/ansys/fluent/parametric/__init__.py +++ b/src/ansys/fluent/parametric/__init__.py @@ -225,14 +225,10 @@ def initialize(self) -> "ParametricStudy": project_filename=self.project_filepath.stem ) new_study_names = self._parametric_studies.get_object_names() - self.name = ( - set(new_study_names).difference(set(old_study_names)).pop() - ) + self.name = set(new_study_names).difference(set(old_study_names)).pop() base_design_point = DesignPoint( BASE_DP_NAME, - self._parametric_studies[self.name].design_points[ - BASE_DP_NAME - ], + self._parametric_studies[self.name].design_points[BASE_DP_NAME], ) self.design_points = {BASE_DP_NAME: base_design_point} ParametricStudy.current_study_name = self.name @@ -251,9 +247,7 @@ def rename(self, new_name: str) -> None: self._parametric_studies.rename(new_name, self.name) self.name = new_name self.design_points = { - k: DesignPoint( - k, self._parametric_studies[self.name].design_points[k] - ) + k: DesignPoint(k, self._parametric_studies[self.name].design_points[k]) for k, _ in self.design_points.items() } @@ -282,29 +276,21 @@ def duplicate(self, copy_design_points: bool = True) -> "ParametricStudy": New parametric study instance. 
""" old_study_names = self._parametric_studies.get_object_names() - self._parametric_studies.duplicate( - copy_design_points=copy_design_points - ) + self._parametric_studies.duplicate(copy_design_points=copy_design_points) new_study_names = self._parametric_studies.get_object_names() - clone_name = ( - set(new_study_names).difference(set(old_study_names)).pop() - ) + clone_name = set(new_study_names).difference(set(old_study_names)).pop() current_study = ParametricStudy.get_all_studies()[ ParametricStudy.current_study_name ] if copy_design_points: clone_design_points = { - k: DesignPoint( - k, self._parametric_studies[clone_name].design_points[k] - ) + k: DesignPoint(k, self._parametric_studies[clone_name].design_points[k]) for k, _ in current_study.design_points.items() } else: base_design_point = DesignPoint( BASE_DP_NAME, - self._parametric_studies[clone_name].design_points[ - BASE_DP_NAME - ], + self._parametric_studies[clone_name].design_points[BASE_DP_NAME], ) clone_design_points = {BASE_DP_NAME: base_design_point} clone = ParametricStudy( @@ -479,9 +465,7 @@ def update_all_design_points(self) -> None: dp_settings = self._parametric_studies[self.name].design_points dp_settings.update_all() - def update_selected_design_points( - self, design_points: List[DesignPoint] - ) -> None: + def update_selected_design_points(self, design_points: List[DesignPoint]) -> None: """Update a list of design points. Parameters @@ -490,9 +474,7 @@ def update_selected_design_points( List of design points to update. """ dp_settings = self._parametric_studies[self.name].design_points - dp_settings.update_selected( - design_points=[dp.name for dp in design_points] - ) + dp_settings.update_selected(design_points=[dp.name for dp in design_points]) class ParametricProject: @@ -569,9 +551,7 @@ def save_as(self, project_filepath: str) -> None: """ self._parametric_project.save_as(project_filename=project_filepath) - def export( - self, project_filepath: str, convert_to_managed: bool = False - ) -> None: + def export(self, project_filepath: str, convert_to_managed: bool = False) -> None: """Save project as a copy. Parameters @@ -595,9 +575,7 @@ def archive(self, archive_path: str = None) -> None: Archive name. """ if not archive_path: - archive_path = str( - Path(self.project_filepath).with_suffix(".flprz") - ) + archive_path = str(Path(self.project_filepath).with_suffix(".flprz")) self._parametric_project.archive(archive_name=archive_path) @@ -668,8 +646,7 @@ def __init__( self._session = launcher() self.scheme_eval = self._session.scheme_eval.scheme_eval self.scheme_eval( - "(set parametric-study-dependents-manager " - "save-project-at-exit? #f)" + "(set parametric-study-dependents-manager " "save-project-at-exit? #f)" ) if start_transcript: self.start_transcript() @@ -699,9 +676,7 @@ def __init__( dp_name, dps_settings[dp_name] ) self.studies[study_name] = study - ParametricStudy.current_study_name = ( - self._root.current_parametric_study() - ) + ParametricStudy.current_study_name = self._root.current_parametric_study() def new_study(self) -> ParametricStudy: """Create new study. diff --git a/src/ansys/fluent/post/__init__.py b/src/ansys/fluent/post/__init__.py index be556be5dc73..21d6412ecef0 100644 --- a/src/ansys/fluent/post/__init__.py +++ b/src/ansys/fluent/post/__init__.py @@ -44,10 +44,7 @@ def _update_vtk_version(): import_errors = [] if missing_libraries: import_errors.append( - ( - f"Required libraries {missing_libraries} " - "are missing to use this feature." 
- ) + (f"Required libraries {missing_libraries} " "are missing to use this feature.") ) for lib in missing_libraries: import_errors.append( @@ -63,9 +60,9 @@ def _update_vtk_version(): for lib in installed_libraries: required_version = required_libraries[lib] installed_version = pkg_resources.get_distribution(lib).version - if pkg_resources.parse_version( - installed_version - ) < pkg_resources.parse_version(required_version): + if pkg_resources.parse_version(installed_version) < pkg_resources.parse_version( + required_version + ): if not versions_mismatched_message: import_errors.append( ( diff --git a/src/ansys/fluent/post/matplotlib/matplot_objects.py b/src/ansys/fluent/post/matplotlib/matplot_objects.py index 4150ea8bbb54..17e2df843fd2 100644 --- a/src/ansys/fluent/post/matplotlib/matplot_objects.py +++ b/src/ansys/fluent/post/matplotlib/matplot_objects.py @@ -4,9 +4,7 @@ from typing import Optional from ansys.fluent.core.meta import PyLocalContainer -from ansys.fluent.post.matplotlib.matplot_windows_manager import ( - matplot_windows_manager, -) +from ansys.fluent.post.matplotlib.matplot_windows_manager import matplot_windows_manager from ansys.fluent.post.post_object_defns import XYPlotDefn @@ -33,8 +31,8 @@ def __init__(self, session, local_surfaces_provider=None): self._init_module(self, sys.modules[__name__]) else: self.__dict__ = session_state - self._local_surfaces_provider = ( - lambda: local_surfaces_provider or getattr(self, "Surfaces", []) + self._local_surfaces_provider = lambda: local_surfaces_provider or getattr( + self, "Surfaces", [] ) def _init_module(self, obj, mod): diff --git a/src/ansys/fluent/post/matplotlib/matplot_windows_manager.py b/src/ansys/fluent/post/matplotlib/matplot_windows_manager.py index 560d89e55d25..024830645768 100644 --- a/src/ansys/fluent/post/matplotlib/matplot_windows_manager.py +++ b/src/ansys/fluent/post/matplotlib/matplot_windows_manager.py @@ -11,10 +11,7 @@ from ansys.fluent.post import get_config from ansys.fluent.post.matplotlib.plotter_defns import Plotter, ProcessPlotter from ansys.fluent.post.post_object_defns import GraphicsDefn, PlotDefn -from ansys.fluent.post.post_windows_manager import ( - PostWindow, - PostWindowsManager, -) +from ansys.fluent.post.post_windows_manager import PostWindow, PostWindowsManager class _ProcessPlotterHandle: @@ -81,9 +78,7 @@ def __init__(self, id: str, post_object: Union[GraphicsDefn, PlotDefn]): self.post_object: Union[GraphicsDefn, PlotDefn] = post_object self.id: str = id self.properties: dict = None - self.plotter: Union[ - _ProcessPlotterHandle, Plotter - ] = self._get_plotter() + self.plotter: Union[_ProcessPlotterHandle, Plotter] = self._get_plotter() self.animate: bool = False self.close: bool = False self.refresh: bool = False @@ -195,9 +190,7 @@ def _get_xy_plot_data(self): return xy_plots_data -class MatplotWindowsManager( - PostWindowsManager, metaclass=AbstractSingletonMeta -): +class MatplotWindowsManager(PostWindowsManager, metaclass=AbstractSingletonMeta): """Class for matplot windows manager.""" def __init__(self): @@ -375,18 +368,13 @@ def close_windows( # private methods - def _open_window( - self, window_id: str - ) -> Union[Plotter, _ProcessPlotterHandle]: + def _open_window(self, window_id: str) -> Union[Plotter, _ProcessPlotterHandle]: window = self._post_windows.get(window_id) plotter = None if ( window and not window.plotter.is_closed() - and ( - not (in_notebook() or get_config()["blocking"]) - or window.refresh - ) + and (not (in_notebook() or get_config()["blocking"]) or 
         ):
             window.refresh = False
         else:
diff --git a/src/ansys/fluent/post/matplotlib/plotter_defns.py b/src/ansys/fluent/post/matplotlib/plotter_defns.py
index 3bab3c5d8d81..2aabe6cae8d3 100644
--- a/src/ansys/fluent/post/matplotlib/plotter_defns.py
+++ b/src/ansys/fluent/post/matplotlib/plotter_defns.py
@@ -74,18 +74,10 @@ def plot(self, data: dict) -> None:
                 max_x_value = np.amax(data[curve]["xvalues"])
             self._data[curve]["xvalues"] += data[curve]["xvalues"].tolist()
             self._data[curve]["yvalues"] += data[curve]["yvalues"].tolist()
-            self._min_y = (
-                min(self._min_y, min_y_value) if self._min_y else min_y_value
-            )
-            self._max_y = (
-                max(self._max_y, max_y_value) if self._max_y else max_y_value
-            )
-            self._min_x = (
-                min(self._min_x, min_x_value) if self._min_x else min_x_value
-            )
-            self._max_x = (
-                max(self._max_x, max_x_value) if self._max_x else max_x_value
-            )
+            self._min_y = min(self._min_y, min_y_value) if self._min_y else min_y_value
+            self._max_y = max(self._max_y, max_y_value) if self._max_y else max_y_value
+            self._min_x = min(self._min_x, min_x_value) if self._min_x else min_x_value
+            self._max_x = max(self._max_x, max_x_value) if self._max_x else max_x_value

         curve_lines = self.ax.lines
         for curve, curve_line in zip(self._curves, curve_lines):
@@ -95,9 +87,7 @@ def plot(self, data: dict) -> None:
         x_range = max_x_value - min_x_value
         y_range = max_y_value - min_y_value
         self.ax.set_xlim(self._min_x, self._max_x)
-        self.ax.set_ylim(
-            self._min_y - y_range * 0.2, self._max_y + y_range * 0.2
-        )
+        self.ax.set_ylim(self._min_y - y_range * 0.2, self._max_y + y_range * 0.2)
         if not self._visible:
             self._visible = True
             plt.show()
diff --git a/src/ansys/fluent/post/post_object_defns.py b/src/ansys/fluent/post/post_object_defns.py
index 1d0a71e92974..b6c49e773ebd 100644
--- a/src/ansys/fluent/post/post_object_defns.py
+++ b/src/ansys/fluent/post/post_object_defns.py
@@ -14,9 +14,7 @@ class BasePostObjectDefn:
     """Base class for post objects."""

     def _pre_display(self):
-        local_surfaces_provider = (
-            self._get_top_most_parent()._local_surfaces_provider()
-        )
+        local_surfaces_provider = self._get_top_most_parent()._local_surfaces_provider()
         for surf_name in self.surfaces_list():
             if surf_name in list(local_surfaces_provider):
                 surf_obj = local_surfaces_provider[surf_name]
@@ -24,9 +22,7 @@ def _pre_display(self):
                 surf_api.create_surface_on_server()

     def _post_display(self):
-        local_surfaces_provider = (
-            self._get_top_most_parent()._local_surfaces_provider()
-        )
+        local_surfaces_provider = self._get_top_most_parent()._local_surfaces_provider()
         for surf_name in self.surfaces_list():
             if surf_name in list(local_surfaces_provider):
                 surf_obj = local_surfaces_provider[surf_name]
@@ -34,9 +30,7 @@ def _post_display(self):
                 surf_api.delete_surface_on_server()


-class GraphicsDefn(
-    BasePostObjectDefn, metaclass=PyLocalNamedObjectMetaAbstract
-):
+class GraphicsDefn(BasePostObjectDefn, metaclass=PyLocalNamedObjectMetaAbstract):
     """Abstract base class for graphics objects."""

     @abstractmethod
@@ -104,9 +98,7 @@ def allowed_values(self):
             """Y axis function allowed values."""
             return [
                 v["solver_name"]
-                for k, v in self._data_extractor.field_info()
-                .get_fields_info()
-                .items()
+                for k, v in self._data_extractor.field_info().get_fields_info().items()
             ]

     class x_axis_function(metaclass=PyLocalPropertyMeta):
@@ -304,9 +296,7 @@ def range(self):
             """Iso value range."""
             field = self._parent.field()
             if field:
-                return self._data_extractor.field_info().get_range(
-                    field, True
-                )
+                return self._data_extractor.field_info().get_range(field, True)


 class ContourDefn(GraphicsDefn):
@@ -323,10 +313,7 @@ class field(metaclass=PyLocalPropertyMeta):
         def allowed_values(self):
             """Field allowed values."""
             field_info = self._data_extractor.field_info()
-            return [
-                v["solver_name"]
-                for k, v in field_info.get_fields_info().items()
-            ]
+            return [v["solver_name"] for k, v in field_info.get_fields_info().items()]

     class surfaces_list(metaclass=PyLocalPropertyMeta):
         """Contour surfaces."""
@@ -354,12 +341,8 @@ class node_values(metaclass=PyLocalPropertyMeta):
         def value(self):
             """Node value property setter."""
             filled = self._get_parent_by_type(ContourDefn).filled()
-            auto_range_off = self._get_parent_by_type(
-                ContourDefn
-            ).range.auto_range_off
-            if not filled or (
-                auto_range_off and auto_range_off.clip_to_range()
-            ):
+            auto_range_off = self._get_parent_by_type(ContourDefn).range.auto_range_off
+            if not filled or (auto_range_off and auto_range_off.clip_to_range()):
                 self._value = True
             return self._value

@@ -438,9 +421,7 @@ def value(self):
                        field_info = self._data_extractor.field_info()
                        field_range = field_info.get_range(
                            field,
-                            self._get_parent_by_type(
-                                ContourDefn
-                            ).node_values(),
+                            self._get_parent_by_type(ContourDefn).node_values(),
                        )
                        self._value = field_range[0]
                return self._value
@@ -469,9 +450,7 @@ def value(self):
                        field_info = self._data_extractor.field_info()
                        field_range = field_info.get_range(
                            field,
-                            self._get_parent_by_type(
-                                ContourDefn
-                            ).node_values(),
+                            self._get_parent_by_type(ContourDefn).node_values(),
                        )
                        self._value = field_range[1]

@@ -496,9 +475,7 @@ class vectors_of(metaclass=PyLocalPropertyMeta):
         def allowed_values(self):
             """Vectors of allowed values."""
             return list(
-                self._data_extractor.field_info()
-                .get_vector_fields_info()
-                .keys()
+                self._data_extractor.field_info().get_vector_fields_info().keys()
             )

     class surfaces_list(metaclass=PyLocalPropertyMeta):
diff --git a/src/ansys/fluent/post/pyvista/pyvista_objects.py b/src/ansys/fluent/post/pyvista/pyvista_objects.py
index d86f888ba1af..948e68c6db02 100644
--- a/src/ansys/fluent/post/pyvista/pyvista_objects.py
+++ b/src/ansys/fluent/post/pyvista/pyvista_objects.py
@@ -11,9 +11,7 @@
     SurfaceDefn,
     VectorDefn,
 )
-from ansys.fluent.post.pyvista.pyvista_windows_manager import (
-    pyvista_windows_manager,
-)
+from ansys.fluent.post.pyvista.pyvista_windows_manager import pyvista_windows_manager


 class Graphics:
@@ -31,20 +29,16 @@ def __init__(self, session, local_surfaces_provider=None):
         local_surfaces_provider : object, optional
             Object providing local surfaces.
""" - session_state = Graphics._sessions_state.get( - session.id if session else 1 - ) + session_state = Graphics._sessions_state.get(session.id if session else 1) if not session_state: session_state = self.__dict__ - Graphics._sessions_state[ - session.id if session else 1 - ] = session_state + Graphics._sessions_state[session.id if session else 1] = session_state self.session = session self._init_module(self, sys.modules[__name__]) else: self.__dict__ = session_state - self._local_surfaces_provider = ( - lambda: local_surfaces_provider or getattr(self, "Surfaces", []) + self._local_surfaces_provider = lambda: local_surfaces_provider or getattr( + self, "Surfaces", [] ) def _init_module(self, obj, mod): diff --git a/src/ansys/fluent/post/pyvista/pyvista_windows_manager.py b/src/ansys/fluent/post/pyvista/pyvista_windows_manager.py index 1b5b76263a20..b477714132fa 100644 --- a/src/ansys/fluent/post/pyvista/pyvista_windows_manager.py +++ b/src/ansys/fluent/post/pyvista/pyvista_windows_manager.py @@ -12,10 +12,7 @@ from ansys.fluent.core.utils.generic import AbstractSingletonMeta, in_notebook from ansys.fluent.post import get_config from ansys.fluent.post.post_object_defns import GraphicsDefn, PlotDefn -from ansys.fluent.post.post_windows_manager import ( - PostWindow, - PostWindowsManager, -) +from ansys.fluent.post.post_windows_manager import PostWindow, PostWindowsManager class PyVistaWindow(PostWindow): @@ -52,10 +49,7 @@ def plot(self): obj = self.post_object plotter = self.plotter camera = plotter.camera.copy() - if ( - in_notebook() - and self.plotter.theme._jupyter_backend == "pythreejs" - ): + if in_notebook() and self.plotter.theme._jupyter_backend == "pythreejs": plotter.remove_actor(plotter.renderer.actors.copy()) else: plotter.clear() @@ -93,9 +87,7 @@ def _scalar_bar_default_properties(self) -> dict: position_y=0.3, ) - def _display_vector( - self, obj, plotter: Union[BackgroundPlotter, pv.Plotter] - ): + def _display_vector(self, obj, plotter: Union[BackgroundPlotter, pv.Plotter]): if not obj.surfaces_list(): raise RuntimeError("Vector definition is incomplete.") @@ -142,9 +134,7 @@ def _display_vector( faces=mesh_data["faces"], ) mesh.cell_data["vectors"] = mesh_data[obj.vectors_of()] - velocity_magnitude = np.linalg.norm( - mesh_data[obj.vectors_of()], axis=1 - ) + velocity_magnitude = np.linalg.norm(mesh_data[obj.vectors_of()], axis=1) if obj.range.option() == "auto-range-off": auto_range_off = obj.range.auto_range_off range = [auto_range_off.minimum(), auto_range_off.maximum()] @@ -180,9 +170,7 @@ def _display_vector( if obj.show_edges(): plotter.add_mesh(mesh, show_edges=True, color="white") - def _display_contour( - self, obj, plotter: Union[BackgroundPlotter, pv.Plotter] - ): + def _display_contour(self, obj, plotter: Union[BackgroundPlotter, pv.Plotter]): if not obj.surfaces_list() or not obj.field(): raise RuntimeError("Contour definition is incomplete.") @@ -259,10 +247,7 @@ def _display_contour( scalars=field, value=auto_range_off.maximum(), ) - if ( - np.max(maximum_below[field]) - > auto_range_off.minimum() - ): + if np.max(maximum_below[field]) > auto_range_off.minimum(): minimum_above = maximum_below.clip_scalar( scalars=field, invert=False, @@ -280,9 +265,7 @@ def _display_contour( np.min(minimum_above[field]) != np.max(minimum_above[field]) ): - plotter.add_mesh( - minimum_above.contour(isosurfaces=20) - ) + plotter.add_mesh(minimum_above.contour(isosurfaces=20)) else: if filled: plotter.add_mesh( @@ -328,9 +311,7 @@ def _display_contour( ): 
                         plotter.add_mesh(mesh.contour(isosurfaces=20))

-    def _display_surface(
-        self, obj, plotter: Union[BackgroundPlotter, pv.Plotter]
-    ):
+    def _display_surface(self, obj, plotter: Union[BackgroundPlotter, pv.Plotter]):
         surface_api = obj._data_extractor.surface_api
         surface_api.create_surface_on_server()
         dummy_object = "dummy_object"
@@ -354,9 +335,7 @@ def _display_surface(
         del post_session.Meshes[dummy_object]
         surface_api.delete_surface_on_server()

-    def _display_mesh(
-        self, obj, plotter: Union[BackgroundPlotter, pv.Plotter]
-    ):
+    def _display_mesh(self, obj, plotter: Union[BackgroundPlotter, pv.Plotter]):
         if not obj.surfaces_list():
             raise RuntimeError("Mesh definition is incomplete.")
         field_info = obj._data_extractor.field_info()
@@ -387,9 +366,7 @@ def _display_mesh(
                 mesh_data["vertices"],
                 faces=mesh_data["faces"],
             )
-            plotter.add_mesh(
-                mesh, show_edges=obj.show_edges(), color="lightgrey"
-            )
+            plotter.add_mesh(mesh, show_edges=obj.show_edges(), color="lightgrey")

     def _get_refresh_for_plotter(self, window: "PyVistaWindow"):
         def refresh():
@@ -411,9 +388,7 @@ def refresh():
         return refresh


-class PyVistaWindowsManager(
-    PostWindowsManager, metaclass=AbstractSingletonMeta
-):
+class PyVistaWindowsManager(PostWindowsManager, metaclass=AbstractSingletonMeta):
     """Class for PyVista windows manager."""

     _condition = threading.Condition()
@@ -624,9 +599,7 @@ def _display(self) -> None:
             plotter = window.plotter if window else None
             animate = window.animate if window else False
             if not plotter or plotter._closed:
-                window = PyVistaWindow(
-                    self._window_id, self._post_object
-                )
+                window = PyVistaWindow(self._window_id, self._post_object)
                 plotter = window.plotter
                 self._app = plotter.app
                 plotter.add_callback(
@@ -658,9 +631,7 @@ def _open_and_plot_console(self, obj: object, window_id: str) -> None:
         if not self._plotter_thread:
             if Session._monitor_thread:
                 Session._monitor_thread.cbs.append(self._exit)
-            self._plotter_thread = threading.Thread(
-                target=self._display, args=()
-            )
+            self._plotter_thread = threading.Thread(target=self._display, args=())
             self._plotter_thread.start()

         with self._condition:
@@ -696,8 +667,7 @@ def _get_windows_id(
            if not window.plotter._closed
            and (
                not session_id
-                or session_id
-                == window.post_object._data_extractor.id()
+                or session_id == window.post_object._data_extractor.id()
            )
        ]
        if not windows_id or window_id in windows_id
diff --git a/tests/test_flobject.py b/tests/test_flobject.py
index a288ecc0905c..4db71a9bfdb8 100644
--- a/tests/test_flobject.py
+++ b/tests/test_flobject.py
@@ -124,13 +124,9 @@ def get_static_info(cls):
         if cls.__doc__:
             ret["help"] = cls.__doc__
         if cls.children:
-            ret["children"] = {
-                c: v.get_static_info() for c, v in cls.children.items()
-            }
+            ret["children"] = {c: v.get_static_info() for c, v in cls.children.items()}
         if cls.commands:
-            ret["commands"] = {
-                c: v.get_static_info() for c, v in cls.commands.items()
-            }
+            ret["commands"] = {c: v.get_static_info() for c, v in cls.commands.items()}
         return ret

@@ -167,9 +163,7 @@ def get_child(self, c):
         return self._objs[c]

     def rename(self, new, old):
-        self._objs = {
-            (new if k == old else k): v for k, v in self._objs.items()
-        }
+        self._objs = {(new if k == old else k): v for k, v in self._objs.items()}

     def get_object_names(self):
         return list(self._objs.keys())
@@ -191,9 +185,7 @@ def get_static_info(cls):
         ret["help"] = cls.__doc__
         ret["object-type"] = cls.child_object_type.get_static_info()
         if cls.commands:
-            ret["commands"] = {
-                c: v.get_static_info() for c, v in cls.commands.items()
-            }
+            ret["commands"] = {c: v.get_static_info() for c, v in cls.commands.items()}
ret["commands"] = {c: v.get_static_info() for c, v in cls.commands.items()} return ret @@ -253,9 +245,7 @@ def get_static_info(cls): ret["help"] = cls.__doc__ ret["object-type"] = cls.child_object_type.get_static_info() if cls.commands: - ret["commands"] = { - c: v.get_static_info() for c, v in cls.commands.items() - } + ret["commands"] = {c: v.get_static_info() for c, v in cls.commands.items()} return ret @@ -292,9 +282,7 @@ class Root(Group): class G1(Group): class S1(String): attrs = { - "active?": lambda self: not self.parent.objs[ - "b-3" - ].get_state(), + "active?": lambda self: not self.parent.objs["b-3"].get_state(), "allowed-values": lambda self: ["foo", "bar"], } diff --git a/tests/test_post.py b/tests/test_post.py index 572d29b7748d..bd965a3974bd 100644 --- a/tests/test_post.py +++ b/tests/test_post.py @@ -124,14 +124,10 @@ class MockLocalObjectDataExtractor: def __init__(self, obj=None): if not MockLocalObjectDataExtractor._session_data: with open( - str( - Path(MockLocalObjectDataExtractor._session_dump).resolve() - ), + str(Path(MockLocalObjectDataExtractor._session_dump).resolve()), "rb", ) as pickle_obj: - MockLocalObjectDataExtractor._session_data = pickle.load( - pickle_obj - ) + MockLocalObjectDataExtractor._session_data = pickle.load(pickle_obj) self.field_info = lambda: MockFieldInfo( MockLocalObjectDataExtractor._session_data ) @@ -157,12 +153,8 @@ def test_field_api(): provide_faces_centroid=True, provide_faces=False, ) - field_data.add_get_scalar_fields_request( - surfaces_id[:1], "temperature", True - ) - field_data.add_get_scalar_fields_request( - surfaces_id[:1], "temperature", False - ) + field_data.add_get_scalar_fields_request(surfaces_id[:1], "temperature", True) + field_data.add_get_scalar_fields_request(surfaces_id[:1], "temperature", False) fields = field_data.get_fields() surface_tag = 0 @@ -286,25 +278,19 @@ def test_contour_object(): if k in contour1.surfaces_list() ] - range = field_info.get_range( - contour1.field(), contour1.node_values(), surfaces_id - ) + range = field_info.get_range(contour1.field(), contour1.node_values(), surfaces_id) assert range[0] == pytest.approx(contour1.range.auto_range_off.minimum()) assert range[1] == pytest.approx(contour1.range.auto_range_off.maximum()) # Range should adjust to min/max of cell field values. 
     contour1.node_values = False
-    range = field_info.get_range(
-        contour1.field(), contour1.node_values(), surfaces_id
-    )
+    range = field_info.get_range(contour1.field(), contour1.node_values(), surfaces_id)
     assert range[0] == pytest.approx(contour1.range.auto_range_off.minimum())
     assert range[1] == pytest.approx(contour1.range.auto_range_off.maximum())

     # Range should adjust to min/max of node field values
     contour1.field = "pressure"
-    range = field_info.get_range(
-        contour1.field(), contour1.node_values(), surfaces_id
-    )
+    range = field_info.get_range(contour1.field(), contour1.node_values(), surfaces_id)
     assert range[0] == pytest.approx(contour1.range.auto_range_off.minimum())
     assert range[1] == pytest.approx(contour1.range.auto_range_off.maximum())

diff --git a/tests/test_scheme_eval.py b/tests/test_scheme_eval.py
index cb4e58ddb769..131388f60594 100644
--- a/tests/test_scheme_eval.py
+++ b/tests/test_scheme_eval.py
@@ -66,9 +66,7 @@
         {"a": 5.0},
         {
             "pair": {
-                "car": {
-                    "pair": {"car": {"str": "a"}, "cdr": {"flonum": 5.0}}
-                },
+                "car": {"pair": {"car": {"str": "a"}, "cdr": {"flonum": 5.0}}},
             }
         },
     ),
@@ -76,9 +74,7 @@
         {"a": 5.0, "b": 10.0},
         {
             "pair": {
-                "car": {
-                    "pair": {"car": {"str": "a"}, "cdr": {"flonum": 5.0}}
-                },
+                "car": {"pair": {"car": {"str": "a"}, "cdr": {"flonum": 5.0}}},
                 "cdr": {
                     "pair": {
                         "car": {
@@ -145,9 +141,7 @@ def test_convert_py_value_to_scheme_pointer(
         {"a": 5.0, "b": 10.0},
         {
             "pair": {
-                "car": {
-                    "pair": {"car": {"str": "a"}, "cdr": {"flonum": 5.0}}
-                },
+                "car": {"pair": {"car": {"str": "a"}, "cdr": {"flonum": 5.0}}},
                 "cdr": {
                     "pair": {
                         "car": {
@@ -165,9 +159,7 @@ def test_convert_py_value_to_scheme_pointer(
         [("a", 5.0), (5, 10.0)],
         {
             "pair": {
-                "car": {
-                    "pair": {"car": {"str": "a"}, "cdr": {"flonum": 5.0}}
-                },
+                "car": {"pair": {"car": {"str": "a"}, "cdr": {"flonum": 5.0}}},
                 "cdr": {
                     "pair": {
                         "car": {
@@ -204,9 +196,7 @@ def test_convert_py_value_to_scheme_pointer(
                 "cdr": {
                     "pair": {
                         "car": {"flonum": 10.0},
-                        "cdr": {
-                            "pair": {"car": {"b": True}}
-                        },
+                        "cdr": {"pair": {"car": {"b": True}}},
                     }
                 },
             }
@@ -239,9 +229,7 @@ def test_convert_py_value_to_scheme_pointer(
                 "cdr": {
                     "pair": {
                         "car": {"flonum": 10.0},
-                        "cdr": {
-                            "pair": {"car": {"b": True}}
-                        },
+                        "cdr": {"pair": {"car": {"b": True}}},
                     }
                },
            }