diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 25bb46b00b2d..4f45db311957 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,7 +20,7 @@ env: # You should go up in number, if you go down (or repeat a previous value) # you might end up reusing a previous cache if it haven't been deleted already. # It applies 7 days retention policy by default. - RESET_EXAMPLES_CACHE: 2 + RESET_EXAMPLES_CACHE: 3 jobs: stylecheck: diff --git a/requirements/requirements_tests.txt b/requirements/requirements_tests.txt index a3cd4cfc5647..2b953226d8f2 100644 --- a/requirements/requirements_tests.txt +++ b/requirements/requirements_tests.txt @@ -1,3 +1,5 @@ pytest pytest-mock -pytest-cov \ No newline at end of file +pytest-cov +psutil>=5.9.1 +docker>=5.0.3 \ No newline at end of file diff --git a/src/ansys/fluent/core/launcher/fluent_container.py b/src/ansys/fluent/core/launcher/fluent_container.py index 3a1138957ca0..5e6521e287e1 100644 --- a/src/ansys/fluent/core/launcher/fluent_container.py +++ b/src/ansys/fluent/core/launcher/fluent_container.py @@ -1,4 +1,5 @@ import os +from pathlib import Path import socket import subprocess import tempfile @@ -36,6 +37,7 @@ def start_fluent_container(mounted_from: str, mounted_to: str, args: List[str]) timeout = 100 license_server = os.environ["ANSYSLMD_LICENSE_FILE"] port = _get_free_port() + container_sifile = mounted_to + "/" + Path(sifile).name try: subprocess.run( @@ -56,7 +58,7 @@ def start_fluent_container(mounted_from: str, mounted_to: str, args: List[str]) "FLUENT_LAUNCHED_FROM_PYFLUENT=1", "ghcr.io/pyansys/pyfluent", "-g", - f"-sifile={sifile}", + f"-sifile={container_sifile}", ] + args ) diff --git a/src/ansys/fluent/core/session.py b/src/ansys/fluent/core/session.py index f8cfc7716eef..bacc19de6011 100644 --- a/src/ansys/fluent/core/session.py +++ b/src/ansys/fluent/core/session.py @@ -1,11 +1,11 @@ """Module containing class encapsulating Fluent connection.""" -import atexit from ctypes import c_int, sizeof import itertools import os import threading from typing import Any, Callable, List, Optional, Tuple +import weakref import grpc @@ -243,13 +243,24 @@ def __init__( self.scheme_eval = SchemeEval(self._scheme_eval_service) self._cleanup_on_exit = cleanup_on_exit - Session._monitor_thread.cbs.append(self.exit) if start_transcript: self.start_transcript() self._remote_instance = remote_instance + self._finalizer = weakref.finalize( + self, + Session._exit, + self._channel, + self._cleanup_on_exit, + self.scheme_eval, + self._transcript_service, + self.events_manager, + self._remote_instance, + ) + Session._monitor_thread.cbs.append(self._finalizer) + @classmethod def create_from_server_info_file( cls, @@ -293,18 +304,20 @@ def id(self) -> str: """Return the session id.""" return self._id - def _print_transcript(self, transcript: str): + @staticmethod + def _print_transcript(transcript: str): print(transcript) - def _process_transcript(self): - responses = self._transcript_service.begin_streaming() + @staticmethod + def _process_transcript(transcript_service): + responses = transcript_service.begin_streaming() transcript = "" while True: try: response = next(responses) transcript += response.transcript if transcript[-1] == "\n": - self._print_transcript(transcript[0:-1]) + Session._print_transcript(transcript[0:-1]) transcript = "" except StopIteration: break @@ -312,7 +325,7 @@ def _process_transcript(self): def start_transcript(self) -> None: """Start streaming of Fluent transcript.""" self._transcript_thread = threading.Thread( - target=Session._process_transcript, args=(self,) + target=Session._process_transcript, args=(self._transcript_service,) ) self._transcript_thread.start() @@ -333,32 +346,34 @@ def check_health(self) -> str: def exit(self) -> None: """Close the Fluent connection and exit Fluent.""" - if self._channel: - if self._cleanup_on_exit: - self.scheme_eval.exec(("(exit-server)",)) - self._transcript_service.end_streaming() - self.events_manager.stop() - self._channel.close() - self._channel = None + self._finalizer() - if self._remote_instance: - self._remote_instance.delete() + @staticmethod + def _exit( + channel, + cleanup_on_exit, + scheme_eval, + transcript_service, + events_manager, + remote_instance, + ) -> None: + if channel: + if cleanup_on_exit: + scheme_eval.exec(("(exit-server)",)) + transcript_service.end_streaming() + events_manager.stop() + channel.close() + channel = None + + if remote_instance: + remote_instance.delete() def __enter__(self): """Close the Fluent connection and exit Fluent.""" return self def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any): - self.exit() - - @classmethod - def register_on_exit(cls, callback: Callable) -> None: - cls._on_exit_cbs.append(callback) - - @staticmethod - def exit_all() -> None: - for cb in Session._on_exit_cbs: - cb() + self._finalizer() class Meshing: def __init__( @@ -484,6 +499,3 @@ def root(self): LOG.warning("The settings API is currently experimental.") self._settings_root = settings_get_root(flproxy=self._settings_service) return self._settings_root - - -atexit.register(Session.exit_all) diff --git a/tests/conftest.py b/tests/conftest.py index 17233f1f2c6a..54fc9649d042 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,5 +2,5 @@ @pytest.fixture -def with_running_pytest(monkeypatch: pytest.MonkeyPatch) -> None: +def with_launching_container(monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv("PYFLUENT_LAUNCH_CONTAINER", "1") diff --git a/tests/test_fluent_session.py b/tests/test_fluent_session.py index 11b96d66d3dd..c7b0dc4a1bd4 100644 --- a/tests/test_fluent_session.py +++ b/tests/test_fluent_session.py @@ -1,9 +1,16 @@ +import os +import time + +import psutil from util.solver_workflow import ( # noqa: F401 new_solver_session, new_solver_session_no_transcript, ) +import ansys.fluent.core as pyfluent from ansys.fluent.core.examples import download_file +from ansys.fluent.core.session import Session +import docker def _read_case(session): @@ -22,7 +29,7 @@ def print_transcript(transcript): print_transcript.called = False print_transcript.transcript = None - session._print_transcript = print_transcript + Session._print_transcript = print_transcript _read_case(session=session) @@ -45,3 +52,46 @@ def print_transcript(transcript): _read_case(session=session) assert not print_transcript.called + + +def test_server_exits_when_session_goes_out_of_scope(with_launching_container) -> None: + def f(): + session = pyfluent.launch_fluent() + f.server_pid = session.scheme_eval.scheme_eval("(%cx-process-id)") + + if os.getenv("PYFLUENT_START_INSTANCE") == "0": + client = docker.from_env() + containers_before = client.containers.list() + f() + time.sleep(10) + containers_after = client.containers.list() + new_containers = set(containers_after) - set(containers_before) + assert not new_containers + else: + f() + time.sleep(10) + assert not psutil.pid_exists(f.server_pid) + + +def test_server_does_not_exit_when_session_goes_out_of_scope( + with_launching_container, +) -> None: + def f(): + session = pyfluent.launch_fluent(cleanup_on_exit=False) + f.server_pid = session.scheme_eval.scheme_eval("(%cx-process-id)") + + if os.getenv("PYFLUENT_START_INSTANCE") == "0": + client = docker.from_env() + containers_before = client.containers.list() + f() + time.sleep(10) + containers_after = client.containers.list() + new_containers = set(containers_after) - set(containers_before) + assert new_containers + for container in new_containers: + container.stop() + else: + f() + time.sleep(10) + assert psutil.pid_exists(f.server_pid) + psutil.Process(f.server_pid).kill() diff --git a/tests/test_tui_api.py b/tests/test_tui_api.py index 7cb97c2092f0..269fa018438e 100644 --- a/tests/test_tui_api.py +++ b/tests/test_tui_api.py @@ -1,6 +1,8 @@ +import pytest from util.solver_workflow import new_solver_session # noqa: F401 +@pytest.mark.skip("randomly failing due to missing transcript capture") def test_report_system_proc_stats_tui(new_solver_session, capsys) -> None: new_solver_session.start_transcript() # Issue: Transcript missing for the first TUI command diff --git a/tests/util/meshing_workflow.py b/tests/util/meshing_workflow.py index ba2f3a78acc1..120776e3e3bc 100644 --- a/tests/util/meshing_workflow.py +++ b/tests/util/meshing_workflow.py @@ -54,7 +54,7 @@ def reset_workflow(mesh_session): @pytest.fixture -def new_mesh_session(with_running_pytest): +def new_mesh_session(with_launching_container): mesher = create_mesh_session() yield mesher mesher.exit() @@ -75,7 +75,7 @@ def new_watertight_workflow(new_watertight_workflow_session): @pytest.fixture -def shared_mesh_session(with_running_pytest): +def shared_mesh_session(with_launching_container): global _mesher if not _mesher: _mesher = create_mesh_session() @@ -128,7 +128,7 @@ def new_fault_tolerant_workflow(new_fault_tolerant_workflow_session): @pytest.fixture -def shared_mesh_session(with_running_pytest): +def shared_mesh_session(with_launching_container): global _mesher if not _mesher: _mesher = create_mesh_session() diff --git a/tests/util/solver_workflow.py b/tests/util/solver_workflow.py index 703a21ad7a99..eb436531edb7 100644 --- a/tests/util/solver_workflow.py +++ b/tests/util/solver_workflow.py @@ -8,14 +8,14 @@ def create_solver_session(*args, **kwargs): @pytest.fixture -def new_solver_session(with_running_pytest): +def new_solver_session(with_launching_container): solver = create_solver_session() yield solver solver.exit() @pytest.fixture -def new_solver_session_no_transcript(with_running_pytest): +def new_solver_session_no_transcript(with_launching_container): solver = create_solver_session(start_transcript=False) yield solver solver.exit()