From e6ef73e95eb62e2c04274fe987370411b976f1d8 Mon Sep 17 00:00:00 2001 From: Thomas Baumann <39156931+brownbaerchen@users.noreply.github.com> Date: Wed, 5 Nov 2025 12:07:01 +0100 Subject: [PATCH 1/6] Added basic infrastructure for 3D RBC --- .../problem_classes/RayleighBenard3D.py | 17 ++ .../sweeper_classes/Runge_Kutta.py | 9 + .../analysis_scripts/compute_RBC3D_data.py | 207 +++++++++++++ .../projects/GPU/analysis_scripts/plot_Nu.py | 100 +++++++ pySDC/projects/GPU/configs/RBC3D_configs.py | 280 ++++++++++++++++++ pySDC/projects/GPU/configs/base_config.py | 15 +- pySDC/projects/GPU/run_experiment.py | 3 +- .../test_sweepers/test_Runge_Kutta_sweeper.py | 3 + 8 files changed, 629 insertions(+), 5 deletions(-) create mode 100644 pySDC/projects/GPU/analysis_scripts/compute_RBC3D_data.py create mode 100644 pySDC/projects/GPU/analysis_scripts/plot_Nu.py create mode 100644 pySDC/projects/GPU/configs/RBC3D_configs.py diff --git a/pySDC/implementations/problem_classes/RayleighBenard3D.py b/pySDC/implementations/problem_classes/RayleighBenard3D.py index 5b1da41d7b..13c2d6c7d0 100644 --- a/pySDC/implementations/problem_classes/RayleighBenard3D.py +++ b/pySDC/implementations/problem_classes/RayleighBenard3D.py @@ -462,3 +462,20 @@ def get_frequency_spectrum(self, u): spectrum[..., index_global] += _spectrum[..., index_local] return xp.array(unique_k_all), spectrum + + def get_vertical_profiles(self, u, components): + if self.spectral_space: + u_hat = u.copy() + else: + u_hat = self.transform(u) + + _u_hat = self.axes[-1].itransform(u_hat, axes=(-1,)) + + avgs = {} + for c in components: + i = self.index(c) + avg = self.xp.ascontiguousarray(_u_hat[i, 0, 0, :].real) / self.axes[0].N / self.axes[1].N + self.comm.Bcast(avg, root=0) + avgs[c] = avg + + return avgs diff --git a/pySDC/implementations/sweeper_classes/Runge_Kutta.py b/pySDC/implementations/sweeper_classes/Runge_Kutta.py index 6f9c03df89..72a99f33a8 100644 --- a/pySDC/implementations/sweeper_classes/Runge_Kutta.py +++ b/pySDC/implementations/sweeper_classes/Runge_Kutta.py @@ -510,6 +510,15 @@ class IMEXEuler(RungeKuttaIMEX): matrix_explicit = ForwardEuler.matrix +class IMEXEulerStifflyAccurate(RungeKuttaIMEX): + nodes = np.array([0, 1]) + weights = np.array([0, 1]) + weights_explicit = np.array([1, 0]) + + matrix = np.array([[0, 0], [0, 1]]) + matrix_explicit = np.array([[0, 0], [1, 0]]) + + class CrankNicolson(RungeKutta): """ Implicit Runge-Kutta method of second order, A-stable. 
diff --git a/pySDC/projects/GPU/analysis_scripts/compute_RBC3D_data.py b/pySDC/projects/GPU/analysis_scripts/compute_RBC3D_data.py new file mode 100644 index 0000000000..f84320b6c8 --- /dev/null +++ b/pySDC/projects/GPU/analysis_scripts/compute_RBC3D_data.py @@ -0,0 +1,207 @@ +from pySDC.projects.GPU.configs.base_config import get_config +from pySDC.projects.GPU.run_experiment import parse_args +from pySDC.helpers.fieldsIO import FieldsIO +import matplotlib.pyplot as plt +from tqdm import tqdm +from mpi4py import MPI +import numpy as np +import pickle +import os + +# global configs +BASE_PATH = 'data/RBC_time_averaged' + + +# prepare problem instance +args = parse_args() +comm = MPI.COMM_WORLD +config = get_config(args) +desc = config.get_description(**args) +P = desc['problem_class']( + **{ + **desc['problem_params'], + 'spectral_space': False, + 'comm': comm, + 'Dirichlet_recombination': False, + 'left_preconditioner': False, + } +) +P.setUpFieldsIO() +zInt = P.axes[-1].get_integration_weights() +xp = P.xp + +# prepare paths +os.makedirs(BASE_PATH, exist_ok=True) +fname = config.get_file_name() +start = fname.index('data') +path = f'{BASE_PATH}/{fname[start + 5:-6]}.pickle' + +# open simulation data +data = FieldsIO.fromFile(fname) + +# prepare arrays to store data in +Nu = { + 'V': [], + 'b': [], + 't': [], + 'thermal': [], + 'kinetic': [], +} +t = [] +T = [] +profiles = {key: [] for key in ['T', 'u', 'v', 'w']} +rms_profiles = {key: [] for key in profiles.keys()} +spectrum = [] +spectrum_all = [] + + +# try to load time averaged values +u_mean_profile = P.u_exact() +if os.path.isfile(path): + with open(path, 'rb') as file: + avg_data = pickle.load(file) + if comm.rank == 0: + print(f'Read data from file {path!r}') + for key in profiles.keys(): + if f'profile_{key}' in avg_data.keys(): + u_mean_profile[P.index(key)] = avg_data[f'profile_{key}'][P.local_slice(False)[-1]] +elif comm.rank == 0: + print('No mean profiles available yet. 
Please rerun script after completion to get correct RMS profiles') + +# prepare progress bar +indeces = range(args['restart_idx'], data.nFields) +if P.comm.rank == 0: + indeces = tqdm(indeces) + +# loop through all data points and compute stuff +for i in indeces: + _t, u = data.readField(i) + + # Nusselt numbers + _Nu = P.compute_Nusselt_numbers(u) + if any(me > 1e3 for me in _Nu.values()): + continue + + for key in Nu.keys(): + Nu[key].append(_Nu[key]) + + t.append(_t) + + # profiles + _profiles = P.get_vertical_profiles(u, list(profiles.keys())) + _rms_profiles = P.get_vertical_profiles((u - u_mean_profile) ** 2, list(profiles.keys())) + for key in profiles.keys(): + profiles[key].append(_profiles[key]) + rms_profiles[key].append(_rms_profiles[key]) + + # spectrum + k, s = P.get_frequency_spectrum(u) + s_mean = zInt @ P.axes[-1].transform(s[0], axes=(0,)) + spectrum.append(s_mean) + spectrum_all.append(s) + + +# make a plot of the results +t = xp.array(t) +z = P.axes[-1].get_1dgrid() + +if config.converged == 0: + print('Warning: no convergence has been set for this configuration!') + +fig, axs = plt.subplots(1, 4, figsize=(18, 4)) +for key in Nu.keys(): + axs[0].plot(t, Nu[key], label=f'$Nu_{{{key}}}$') + if config.converged > 0: + axs[0].axvline(config.converged, color='black') +axs[0].set_ylabel('$Nu$') +axs[0].set_xlabel('$t$') +axs[0].legend(frameon=False) + +# compute differences in Nusselt numbers +avg_Nu = {} +std_Nu = {} +for key in Nu.keys(): + _Nu = [Nu[key][i] for i in range(len(Nu[key])) if t[i] > config.converged] + avg_Nu[key] = xp.mean(_Nu) + std_Nu[key] = xp.std(_Nu) + +rel_error = { + key: abs(avg_Nu[key] - avg_Nu['V']) / avg_Nu['V'] + for key in [ + 't', + 'b', + 'thermal', + 'kinetic', + ] +} +if comm.rank == 0: + print( + f'With Ra={P.Rayleigh:.0e} got Nu={avg_Nu["V"]:.2f}+-{std_Nu["V"]:.2f} with errors: Top {rel_error["t"]:.2e}, bottom: {rel_error["b"]:.2e}, thermal: {rel_error["thermal"]:.2e}, kinetic: {rel_error["kinetic"]:.2e}' + ) + + +# compute average profiles +avg_profiles = {} +for key, values in profiles.items(): + values_from_convergence = [values[i] for i in range(len(values)) if t[i] >= config.converged] + + avg_profiles[key] = xp.mean(values_from_convergence, axis=0) + +avg_rms_profiles = {} +for key, values in rms_profiles.items(): + values_from_convergence = [values[i] for i in range(len(values)) if t[i] >= config.converged] + avg_rms_profiles[key] = xp.sqrt(xp.mean(values_from_convergence, axis=0)) + + +# average T +avg_T = avg_profiles['T'] +axs[1].axvline(0.5, color='black') +axs[1].plot(avg_T, z) +axs[1].set_xlabel('$T$') +axs[1].set_ylabel('$z$') + +# rms profiles +avg_T = avg_rms_profiles['T'] +max_idx = xp.argmax(avg_T) +res_in_boundary_layer = max_idx if max_idx < len(z) / 2 else len(z) - max_idx +boundary_layer = z[max_idx] if max_idx > len(z) / 2 else P.axes[-1].L - z[max_idx] +if comm.rank == 0: + print(f'Thermal boundary layer of thickness {boundary_layer:.2f} is resolved with {res_in_boundary_layer} points') +axs[2].axhline(z[max_idx], color='black') +axs[2].plot(avg_T, z) +axs[2].scatter(avg_T, z) +axs[2].set_xlabel(r'$T_\text{rms}$') +axs[2].set_ylabel('$z$') + +# spectrum +_s = xp.array(spectrum) +avg_spectrum = xp.mean(_s[t >= config.converged], axis=0) +axs[3].loglog(k[avg_spectrum > 1e-15], avg_spectrum[avg_spectrum > 1e-15]) +axs[3].set_xlabel('$k$') +axs[3].set_ylabel(r'$\|\hat{u}_x\|$') + +if P.comm.rank == 0: + write_data = { + 't': t, + 'Nu': Nu, + 'avg_Nu': avg_Nu, + 'std_Nu': std_Nu, + 'z': P.axes[-1].get_1dgrid(), + 'k': k, 
+ 'spectrum': spectrum_all, + 'avg_spectrum': avg_spectrum, + 'boundary_layer_thickness': boundary_layer, + 'res_in_boundary_layer': res_in_boundary_layer, + } + for key, values in avg_profiles.items(): + write_data[f'profile_{key}'] = values + for key, values in avg_rms_profiles.items(): + write_data[f'rms_profile_{key}'] = values + + with open(path, 'wb') as file: + pickle.dump(write_data, file) + print(f'Wrote data to file {path!r}') + + fig.tight_layout() + fig.savefig(f'{BASE_PATH}/{fname[start + 5:-6]}.pdf') + plt.show() diff --git a/pySDC/projects/GPU/analysis_scripts/plot_Nu.py b/pySDC/projects/GPU/analysis_scripts/plot_Nu.py new file mode 100644 index 0000000000..133a5fdd37 --- /dev/null +++ b/pySDC/projects/GPU/analysis_scripts/plot_Nu.py @@ -0,0 +1,100 @@ +import pickle +import matplotlib.pyplot as plt +import numpy as np +from scipy import integrate +from pySDC.helpers.plot_helper import figsize_by_journal, setup_mpl + +setup_mpl() + + +def get_pySDC_data(Ra, RK=False, res=-1, dt=-1, config_name='RBC3DG4'): + assert type(Ra) == str + + base_path = 'data/RBC_time_averaged' + + if RK: + config_name = f'{config_name}RK' + + path = f'{base_path}/{config_name}Ra{Ra}-res{res}-dt{dt:.0e}.pickle' + with open(path, 'rb') as file: + data = pickle.load(file) + + return data + + +def interpolate_NuV_to_reference_times(data, reference_data, order=12): + from qmat.lagrange import getSparseInterpolationMatrix + + t_in = np.array(data['t']) + t_out = np.array([me for me in reference_data['t'] if me <= max(t_in)]) + + interpolation_matrix = getSparseInterpolationMatrix(t_in, t_out, order=order) + return interpolation_matrix @ t_in, interpolation_matrix @ data['Nu']['V'] + + +def plot_Nu(Ra, res, dts, config_name, ref, ax, title): # pragma: no cover + ax.plot(ref['t'], ref['Nu']['V'], color='black', ls='--') + ax.set_title(title) + Nu_ref = np.array(ref['Nu']['V']) + + for dt in dts: + data = get_pySDC_data(Ra=Ra, res=res, dt=dt, config_name=config_name) + t_i, Nu_i = interpolate_NuV_to_reference_times(data, ref) + ax.plot(t_i, Nu_i, label=rf'$\Delta t={{{dt}}}$') + + error = np.abs(Nu_ref[: Nu_i.shape[0]] - Nu_i) / np.abs(Nu_ref[: Nu_i.shape[0]]) + + # compute mean Nu + mask = np.logical_and(t_i >= 100, t_i <= 200) + Nu_mean = np.mean(Nu_i[mask]) + Nu_std = np.std(Nu_i[mask]) + + last_line = ax.get_lines()[-1] + if any(error > 1e-2): + deviates = min(t_i[error > 1e-2]) + ax.axvline(deviates, color=last_line.get_color(), ls=':') + print(f'{title} dt={dt} Nu={Nu_mean:.3f}+={Nu_std:.3f}, deviates more than 1% from t={deviates:.2f}') + else: + print(f'{title} dt={dt} Nu={Nu_mean:.3f}+={Nu_std:.3f}') + ax.legend(frameon=True, loc='upper left') + + +def plot_Nu_over_time_Ra1e5(): # pragma: no cover + Nu_fig, Nu_axs = plt.subplots(4, 1, sharex=True, sharey=True, figsize=figsize_by_journal('Nature_CS', 1, 1.4)) + + Ra = '1e5' + res = 32 + + ref_data = get_pySDC_data(Ra, res=res, dt=0.01, config_name='RBC3DG4R4') + + _Nu_axs = {'SDC 3': Nu_axs[1], 'SDC': Nu_axs[0], 'RK': Nu_axs[2], 'Euler': Nu_axs[3]} + + plot_Nu( + '1e5', + 32, + [ + 0.06, + 0.04, + 0.02, + ], + 'RBC3DG4R4SDC34', + ref_data, + Nu_axs[0], + 'SDC34', + ) + plot_Nu('1e5', 32, [0.06, 0.05, 0.02, 0.01], 'RBC3DG4R4SDC23', ref_data, Nu_axs[1], 'SDC23') + plot_Nu('1e5', 32, [0.05, 0.04, 0.02, 0.01, 0.005], 'RBC3DG4R4RK', ref_data, Nu_axs[2], 'RK443') + plot_Nu('1e5', 32, [0.02, 0.01, 0.005], 'RBC3DG4R4Euler', ref_data, Nu_axs[3], 'RK111') + + Nu_axs[-1].set_xlabel('$t$') + Nu_axs[-1].set_ylabel('$Nu$') + + Nu_fig.tight_layout() + 
Nu_fig.savefig('./plots/Nu_over_time_Ra1e5.pdf', bbox_inches='tight') + + +if __name__ == '__main__': + + plot_Nu_over_time_Ra1e5() + + plt.show() diff --git a/pySDC/projects/GPU/configs/RBC3D_configs.py b/pySDC/projects/GPU/configs/RBC3D_configs.py new file mode 100644 index 0000000000..37db0e35bb --- /dev/null +++ b/pySDC/projects/GPU/configs/RBC3D_configs.py @@ -0,0 +1,280 @@ +from pySDC.projects.GPU.configs.base_config import Config + + +def get_config(args): + name = args['config'] + if name == 'RBC3D': + return RayleighBenard3DRegular(args) + elif name in globals().keys(): + return globals()[name](args) + else: + raise NotImplementedError(f'There is no configuration called {name!r}!') + + +class RayleighBenard3DRegular(Config): + sweeper_type = 'IMEX' + Tend = 50 + gamma = 1 + res_ratio = 1 + dealiasing = 3.0 / 2.0 + + def get_file_name(self): + res = self.args['res'] + return f'{self.base_path}/data/{type(self).__name__}-res{res}.pySDC' + + def get_LogToFile(self, *args, **kwargs): + if self.comms[1].rank > 0: + return None + import numpy as np + from pySDC.implementations.hooks.log_solution import LogToFile + + LogToFile.filename = self.get_file_name() + LogToFile.time_increment = 5e-1 + # LogToFile.allow_overwriting = True + + return LogToFile + + def get_controller_params(self, *args, **kwargs): + from pySDC.implementations.hooks.log_step_size import LogStepSize + + controller_params = super().get_controller_params(*args, **kwargs) + controller_params['hook_class'] += [LogStepSize] + return controller_params + + def get_description(self, *args, MPIsweeper=False, res=-1, **kwargs): + from pySDC.implementations.problem_classes.RayleighBenard3D import ( + RayleighBenard3D, + ) + from pySDC.implementations.problem_classes.generic_spectral import ( + compute_residual_DAE, + compute_residual_DAE_MPI, + ) + from pySDC.implementations.convergence_controller_classes.step_size_limiter import StepSizeSlopeLimiter + from pySDC.implementations.convergence_controller_classes.crash import StopAtNan + + desc = super().get_description(*args, MPIsweeper=MPIsweeper, **kwargs) + + if MPIsweeper: + desc['sweeper_class'].compute_residual = compute_residual_DAE_MPI + else: + desc['sweeper_class'].compute_residual = compute_residual_DAE + + desc['level_params']['dt'] = 0.01 + desc['level_params']['restol'] = 1e-7 + + desc['convergence_controllers'][StepSizeSlopeLimiter] = {'dt_rel_min_slope': 0.1} + desc['convergence_controllers'][StopAtNan] = {} + + desc['sweeper_params']['quad_type'] = 'RADAU-RIGHT' + desc['sweeper_params']['num_nodes'] = 2 + desc['sweeper_params']['QI'] = 'MIN-SR-S' + desc['sweeper_params']['QE'] = 'PIC' + + res = 64 if res == -1 else res + desc['problem_params']['Rayleigh'] = 1e8 + desc['problem_params']['nx'] = self.res_ratio * res + desc['problem_params']['ny'] = self.res_ratio * res + desc['problem_params']['nz'] = res + desc['problem_params']['Lx'] = self.gamma + desc['problem_params']['Ly'] = self.gamma + desc['problem_params']['Lz'] = 1 + desc['problem_params']['heterogeneous'] = True + desc['problem_params']['dealiasing'] = self.dealiasing + + desc['step_params']['maxiter'] = 3 + + desc['problem_class'] = RayleighBenard3D + + return desc + + def get_initial_condition(self, P, *args, restart_idx=0, **kwargs): + + if restart_idx == 0: + u0 = P.u_exact(t=0, seed=P.comm.rank, noise_level=1e-3) + u0_with_pressure = P.solve_system(u0, 1e-9, u0) + P.cached_factorizations.pop(1e-9) + return u0_with_pressure, 0 + else: + from pySDC.helpers.fieldsIO import FieldsIO + + 
P.setUpFieldsIO() + outfile = FieldsIO.fromFile(self.get_file_name()) + + t0, solution = outfile.readField(restart_idx) + + u0 = P.u_init + + if P.spectral_space: + u0[...] = P.transform(solution) + else: + u0[...] = solution + + return u0, t0 + + def prepare_caches(self, prob): + """ + Cache the fft objects, which are expensive to create on GPU because graphs have to be initialized. + """ + prob.eval_f(prob.u_init) + + +class RBC3Dverification(RayleighBenard3DRegular): + converged = 0 + dt = 1e-2 + ic_config = None + res = None + Ra = None + Tend = 100 + res_ratio = 4 + gamma = 4 + + def get_file_name(self): + res = self.args['res'] + dt = self.args['dt'] + return f'{self.base_path}/data/{type(self).__name__}-res{res}-dt{dt:.0e}.pySDC' + + def get_description(self, *args, res=-1, dt=-1, **kwargs): + desc = super().get_description(*args, **kwargs) + desc['level_params']['nsweeps'] = 4 + desc['level_params']['restol'] = -1 + desc['step_params']['maxiter'] = 1 + desc['sweeper_params']['QI'] = 'MIN-SR-S' + desc['sweeper_params']['skip_residual_computation'] = ('IT_CHECK', 'IT_DOWN', 'IT_UP', 'IT_FINE', 'IT_COARSE') + desc['sweeper_params']['num_nodes'] = 4 + Ra = int(type(self).__name__[-3]) * 10 ** int(type(self).__name__[-1]) + desc['problem_params']['Rayleigh'] = Ra + desc['problem_params']['Prandtl'] = 0.7 + + _res = self.res if res == -1 else res + desc['problem_params']['nx'] = _res * self.res_ratio + desc['problem_params']['ny'] = _res * self.res_ratio + desc['problem_params']['nz'] = _res + + _dt = self.dt if dt == -1 else dt + desc['level_params']['dt'] = _dt + + desc['problem_params']['Lx'] = float(self.gamma) + desc['problem_params']['Ly'] = float(self.gamma) + desc['problem_params']['Lz'] = 1.0 + return desc + + def get_initial_condition(self, P, *args, restart_idx=0, **kwargs): + if self.ic_config is None or restart_idx != 0: + return super().get_initial_condition(P, *args, restart_idx=restart_idx, **kwargs) + + # read initial conditions + from pySDC.helpers.fieldsIO import FieldsIO + + ic_config = self.ic_config(args={**self.args, 'res': -1, 'dt': -1}) + desc = ic_config.get_description() + ic_nx = desc['problem_params']['nx'] + ic_ny = desc['problem_params']['ny'] + ic_nz = desc['problem_params']['nz'] + + _P = type(P)(nx=ic_nx, ny=ic_ny, nz=ic_nz, comm=P.comm, useGPU=P.useGPU) + _P.setUpFieldsIO() + filename = ic_config.get_file_name() + ic_file = FieldsIO.fromFile(filename) + t0, ics = ic_file.readField(-1) + P.logger.info(f'Loaded initial conditions from {filename!r} at t={t0}.') + + # interpolate the initial conditions using padded transforms + padding = (P.nx / ic_nx, P.ny / ic_ny, P.nz / ic_nz) + + ics = _P.xp.array(ics) + _ics_hat = _P.transform(ics) + ics_interpolated = _P.itransform(_ics_hat, padding=padding) + + self.get_LogToFile() + + P.setUpFieldsIO() + if P.spectral_space: + u0_hat = P.u_init_forward + u0_hat[...] 
= P.transform(ics_interpolated) + return u0_hat, 0 + else: + return ics_interpolated, 0 + + +class RBC3DM2K3(RBC3Dverification): + + def get_description(self, *args, **kwargs): + desc = super().get_description(*args, **kwargs) + desc['level_params']['nsweeps'] = 3 + desc['sweeper_params']['num_nodes'] = 2 + return desc + + +class RBC3DM3K4(RBC3Dverification): + + def get_description(self, *args, **kwargs): + desc = super().get_description(*args, **kwargs) + desc['level_params']['nsweeps'] = 4 + desc['sweeper_params']['num_nodes'] = 3 + return desc + + +class RBC3DverificationRK(RBC3Dverification): + + def get_description(self, *args, res=-1, dt=-1, **kwargs): + from pySDC.implementations.sweeper_classes.Runge_Kutta import ARK3 + + desc = super().get_description(*args, res=res, dt=dt, **kwargs) + desc['level_params']['nsweeps'] = 1 + desc['level_params']['restol'] = -1 + desc['step_params']['maxiter'] = 1 + desc['sweeper_params']['skip_residual_computation'] = ('IT_CHECK', 'IT_DOWN', 'IT_UP', 'IT_FINE', 'IT_COARSE') + desc['sweeper_params'].pop('QI') + desc['sweeper_params'].pop('num_nodes') + desc['sweeper_class'] = ARK3 + return desc + + +class RBC3DverificationEuler(RBC3DverificationRK): + + def get_description(self, *args, res=-1, dt=-1, **kwargs): + from pySDC.implementations.sweeper_classes.Runge_Kutta import IMEXEulerStifflyAccurate + + desc = super().get_description(*args, res=res, dt=dt, **kwargs) + desc['sweeper_class'] = IMEXEulerStifflyAccurate + return desc + + +class RBC3DG4R4Ra1e5(RBC3Dverification): + Tend = 200 + dt = 6e-2 + ic_config = None + res = 32 + converged = 50 + + +class RBC3DG4R4SDC23Ra1e5(RBC3DM2K3): + Tend = 200 + dt = 6e-2 + ic_config = None + res = 32 + converged = 50 + + +class RBC3DG4R4SDC34Ra1e5(RBC3DM3K4): + Tend = 200 + dt = 6e-2 + ic_config = None + res = 32 + converged = 50 + + +class RBC3DG4R4RKRa1e5(RBC3DverificationRK): + Tend = 200 + dt = 8e-2 + ic_config = None + res = 32 + converged = 50 + + +class RBC3DG4R4EulerRa1e5(RBC3DverificationEuler): + Tend = 200 + dt = 8e-2 + ic_config = None + res = 32 + converged = 50 diff --git a/pySDC/projects/GPU/configs/base_config.py b/pySDC/projects/GPU/configs/base_config.py index 565af7221a..fe0754bc31 100644 --- a/pySDC/projects/GPU/configs/base_config.py +++ b/pySDC/projects/GPU/configs/base_config.py @@ -7,6 +7,8 @@ def get_config(args): name = args['config'] if name[:2] == 'GS': from pySDC.projects.GPU.configs.GS_configs import get_config as _get_config + elif name[:5] == 'RBC3D': + from pySDC.projects.GPU.configs.RBC3D_configs import get_config as _get_config elif name[:3] == 'RBC': from pySDC.projects.GPU.configs.RBC_configs import get_config as _get_config else: @@ -68,7 +70,9 @@ def __init__(self, args, comm_world=None): self.comm_world = MPI.COMM_WORLD if comm_world is None else comm_world self.n_procs_list = args["procs"] if args['mode'] == 'run': - self.comms = get_comms(n_procs_list=self.n_procs_list, useGPU=args['useGPU'], comm_world=self.comm_world) + self.comms = get_comms( + n_procs_list=self.n_procs_list[::-1], useGPU=args['useGPU'], comm_world=self.comm_world + )[::-1] else: self.comms = [MPI.COMM_SELF, MPI.COMM_SELF, MPI.COMM_SELF] self.ranks = [me.rank for me in self.comms] @@ -199,9 +203,12 @@ def merge_all_stats(self, controller): stats = {} for i in range(hook.counter - 1): - with open(self.get_stats_path(index=i), 'rb') as file: - _stats = pickle.load(file) - stats = {**stats, **_stats} + try: + with open(self.get_stats_path(index=i), 'rb') as file: + _stats = pickle.load(file) + stats = 
{**stats, **_stats} + except (FileNotFoundError, EOFError): + print(f'Warning: No stats found at path {self.get_stats_path(index=i)}') stats = {**stats, **controller.return_stats()} return stats diff --git a/pySDC/projects/GPU/run_experiment.py b/pySDC/projects/GPU/run_experiment.py index c39bdf353d..11f94a2c0a 100644 --- a/pySDC/projects/GPU/run_experiment.py +++ b/pySDC/projects/GPU/run_experiment.py @@ -22,6 +22,7 @@ def str_to_procs(me): parser.add_argument('--restart_idx', type=int, help='Restart from file by index', default=0) parser.add_argument('--procs', type=str_to_procs, help='Processes in steps/sweeper/space', default='1/1/1') parser.add_argument('--res', type=int, help='Space resolution along first axis', default=-1) + parser.add_argument('--dt', type=float, help='(Starting) Step size', default=-1) parser.add_argument( '--logger_level', type=int, help='Logger level on the first rank in space and in the sweeper', default=15 ) @@ -42,7 +43,7 @@ def run_experiment(args, config, **kwargs): os.makedirs(f'{args["o"]}/data', exist_ok=True) description = config.get_description( - useGPU=args['useGPU'], MPIsweeper=args['procs'][1] > 1, res=args['res'], **kwargs + useGPU=args['useGPU'], MPIsweeper=args['procs'][1] > 1, res=args['res'], dt=args['dt'], **kwargs ) controller_params = config.get_controller_params(logger_level=args['logger_level']) diff --git a/pySDC/tests/test_sweepers/test_Runge_Kutta_sweeper.py b/pySDC/tests/test_sweepers/test_Runge_Kutta_sweeper.py index eee15b791f..1a1c1afba5 100644 --- a/pySDC/tests/test_sweepers/test_Runge_Kutta_sweeper.py +++ b/pySDC/tests/test_sweepers/test_Runge_Kutta_sweeper.py @@ -25,6 +25,7 @@ 'ARK54', 'ARK548L2SA', 'IMEXEuler', + 'IMEXEulerStifflyAccurate', 'ARK32', 'ARK2', 'ARK3', @@ -168,6 +169,7 @@ def test_order(sweeper_name, useGPU=False): 'ARK54': 6, 'ARK548L2SA': 6, 'IMEXEuler': 2, + 'IMEXEulerStifflyAccurate': 2, 'ARK2': 3, 'ARK3': 4, 'ARK32': 4, @@ -185,6 +187,7 @@ def test_order(sweeper_name, useGPU=False): 'ARK54': 5e-2, 'ARK548L2SA': 5e-2, 'IMEXEuler': 1e-2, + 'IMEXEulerStifflyAccurate': 6e-2, } lambdas = [[-1.0e-1 + 0j]] From 84cb1d7a1713f5eb062beb4b28e7f199b1a4e942 Mon Sep 17 00:00:00 2001 From: Thomas Baumann <39156931+brownbaerchen@users.noreply.github.com> Date: Thu, 6 Nov 2025 09:28:00 +0100 Subject: [PATCH 2/6] Added tests for processing of 3D RBC --- .../analysis_scripts/compute_RBC3D_data.py | 207 ----------------- .../projects/GPU/analysis_scripts/plot_Nu.py | 27 +-- .../analysis_scripts/process_RBC3D_data.py | 209 ++++++++++++++++++ .../GPU/tests/test_RBC_3D_analysis.py | 110 +++++++++ 4 files changed, 328 insertions(+), 225 deletions(-) delete mode 100644 pySDC/projects/GPU/analysis_scripts/compute_RBC3D_data.py create mode 100644 pySDC/projects/GPU/analysis_scripts/process_RBC3D_data.py create mode 100644 pySDC/projects/GPU/tests/test_RBC_3D_analysis.py diff --git a/pySDC/projects/GPU/analysis_scripts/compute_RBC3D_data.py b/pySDC/projects/GPU/analysis_scripts/compute_RBC3D_data.py deleted file mode 100644 index f84320b6c8..0000000000 --- a/pySDC/projects/GPU/analysis_scripts/compute_RBC3D_data.py +++ /dev/null @@ -1,207 +0,0 @@ -from pySDC.projects.GPU.configs.base_config import get_config -from pySDC.projects.GPU.run_experiment import parse_args -from pySDC.helpers.fieldsIO import FieldsIO -import matplotlib.pyplot as plt -from tqdm import tqdm -from mpi4py import MPI -import numpy as np -import pickle -import os - -# global configs -BASE_PATH = 'data/RBC_time_averaged' - - -# prepare problem instance -args = 
parse_args() -comm = MPI.COMM_WORLD -config = get_config(args) -desc = config.get_description(**args) -P = desc['problem_class']( - **{ - **desc['problem_params'], - 'spectral_space': False, - 'comm': comm, - 'Dirichlet_recombination': False, - 'left_preconditioner': False, - } -) -P.setUpFieldsIO() -zInt = P.axes[-1].get_integration_weights() -xp = P.xp - -# prepare paths -os.makedirs(BASE_PATH, exist_ok=True) -fname = config.get_file_name() -start = fname.index('data') -path = f'{BASE_PATH}/{fname[start + 5:-6]}.pickle' - -# open simulation data -data = FieldsIO.fromFile(fname) - -# prepare arrays to store data in -Nu = { - 'V': [], - 'b': [], - 't': [], - 'thermal': [], - 'kinetic': [], -} -t = [] -T = [] -profiles = {key: [] for key in ['T', 'u', 'v', 'w']} -rms_profiles = {key: [] for key in profiles.keys()} -spectrum = [] -spectrum_all = [] - - -# try to load time averaged values -u_mean_profile = P.u_exact() -if os.path.isfile(path): - with open(path, 'rb') as file: - avg_data = pickle.load(file) - if comm.rank == 0: - print(f'Read data from file {path!r}') - for key in profiles.keys(): - if f'profile_{key}' in avg_data.keys(): - u_mean_profile[P.index(key)] = avg_data[f'profile_{key}'][P.local_slice(False)[-1]] -elif comm.rank == 0: - print('No mean profiles available yet. Please rerun script after completion to get correct RMS profiles') - -# prepare progress bar -indeces = range(args['restart_idx'], data.nFields) -if P.comm.rank == 0: - indeces = tqdm(indeces) - -# loop through all data points and compute stuff -for i in indeces: - _t, u = data.readField(i) - - # Nusselt numbers - _Nu = P.compute_Nusselt_numbers(u) - if any(me > 1e3 for me in _Nu.values()): - continue - - for key in Nu.keys(): - Nu[key].append(_Nu[key]) - - t.append(_t) - - # profiles - _profiles = P.get_vertical_profiles(u, list(profiles.keys())) - _rms_profiles = P.get_vertical_profiles((u - u_mean_profile) ** 2, list(profiles.keys())) - for key in profiles.keys(): - profiles[key].append(_profiles[key]) - rms_profiles[key].append(_rms_profiles[key]) - - # spectrum - k, s = P.get_frequency_spectrum(u) - s_mean = zInt @ P.axes[-1].transform(s[0], axes=(0,)) - spectrum.append(s_mean) - spectrum_all.append(s) - - -# make a plot of the results -t = xp.array(t) -z = P.axes[-1].get_1dgrid() - -if config.converged == 0: - print('Warning: no convergence has been set for this configuration!') - -fig, axs = plt.subplots(1, 4, figsize=(18, 4)) -for key in Nu.keys(): - axs[0].plot(t, Nu[key], label=f'$Nu_{{{key}}}$') - if config.converged > 0: - axs[0].axvline(config.converged, color='black') -axs[0].set_ylabel('$Nu$') -axs[0].set_xlabel('$t$') -axs[0].legend(frameon=False) - -# compute differences in Nusselt numbers -avg_Nu = {} -std_Nu = {} -for key in Nu.keys(): - _Nu = [Nu[key][i] for i in range(len(Nu[key])) if t[i] > config.converged] - avg_Nu[key] = xp.mean(_Nu) - std_Nu[key] = xp.std(_Nu) - -rel_error = { - key: abs(avg_Nu[key] - avg_Nu['V']) / avg_Nu['V'] - for key in [ - 't', - 'b', - 'thermal', - 'kinetic', - ] -} -if comm.rank == 0: - print( - f'With Ra={P.Rayleigh:.0e} got Nu={avg_Nu["V"]:.2f}+-{std_Nu["V"]:.2f} with errors: Top {rel_error["t"]:.2e}, bottom: {rel_error["b"]:.2e}, thermal: {rel_error["thermal"]:.2e}, kinetic: {rel_error["kinetic"]:.2e}' - ) - - -# compute average profiles -avg_profiles = {} -for key, values in profiles.items(): - values_from_convergence = [values[i] for i in range(len(values)) if t[i] >= config.converged] - - avg_profiles[key] = xp.mean(values_from_convergence, axis=0) - 
-avg_rms_profiles = {} -for key, values in rms_profiles.items(): - values_from_convergence = [values[i] for i in range(len(values)) if t[i] >= config.converged] - avg_rms_profiles[key] = xp.sqrt(xp.mean(values_from_convergence, axis=0)) - - -# average T -avg_T = avg_profiles['T'] -axs[1].axvline(0.5, color='black') -axs[1].plot(avg_T, z) -axs[1].set_xlabel('$T$') -axs[1].set_ylabel('$z$') - -# rms profiles -avg_T = avg_rms_profiles['T'] -max_idx = xp.argmax(avg_T) -res_in_boundary_layer = max_idx if max_idx < len(z) / 2 else len(z) - max_idx -boundary_layer = z[max_idx] if max_idx > len(z) / 2 else P.axes[-1].L - z[max_idx] -if comm.rank == 0: - print(f'Thermal boundary layer of thickness {boundary_layer:.2f} is resolved with {res_in_boundary_layer} points') -axs[2].axhline(z[max_idx], color='black') -axs[2].plot(avg_T, z) -axs[2].scatter(avg_T, z) -axs[2].set_xlabel(r'$T_\text{rms}$') -axs[2].set_ylabel('$z$') - -# spectrum -_s = xp.array(spectrum) -avg_spectrum = xp.mean(_s[t >= config.converged], axis=0) -axs[3].loglog(k[avg_spectrum > 1e-15], avg_spectrum[avg_spectrum > 1e-15]) -axs[3].set_xlabel('$k$') -axs[3].set_ylabel(r'$\|\hat{u}_x\|$') - -if P.comm.rank == 0: - write_data = { - 't': t, - 'Nu': Nu, - 'avg_Nu': avg_Nu, - 'std_Nu': std_Nu, - 'z': P.axes[-1].get_1dgrid(), - 'k': k, - 'spectrum': spectrum_all, - 'avg_spectrum': avg_spectrum, - 'boundary_layer_thickness': boundary_layer, - 'res_in_boundary_layer': res_in_boundary_layer, - } - for key, values in avg_profiles.items(): - write_data[f'profile_{key}'] = values - for key, values in avg_rms_profiles.items(): - write_data[f'rms_profile_{key}'] = values - - with open(path, 'wb') as file: - pickle.dump(write_data, file) - print(f'Wrote data to file {path!r}') - - fig.tight_layout() - fig.savefig(f'{BASE_PATH}/{fname[start + 5:-6]}.pdf') - plt.show() diff --git a/pySDC/projects/GPU/analysis_scripts/plot_Nu.py b/pySDC/projects/GPU/analysis_scripts/plot_Nu.py index 133a5fdd37..94fcbf707e 100644 --- a/pySDC/projects/GPU/analysis_scripts/plot_Nu.py +++ b/pySDC/projects/GPU/analysis_scripts/plot_Nu.py @@ -7,15 +7,8 @@ setup_mpl() -def get_pySDC_data(Ra, RK=False, res=-1, dt=-1, config_name='RBC3DG4'): - assert type(Ra) == str - - base_path = 'data/RBC_time_averaged' - - if RK: - config_name = f'{config_name}RK' - - path = f'{base_path}/{config_name}Ra{Ra}-res{res}-dt{dt:.0e}.pickle' +def get_pySDC_data(res=-1, dt=-1, config_name='RBC3DG4', base_path='data/RBC_time_averaged'): + path = f'{base_path}/{config_name}-res{res}-dt{dt:.0e}.pickle' with open(path, 'rb') as file: data = pickle.load(file) @@ -32,13 +25,13 @@ def interpolate_NuV_to_reference_times(data, reference_data, order=12): return interpolation_matrix @ t_in, interpolation_matrix @ data['Nu']['V'] -def plot_Nu(Ra, res, dts, config_name, ref, ax, title): # pragma: no cover +def plot_Nu(res, dts, config_name, ref, ax, title): # pragma: no cover ax.plot(ref['t'], ref['Nu']['V'], color='black', ls='--') ax.set_title(title) Nu_ref = np.array(ref['Nu']['V']) for dt in dts: - data = get_pySDC_data(Ra=Ra, res=res, dt=dt, config_name=config_name) + data = get_pySDC_data(res=res, dt=dt, config_name=config_name) t_i, Nu_i = interpolate_NuV_to_reference_times(data, ref) ax.plot(t_i, Nu_i, label=rf'$\Delta t={{{dt}}}$') @@ -62,29 +55,27 @@ def plot_Nu(Ra, res, dts, config_name, ref, ax, title): # pragma: no cover def plot_Nu_over_time_Ra1e5(): # pragma: no cover Nu_fig, Nu_axs = plt.subplots(4, 1, sharex=True, sharey=True, figsize=figsize_by_journal('Nature_CS', 1, 1.4)) - Ra = '1e5' 
res = 32 - ref_data = get_pySDC_data(Ra, res=res, dt=0.01, config_name='RBC3DG4R4') + ref_data = get_pySDC_data(res=res, dt=0.01, config_name='RBC3DG4R4Ra1e5') _Nu_axs = {'SDC 3': Nu_axs[1], 'SDC': Nu_axs[0], 'RK': Nu_axs[2], 'Euler': Nu_axs[3]} plot_Nu( - '1e5', 32, [ 0.06, 0.04, 0.02, ], - 'RBC3DG4R4SDC34', + 'RBC3DG4R4SDC34Ra1e5', ref_data, Nu_axs[0], 'SDC34', ) - plot_Nu('1e5', 32, [0.06, 0.05, 0.02, 0.01], 'RBC3DG4R4SDC23', ref_data, Nu_axs[1], 'SDC23') - plot_Nu('1e5', 32, [0.05, 0.04, 0.02, 0.01, 0.005], 'RBC3DG4R4RK', ref_data, Nu_axs[2], 'RK443') - plot_Nu('1e5', 32, [0.02, 0.01, 0.005], 'RBC3DG4R4Euler', ref_data, Nu_axs[3], 'RK111') + plot_Nu(32, [0.06, 0.05, 0.02, 0.01], 'RBC3DG4R4SDC23Ra1e5', ref_data, Nu_axs[1], 'SDC23') + plot_Nu(32, [0.05, 0.04, 0.02, 0.01, 0.005], 'RBC3DG4R4RKRa1e5', ref_data, Nu_axs[2], 'RK443') + plot_Nu(32, [0.02, 0.01, 0.005], 'RBC3DG4R4EulerRa1e5', ref_data, Nu_axs[3], 'RK111') Nu_axs[-1].set_xlabel('$t$') Nu_axs[-1].set_ylabel('$Nu$') diff --git a/pySDC/projects/GPU/analysis_scripts/process_RBC3D_data.py b/pySDC/projects/GPU/analysis_scripts/process_RBC3D_data.py new file mode 100644 index 0000000000..ecfc5725e9 --- /dev/null +++ b/pySDC/projects/GPU/analysis_scripts/process_RBC3D_data.py @@ -0,0 +1,209 @@ +from pySDC.projects.GPU.configs.base_config import get_config +from pySDC.projects.GPU.run_experiment import parse_args +from pySDC.helpers.fieldsIO import FieldsIO +import matplotlib.pyplot as plt +from tqdm import tqdm +from mpi4py import MPI +import numpy as np +import pickle +import os + +def process_RBC3D_data(base_path='./data/RBC_time_averaged', plot=True, args=None, config=None): + # prepare problem instance + args = args if args else parse_args() + comm = MPI.COMM_WORLD + config = config if config else get_config(args) + desc = config.get_description(**args) + P = desc['problem_class']( + **{ + **desc['problem_params'], + 'spectral_space': False, + 'comm': comm, + 'Dirichlet_recombination': False, + 'left_preconditioner': False, + } + ) + P.setUpFieldsIO() + zInt = P.axes[-1].get_integration_weights() + xp = P.xp + + # prepare paths + os.makedirs(base_path, exist_ok=True) + fname = config.get_file_name() + fname_trim = fname[fname.index('RBC3D'):fname.index('.pySDC')] + path = f'{base_path}/{fname_trim}.pickle' + + + # open simulation data + data = FieldsIO.fromFile(fname) + + # prepare arrays to store data in + Nu = { + 'V': [], + 'b': [], + 't': [], + 'thermal': [], + 'kinetic': [], + } + t = [] + profiles = {key: [] for key in ['T', 'u', 'v', 'w']} + rms_profiles = {key: [] for key in profiles.keys()} + spectrum = [] + spectrum_all = [] + + + # try to load time averaged values + u_mean_profile = P.u_exact() + if os.path.isfile(path): + with open(path, 'rb') as file: + avg_data = pickle.load(file) + if comm.rank == 0: + print(f'Read data from file {path!r}') + for key in profiles.keys(): + if f'profile_{key}' in avg_data.keys(): + u_mean_profile[P.index(key)] = avg_data[f'profile_{key}'][P.local_slice(False)[-1]] + elif comm.rank == 0: + print('No mean profiles available yet. 
Please rerun script after completion to get correct RMS profiles') + + # prepare progress bar + indeces = range(args['restart_idx'], data.nFields) + if P.comm.rank == 0: + indeces = tqdm(indeces) + + # loop through all data points and compute stuff + for i in indeces: + _t, u = data.readField(i) + + # Nusselt numbers + _Nu = P.compute_Nusselt_numbers(u) + if any(me > 1e3 for me in _Nu.values()): + continue + + for key in Nu.keys(): + Nu[key].append(_Nu[key]) + + t.append(_t) + + # profiles + _profiles = P.get_vertical_profiles(u, list(profiles.keys())) + _rms_profiles = P.get_vertical_profiles((u - u_mean_profile) ** 2, list(profiles.keys())) + for key in profiles.keys(): + profiles[key].append(_profiles[key]) + rms_profiles[key].append(_rms_profiles[key]) + + # spectrum + k, s = P.get_frequency_spectrum(u) + s_mean = zInt @ P.axes[-1].transform(s[0], axes=(0,)) + spectrum.append(s_mean) + spectrum_all.append(s) + + + # make a plot of the results + t = xp.array(t) + z = P.axes[-1].get_1dgrid() + + if config.converged == 0: + print('Warning: no convergence has been set for this configuration!') + + fig, axs = plt.subplots(1, 4, figsize=(18, 4)) + for key in Nu.keys(): + axs[0].plot(t, Nu[key], label=f'$Nu_{{{key}}}$') + if config.converged > 0: + axs[0].axvline(config.converged, color='black') + axs[0].set_ylabel('$Nu$') + axs[0].set_xlabel('$t$') + axs[0].legend(frameon=False) + + # compute differences in Nusselt numbers + avg_Nu = {} + std_Nu = {} + for key in Nu.keys(): + _Nu = [Nu[key][i] for i in range(len(Nu[key])) if t[i] > config.converged] + avg_Nu[key] = xp.mean(_Nu) + std_Nu[key] = xp.std(_Nu) + + rel_error = { + key: abs(avg_Nu[key] - avg_Nu['V']) / avg_Nu['V'] + for key in [ + 't', + 'b', + 'thermal', + 'kinetic', + ] + } + if comm.rank == 0: + print( + f'With Ra={P.Rayleigh:.0e} got Nu={avg_Nu["V"]:.2f}+-{std_Nu["V"]:.2f} with errors: Top {rel_error["t"]:.2e}, bottom: {rel_error["b"]:.2e}, thermal: {rel_error["thermal"]:.2e}, kinetic: {rel_error["kinetic"]:.2e}' + ) + + + # compute average profiles + avg_profiles = {} + for key, values in profiles.items(): + values_from_convergence = [values[i] for i in range(len(values)) if t[i] >= config.converged] + + avg_profiles[key] = xp.mean(values_from_convergence, axis=0) + + avg_rms_profiles = {} + for key, values in rms_profiles.items(): + values_from_convergence = [values[i] for i in range(len(values)) if t[i] >= config.converged] + avg_rms_profiles[key] = xp.sqrt(xp.mean(values_from_convergence, axis=0)) + + + # average T + avg_T = avg_profiles['T'] + axs[1].axvline(0.5, color='black') + axs[1].plot(avg_T, z) + axs[1].set_xlabel('$T$') + axs[1].set_ylabel('$z$') + + # rms profiles + avg_T = avg_rms_profiles['T'] + max_idx = xp.argmax(avg_T) + res_in_boundary_layer = max_idx if max_idx < len(z) / 2 else len(z) - max_idx + boundary_layer = z[max_idx] if max_idx > len(z) / 2 else P.axes[-1].L - z[max_idx] + if comm.rank == 0: + print(f'Thermal boundary layer of thickness {boundary_layer:.2f} is resolved with {res_in_boundary_layer} points') + axs[2].axhline(z[max_idx], color='black') + axs[2].plot(avg_T, z) + axs[2].scatter(avg_T, z) + axs[2].set_xlabel(r'$T_\text{rms}$') + axs[2].set_ylabel('$z$') + + # spectrum + _s = xp.array(spectrum) + avg_spectrum = xp.mean(_s[t >= config.converged], axis=0) + axs[3].loglog(k[avg_spectrum > 1e-15], avg_spectrum[avg_spectrum > 1e-15]) + axs[3].set_xlabel('$k$') + axs[3].set_ylabel(r'$\|\hat{u}_x\|$') + + if P.comm.rank == 0: + write_data = { + 't': t, + 'Nu': Nu, + 'avg_Nu': avg_Nu, + 'std_Nu': 
std_Nu, + 'z': P.axes[-1].get_1dgrid(), + 'k': k, + 'spectrum': spectrum_all, + 'avg_spectrum': avg_spectrum, + 'boundary_layer_thickness': boundary_layer, + 'res_in_boundary_layer': res_in_boundary_layer, + } + for key, values in avg_profiles.items(): + write_data[f'profile_{key}'] = values + for key, values in avg_rms_profiles.items(): + write_data[f'rms_profile_{key}'] = values + + with open(path, 'wb') as file: + pickle.dump(write_data, file) + print(f'Wrote data to file {path!r}') + + if plot: + fig.tight_layout() + fig.savefig(f'{base_path}/{fname_trim}.pdf') + plt.show() + return path + +if __name__ == '__main__': + process_RBC3D_data() diff --git a/pySDC/projects/GPU/tests/test_RBC_3D_analysis.py b/pySDC/projects/GPU/tests/test_RBC_3D_analysis.py new file mode 100644 index 0000000000..d8accd5dfb --- /dev/null +++ b/pySDC/projects/GPU/tests/test_RBC_3D_analysis.py @@ -0,0 +1,110 @@ +import pytest + + +def get_args(path): + args = {} + + args['mode'] = 'run' + args['dt'] = 0.1 + args['res'] = 4 + args['config'] = 'RBC3DG4R4SDC23Ra1e5' + args['o'] = path + args['useGPU'] = False + args['restart_idx'] = 0 + args['procs'] = [1, 1, 1] + args['logger_level'] = 15 + + return args + + +def get_config(args): + from pySDC.projects.GPU.configs.base_config import get_config + + config = get_config(args) + config.Tend = 1 + config.converged = 0 + return config + + +def generate_simulation_file(path): + from pySDC.projects.GPU.run_experiment import run_experiment + + args = get_args(path) + config = get_config(args) + + run_experiment(args, config) + return f'{path}/{config.get_file_name()}' + + +def generate_processed_file(path): + from pySDC.projects.GPU.analysis_scripts.process_RBC3D_data import process_RBC3D_data + + args = get_args(path) + config = get_config(args) + process_RBC3D_data(path, args=args, config=config, plot=False) + return process_RBC3D_data(path, args=args, config=config, plot=False) + + +@pytest.fixture +def tmp_sim_data(tmp_path): + return generate_simulation_file(tmp_path) + + +@pytest.fixture +def tmp_processed_data(tmp_sim_data, tmp_path): + return generate_processed_file(tmp_path) + + +def test_processing(tmp_processed_data): + import pickle + + with open(tmp_processed_data, 'rb') as file: + data = pickle.load(file) + + for me in ['t', 'Nu', 'spectrum', 'z', 'k', 'profile_T', 'rms_profile_T']: + assert me in data.keys() + + +def test_get_pySDC_data(tmp_processed_data, tmp_path): + from pySDC.projects.GPU.analysis_scripts.plot_Nu import get_pySDC_data + + args = get_args(tmp_path) + data = get_pySDC_data(res=args['res'], dt=args['dt'], config_name=args['config'], base_path=tmp_path) + + for me in ['t', 'Nu', 'spectrum', 'z', 'k', 'profile_T', 'rms_profile_T']: + assert me in data.keys() + + +@pytest.mark.base +def test_Nu_interpolation(): + from pySDC.projects.GPU.analysis_scripts.plot_Nu import interpolate_NuV_to_reference_times + import numpy as np + + t = sorted(np.random.rand(128)) + t_ref = np.linspace(0, max(t), 128) + + def _get_Nu(_t): + return np.array(_t) ** 8 + + ref_data = {'t': t_ref, 'Nu': {'V': _get_Nu(t_ref)}} + data = {'t': t, 'Nu': {'V': _get_Nu(t)}} + + # interpolate to sufficient order + tI, NuI = interpolate_NuV_to_reference_times(data, ref_data, order=8) + assert np.allclose(NuI, ref_data['Nu']['V']) + assert not np.allclose(data['Nu']['V'], ref_data['Nu']['V']) + assert np.allclose(tI, ref_data['t']) + + # interpolate to insufficient order + tI, NuI = interpolate_NuV_to_reference_times(data, ref_data, order=4) + assert not np.allclose(NuI, 
ref_data['Nu']['V']) + assert not np.allclose(data['Nu']['V'], ref_data['Nu']['V']) + assert np.allclose(tI, ref_data['t']) + + +if __name__ == '__main__': + path = 'tmp' + # generate_simulation_file(path) + processed_path = generate_processed_file(path) + # test_processing(processed_path) + test_get_pySDC_data(None, path) From 2f0df966c89a0943de698ea9a04893cefbe40e8e Mon Sep 17 00:00:00 2001 From: Thomas Baumann <39156931+brownbaerchen@users.noreply.github.com> Date: Thu, 6 Nov 2025 09:57:11 +0100 Subject: [PATCH 3/6] Added test for interpolation of initial conditions --- .../projects/GPU/analysis_scripts/plot_Nu.py | 15 +----------- pySDC/projects/GPU/configs/RBC3D_configs.py | 20 +++++++++------- .../GPU/tests/test_RBC_3D_analysis.py | 24 +++++++++++-------- 3 files changed, 26 insertions(+), 33 deletions(-) diff --git a/pySDC/projects/GPU/analysis_scripts/plot_Nu.py b/pySDC/projects/GPU/analysis_scripts/plot_Nu.py index 94fcbf707e..f0b9ba0e62 100644 --- a/pySDC/projects/GPU/analysis_scripts/plot_Nu.py +++ b/pySDC/projects/GPU/analysis_scripts/plot_Nu.py @@ -59,20 +59,7 @@ def plot_Nu_over_time_Ra1e5(): # pragma: no cover ref_data = get_pySDC_data(res=res, dt=0.01, config_name='RBC3DG4R4Ra1e5') - _Nu_axs = {'SDC 3': Nu_axs[1], 'SDC': Nu_axs[0], 'RK': Nu_axs[2], 'Euler': Nu_axs[3]} - - plot_Nu( - 32, - [ - 0.06, - 0.04, - 0.02, - ], - 'RBC3DG4R4SDC34Ra1e5', - ref_data, - Nu_axs[0], - 'SDC34', - ) + plot_Nu(32, [0.06, 0.04, 0.02], 'RBC3DG4R4SDC34Ra1e5', ref_data, Nu_axs[0], 'SDC34') plot_Nu(32, [0.06, 0.05, 0.02, 0.01], 'RBC3DG4R4SDC23Ra1e5', ref_data, Nu_axs[1], 'SDC23') plot_Nu(32, [0.05, 0.04, 0.02, 0.01, 0.005], 'RBC3DG4R4RKRa1e5', ref_data, Nu_axs[2], 'RK443') plot_Nu(32, [0.02, 0.01, 0.005], 'RBC3DG4R4EulerRa1e5', ref_data, Nu_axs[3], 'RK111') diff --git a/pySDC/projects/GPU/configs/RBC3D_configs.py b/pySDC/projects/GPU/configs/RBC3D_configs.py index 37db0e35bb..e12eb2e192 100644 --- a/pySDC/projects/GPU/configs/RBC3D_configs.py +++ b/pySDC/projects/GPU/configs/RBC3D_configs.py @@ -121,7 +121,11 @@ def prepare_caches(self, prob): class RBC3Dverification(RayleighBenard3DRegular): converged = 0 dt = 1e-2 - ic_config = None + ic_config = { + 'config': None, + 'res': -1, + 'dt': -1, + } res = None Ra = None Tend = 100 @@ -159,14 +163,16 @@ def get_description(self, *args, res=-1, dt=-1, **kwargs): return desc def get_initial_condition(self, P, *args, restart_idx=0, **kwargs): - if self.ic_config is None or restart_idx != 0: + if self.ic_config['config'] is None or restart_idx != 0: return super().get_initial_condition(P, *args, restart_idx=restart_idx, **kwargs) # read initial conditions from pySDC.helpers.fieldsIO import FieldsIO - ic_config = self.ic_config(args={**self.args, 'res': -1, 'dt': -1}) - desc = ic_config.get_description() + ic_config = self.ic_config['config']( + args={**self.args, 'res': self.ic_config['res'], 'dt': self.ic_config['dt']} + ) + desc = ic_config.get_description(res=self.ic_config['res'], dt=self.ic_config['dt']) ic_nx = desc['problem_params']['nx'] ic_ny = desc['problem_params']['ny'] ic_nz = desc['problem_params']['nz'] @@ -180,6 +186,7 @@ def get_initial_condition(self, P, *args, restart_idx=0, **kwargs): # interpolate the initial conditions using padded transforms padding = (P.nx / ic_nx, P.ny / ic_ny, P.nz / ic_nz) + P.logger.info(f'Interpolating initial conditions from {ic_nx}x{ic_ny}x{ic_nz} to {P.nx}x{P.ny}x{P.nz}') ics = _P.xp.array(ics) _ics_hat = _P.transform(ics) @@ -243,7 +250,6 @@ def get_description(self, *args, res=-1, dt=-1, **kwargs): class 
RBC3DG4R4Ra1e5(RBC3Dverification): Tend = 200 dt = 6e-2 - ic_config = None res = 32 converged = 50 @@ -251,7 +257,6 @@ class RBC3DG4R4Ra1e5(RBC3Dverification): class RBC3DG4R4SDC23Ra1e5(RBC3DM2K3): Tend = 200 dt = 6e-2 - ic_config = None res = 32 converged = 50 @@ -259,7 +264,6 @@ class RBC3DG4R4SDC23Ra1e5(RBC3DM2K3): class RBC3DG4R4SDC34Ra1e5(RBC3DM3K4): Tend = 200 dt = 6e-2 - ic_config = None res = 32 converged = 50 @@ -267,7 +271,6 @@ class RBC3DG4R4SDC34Ra1e5(RBC3DM3K4): class RBC3DG4R4RKRa1e5(RBC3DverificationRK): Tend = 200 dt = 8e-2 - ic_config = None res = 32 converged = 50 @@ -275,6 +278,5 @@ class RBC3DG4R4RKRa1e5(RBC3DverificationRK): class RBC3DG4R4EulerRa1e5(RBC3DverificationEuler): Tend = 200 dt = 8e-2 - ic_config = None res = 32 converged = 50 diff --git a/pySDC/projects/GPU/tests/test_RBC_3D_analysis.py b/pySDC/projects/GPU/tests/test_RBC_3D_analysis.py index d8accd5dfb..696f5840e1 100644 --- a/pySDC/projects/GPU/tests/test_RBC_3D_analysis.py +++ b/pySDC/projects/GPU/tests/test_RBC_3D_analysis.py @@ -55,6 +55,20 @@ def tmp_processed_data(tmp_sim_data, tmp_path): return generate_processed_file(tmp_path) +def test_ic_interpolation(tmp_sim_data, tmp_path): + from pySDC.projects.GPU.run_experiment import run_experiment + + args = get_args(tmp_path) + + ic_res = args['res'] * 1 + args['res'] *= 2 + config = get_config(args) + + config.ic_config = {'config': type(config), 'res': ic_res, 'dt': args['dt']} + u = run_experiment(args, config) + assert u.shape[-1] == args['res'] + + def test_processing(tmp_processed_data): import pickle @@ -75,7 +89,6 @@ def test_get_pySDC_data(tmp_processed_data, tmp_path): assert me in data.keys() -@pytest.mark.base def test_Nu_interpolation(): from pySDC.projects.GPU.analysis_scripts.plot_Nu import interpolate_NuV_to_reference_times import numpy as np @@ -98,13 +111,4 @@ def _get_Nu(_t): # interpolate to insufficient order tI, NuI = interpolate_NuV_to_reference_times(data, ref_data, order=4) assert not np.allclose(NuI, ref_data['Nu']['V']) - assert not np.allclose(data['Nu']['V'], ref_data['Nu']['V']) assert np.allclose(tI, ref_data['t']) - - -if __name__ == '__main__': - path = 'tmp' - # generate_simulation_file(path) - processed_path = generate_processed_file(path) - # test_processing(processed_path) - test_get_pySDC_data(None, path) From 271ca6ab8f026cca3d1bbb8fdb80c6b0f4ff7fcc Mon Sep 17 00:00:00 2001 From: Thomas Baumann <39156931+brownbaerchen@users.noreply.github.com> Date: Thu, 6 Nov 2025 09:58:19 +0100 Subject: [PATCH 4/6] Fix linting --- .../GPU/analysis_scripts/process_RBC3D_data.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/pySDC/projects/GPU/analysis_scripts/process_RBC3D_data.py b/pySDC/projects/GPU/analysis_scripts/process_RBC3D_data.py index ecfc5725e9..5fb4e3a501 100644 --- a/pySDC/projects/GPU/analysis_scripts/process_RBC3D_data.py +++ b/pySDC/projects/GPU/analysis_scripts/process_RBC3D_data.py @@ -8,6 +8,7 @@ import pickle import os + def process_RBC3D_data(base_path='./data/RBC_time_averaged', plot=True, args=None, config=None): # prepare problem instance args = args if args else parse_args() @@ -30,10 +31,9 @@ def process_RBC3D_data(base_path='./data/RBC_time_averaged', plot=True, args=Non # prepare paths os.makedirs(base_path, exist_ok=True) fname = config.get_file_name() - fname_trim = fname[fname.index('RBC3D'):fname.index('.pySDC')] + fname_trim = fname[fname.index('RBC3D') : fname.index('.pySDC')] path = f'{base_path}/{fname_trim}.pickle' - # open simulation data data = 
FieldsIO.fromFile(fname) @@ -51,7 +51,6 @@ def process_RBC3D_data(base_path='./data/RBC_time_averaged', plot=True, args=Non spectrum = [] spectrum_all = [] - # try to load time averaged values u_mean_profile = P.u_exact() if os.path.isfile(path): @@ -97,7 +96,6 @@ def process_RBC3D_data(base_path='./data/RBC_time_averaged', plot=True, args=Non spectrum.append(s_mean) spectrum_all.append(s) - # make a plot of the results t = xp.array(t) z = P.axes[-1].get_1dgrid() @@ -136,7 +134,6 @@ def process_RBC3D_data(base_path='./data/RBC_time_averaged', plot=True, args=Non f'With Ra={P.Rayleigh:.0e} got Nu={avg_Nu["V"]:.2f}+-{std_Nu["V"]:.2f} with errors: Top {rel_error["t"]:.2e}, bottom: {rel_error["b"]:.2e}, thermal: {rel_error["thermal"]:.2e}, kinetic: {rel_error["kinetic"]:.2e}' ) - # compute average profiles avg_profiles = {} for key, values in profiles.items(): @@ -149,7 +146,6 @@ def process_RBC3D_data(base_path='./data/RBC_time_averaged', plot=True, args=Non values_from_convergence = [values[i] for i in range(len(values)) if t[i] >= config.converged] avg_rms_profiles[key] = xp.sqrt(xp.mean(values_from_convergence, axis=0)) - # average T avg_T = avg_profiles['T'] axs[1].axvline(0.5, color='black') @@ -163,7 +159,9 @@ def process_RBC3D_data(base_path='./data/RBC_time_averaged', plot=True, args=Non res_in_boundary_layer = max_idx if max_idx < len(z) / 2 else len(z) - max_idx boundary_layer = z[max_idx] if max_idx > len(z) / 2 else P.axes[-1].L - z[max_idx] if comm.rank == 0: - print(f'Thermal boundary layer of thickness {boundary_layer:.2f} is resolved with {res_in_boundary_layer} points') + print( + f'Thermal boundary layer of thickness {boundary_layer:.2f} is resolved with {res_in_boundary_layer} points' + ) axs[2].axhline(z[max_idx], color='black') axs[2].plot(avg_T, z) axs[2].scatter(avg_T, z) @@ -205,5 +203,6 @@ def process_RBC3D_data(base_path='./data/RBC_time_averaged', plot=True, args=Non plt.show() return path + if __name__ == '__main__': process_RBC3D_data() From 4688744184fa111f76b75e4006b9d737854d7c33 Mon Sep 17 00:00:00 2001 From: Thomas Baumann <39156931+brownbaerchen@users.noreply.github.com> Date: Thu, 6 Nov 2025 10:46:48 +0100 Subject: [PATCH 5/6] Added test for vertical profile computation in RBC --- .../test_problems/test_RayleighBenard3D.py | 22 ++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/pySDC/tests/test_problems/test_RayleighBenard3D.py b/pySDC/tests/test_problems/test_RayleighBenard3D.py index 47d52c2cfe..04ad9b5bf1 100644 --- a/pySDC/tests/test_problems/test_RayleighBenard3D.py +++ b/pySDC/tests/test_problems/test_RayleighBenard3D.py @@ -398,6 +398,25 @@ def test_spectrum_computation(mpi_ranks): assert xp.allclose(spectrum[iu, :, 0], 0) +@pytest.mark.mpi4py +def test_vertical_profiles(): + from pySDC.implementations.problem_classes.RayleighBenard3D import RayleighBenard3D + + N = 4 + prob = RayleighBenard3D(nx=N, ny=N, nz=4, dealiasing=1.0, spectral_space=False, Rayleigh=1.0) + xp = prob.xp + iu, iv = prob.index(['u', 'v']) + X, Y, Z = prob.X, prob.Y, prob.Z + z = Z[0, 0] + + u = prob.u_init_physical + u[iu] = (Z**2) * (1 + xp.sin(X * 2 * xp.pi / prob.Lx) + xp.sin(Y * 2 * xp.pi / prob.Ly)) + expect = z**2 + + profile = prob.get_vertical_profiles(u, 'u') + assert xp.allclose(expect, profile['u']) + + if __name__ == '__main__': # test_eval_f(2**2, 2**1, 'x', False) # test_libraries() @@ -406,5 +425,6 @@ def test_spectrum_computation(mpi_ranks): # test_solver_convergence('bicgstab+ilu', 32, False, True) # test_banded_matrix(False) # 
test_heterogeneous_implementation() - test_Nusselt_number_computation(N=6, c=3) + # test_Nusselt_number_computation(N=6, c=3) + test_vertical_profiles() # test_spectrum_computation(None) From efce949b142ff31e7894d37c15882302db91797f Mon Sep 17 00:00:00 2001 From: Thomas Baumann <39156931+brownbaerchen@users.noreply.github.com> Date: Thu, 6 Nov 2025 11:06:16 +0100 Subject: [PATCH 6/6] Added some documentation --- pySDC/implementations/sweeper_classes/Runge_Kutta.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pySDC/implementations/sweeper_classes/Runge_Kutta.py b/pySDC/implementations/sweeper_classes/Runge_Kutta.py index 72a99f33a8..2cf95af03c 100644 --- a/pySDC/implementations/sweeper_classes/Runge_Kutta.py +++ b/pySDC/implementations/sweeper_classes/Runge_Kutta.py @@ -511,6 +511,12 @@ class IMEXEuler(RungeKuttaIMEX): class IMEXEulerStifflyAccurate(RungeKuttaIMEX): + """ + This implements u = fI^-1(u0 + fE(u0)) rather than u = fI^-1(u0) + fE(u0) + u0. + This implementation is slightly inefficient with two stages, but the last stage is the solution, making it stiffly + accurate and suitable for some DAEs. + """ + nodes = np.array([0, 1]) weights = np.array([0, 1]) weights_explicit = np.array([1, 0])
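
Not part of the patch series above: a minimal, standalone sketch (plain numpy, no pySDC classes) of the update that the new IMEXEulerStifflyAccurate tableau from PATCH 6/6 encodes, applied to a scalar Dahlquist problem u' = lambda_I*u + lambda_E*u. The tableau entries are copied from the patch; the helper name imex_rk_step and the test values are illustrative assumptions, not pySDC API. It checks numerically that the quadrature step coincides with the last stage (stiff accuracy) and that, with the dt factors written out, the update is (u0 + dt*fE(u0)) / (1 - dt*lambda_I), which is the form the docstring added in the last hunk describes.

import numpy as np

# Tableau of IMEXEulerStifflyAccurate, copied from the patch
A_impl = np.array([[0.0, 0.0], [0.0, 1.0]])  # implicit matrix
A_expl = np.array([[0.0, 0.0], [1.0, 0.0]])  # explicit matrix
b_impl = np.array([0.0, 1.0])                # implicit weights
b_expl = np.array([1.0, 0.0])                # explicit weights


def imex_rk_step(u0, dt, lam_I, lam_E):
    """One IMEX-RK step for u' = lam_I*u + lam_E*u with the tableau above."""
    n_stages = len(b_impl)
    stages = np.zeros(n_stages, dtype=complex)
    for m in range(n_stages):
        rhs = u0
        for j in range(m):
            rhs = rhs + dt * (A_impl[m, j] * lam_I + A_expl[m, j] * lam_E) * stages[j]
        # solve (1 - dt * A_impl[m, m] * lam_I) * stage = rhs for the diagonally implicit part
        stages[m] = rhs / (1.0 - dt * A_impl[m, m] * lam_I)
    u_next = u0 + dt * (b_impl @ (lam_I * stages) + b_expl @ (lam_E * stages))
    # stiffly accurate: the final quadrature reproduces the last stage
    assert np.isclose(u_next, stages[-1])
    return u_next


if __name__ == '__main__':
    u0, dt, lam_I, lam_E = 1.0 + 0.0j, 0.1, -10.0, 1.0j
    u_next = imex_rk_step(u0, dt, lam_I, lam_E)
    # same update written out by hand: the implicit solve applied to u0 + dt*fE(u0)
    assert np.isclose(u_next, (u0 + dt * lam_E * u0) / (1.0 - dt * lam_I))
    print(u_next)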