38 changes: 29 additions & 9 deletions src/struphy/fields_background/equils.py
@@ -8,6 +8,8 @@
 from time import time
 
 import cunumpy as xp
+from psydac.ddm.mpi import MockMPI
+from psydac.ddm.mpi import mpi as MPI
 from scipy.integrate import odeint, quad
 from scipy.interpolate import RectBivariateSpline, UnivariateSpline
 from scipy.optimize import fsolve, minimize
@@ -31,6 +33,17 @@
 from struphy.fields_background.mhd_equil.eqdsk import readeqdsk
 from struphy.utils.utils import read_state, subp_run
 
+if isinstance(MPI, MockMPI):
+    comm = None
+    rank = 0
+    size = 1
+    Barrier = lambda: None
+else:
+    comm = MPI.COMM_WORLD
+    rank = comm.Get_rank()
+    size = comm.Get_size()
+    Barrier = comm.Barrier
+
 
 class HomogenSlab(CartesianMHDequilibrium):
     r"""
@@ -1748,7 +1761,8 @@ def __init__(
         # default input file
         if file is None:
             file = "AUGNLED_g031213.00830.high"
-            print(f"EQDSK: taking default file {file}.")
+            if rank == 0:
+                print(f"EQDSK: taking default file {file}.")
 
         # no rescaling if units are not provided
         if units is None:
@@ -2131,9 +2145,11 @@ def __init__(
         import pytest
 
         with pytest.raises(SystemExit) as exc:
-            print("Simulation aborted, gvec must be installed (pip install gvec)!")
+            if rank == 0:
+                print("Simulation aborted, gvec must be installed (pip install gvec)!")
             sys.exit(1)
-        print(f"{exc.value.code =}")
+        if rank == 0:
+            print(f"{exc.value.code =}")
 
         import gvec
 
@@ -2410,13 +2426,15 @@ def __init__(
         desc_spec = importlib.util.find_spec("desc")
 
         if desc_spec is None:
-            print("Simulation aborted, desc-opt must be installed!")
-            print("Install with:\npip install desc-opt")
+            if rank == 0:
+                print("Simulation aborted, desc-opt must be installed!")
+                print("Install with:\npip install desc-opt")
             sys.exit(1)
 
         import desc
 
-        print(f"DESC import: {time() - t} seconds")
+        if rank == 0:
+            print(f"DESC import: {time() - t} seconds")
         from struphy.geometry.domains import DESCunit
 
         # no rescaling if units are not provided
@@ -2449,7 +2467,8 @@ def __init__(
         else:
            self._eq = desc.io.load(eq_name)
 
-        print(f"Eq. load: {time() - t} seconds")
+        if rank == 0:
+            print(f"Eq. load: {time() - t} seconds")
         self._rmin = self.params["rmin"]
         self._use_nfp = self.params["use_nfp"]
 
@@ -2878,7 +2897,7 @@ def desc_eval(
         assert xp.all(theta == theta1)
         assert xp.all(zeta == zeta1)
 
-        if verbose:
+        if verbose and rank == 0:
             # import sys
             print(f"\n{nfp =}")
             print(f"{self.eq.axis =}")
@@ -2900,7 +2919,8 @@ def desc_eval(
 
         # make c-contiguous
         out = xp.ascontiguousarray(out)
-        print(f"desc_eval for {var}: {time() - ttime} seconds")
+        if rank == 0:
+            print(f"desc_eval for {var}: {time() - ttime} seconds")
         return out
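The module-level guard introduced at the top of equils.py can be exercised standalone. A minimal sketch, assuming psydac's MockMPI is what `mpi` resolves to when no MPI runtime is available (the `rank0_print` helper is hypothetical, not part of this PR):

    from psydac.ddm.mpi import MockMPI
    from psydac.ddm.mpi import mpi as MPI

    if isinstance(MPI, MockMPI):
        # serial fallback: behave like a single-rank run
        comm, rank, size, Barrier = None, 0, 1, lambda: None
    else:
        # real mpi4py-style module: query the world communicator
        comm = MPI.COMM_WORLD
        rank = comm.Get_rank()
        size = comm.Get_size()
        Barrier = comm.Barrier

    def rank0_print(*args, **kwargs):
        # collapse size-many identical messages into one
        if rank == 0:
            print(*args, **kwargs)

    rank0_print(f"running on {size} process(es)")

This is also why every print in equils.py is now wrapped in `if rank == 0:` — under MPI each rank executes the same code, so an unguarded print appears once per process.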
15 changes: 7 additions & 8 deletions src/struphy/initial/utilities.py
@@ -54,16 +54,15 @@ def __init__(self, derham, name, species, **params):
         else:
             key = "restart/" + species + "_" + name
 
-        if isinstance(data.file[key], h5py.Dataset):
-            self._vector = data.file[key][-1]
+        with h5py.File(data.file_path, "a") as file:
+            if isinstance(file[key], h5py.Dataset):
+                self._vector = file[key][-1]
 
-        else:
-            self._vector = []
+            else:
+                self._vector = []
 
-            for n in range(3):
-                self._vector += [data.file[key + "/" + str(n + 1)][-1]]
-
-        data.file.close()
+                for n in range(3):
+                    self._vector += [file[key + "/" + str(n + 1)][-1]]
 
     @property
     def vector(self):
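The rewrite above swaps a long-lived handle (`data.file`, closed manually at the end) for a with-block, so the HDF5 file is released even when an exception fires mid-read. A minimal sketch of the same read pattern, assuming the restart layout used here (a plain dataset at `key`, or three component sub-datasets "1", "2", "3"); `read_last` is a hypothetical helper and opens read-only where the PR keeps mode "a":

    import h5py

    def read_last(file_path, key):
        # the with-block guarantees the handle is released on any exit path
        with h5py.File(file_path, "r") as file:
            if isinstance(file[key], h5py.Dataset):
                return file[key][-1]  # last saved time slice
            # vector-valued data: one sub-dataset per component
            return [file[f"{key}/{n + 1}"][-1] for n in range(3)]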
84 changes: 42 additions & 42 deletions src/struphy/io/output_handling.py
@@ -38,13 +38,10 @@ def __init__(self, path_out, file_name=None, comm=None):
         self._file_name = file_name
 
         # file path
-        file_path = os.path.join(path_out, "data/", self._file_name)
+        self._file_path = os.path.join(path_out, "data/", self._file_name)
 
         # check if file already exists
-        file_exists = os.path.exists(file_path)
-
-        # open/create file
-        self._file = h5py.File(file_path, "a")
+        file_exists = os.path.exists(self.file_path)
 
         # dictionary with pairs (dataset key : object ID)
         self._dset_dict = {}
@@ -53,9 +50,10 @@ def __init__(self, path_out, file_name=None, comm=None):
         if file_exists:
             dataset_keys = []
 
-            self._file.visit(
-                lambda key: dataset_keys.append(key) if isinstance(self._file[key], h5py.Dataset) else None,
-            )
+            with h5py.File(self.file_path, "a") as file:
+                file.visit(
+                    lambda key: dataset_keys.append(key) if isinstance(file[key], h5py.Dataset) else None,
+                )
 
             for key in dataset_keys:
                 self._dset_dict[key] = None
@@ -66,9 +64,9 @@ def file_name(self):
         return self._file_name
 
     @property
-    def file(self):
-        """The hdf5 file."""
-        return self._file
+    def file_path(self):
+        """The absolute path to the hdf5 file."""
+        return self._file_path
 
     @property
     def dset_dict(self):
@@ -103,20 +101,21 @@ def add_data(self, data_dict):
 
         # create new dataset otherwise and save array
         else:
-            # scalar values are saved as 1d arrays of size 1
-            if val.size == 1:
-                assert val.ndim == 1
-                self._file.create_dataset(key, (1,), maxshape=(None,), dtype=val.dtype, chunks=True)
-                self._file[key][0] = val[0]
-            else:
-                self._file.create_dataset(
-                    key,
-                    (1,) + val.shape,
-                    maxshape=(None,) + val.shape,
-                    dtype=val.dtype,
-                    chunks=True,
-                )
-                self._file[key][0] = val
+            with h5py.File(self.file_path, "a") as file:
+                # scalar values are saved as 1d arrays of size 1
+                if val.size == 1:
+                    assert val.ndim == 1
+                    file.create_dataset(key, (1,), maxshape=(None,), dtype=val.dtype, chunks=True)
+                    file[key][0] = val[0]
+                else:
+                    file.create_dataset(
+                        key,
+                        (1,) + val.shape,
+                        maxshape=(None,) + val.shape,
+                        dtype=val.dtype,
+                        chunks=True,
+                    )
+                    file[key][0] = val
 
         # set object ID
         self._dset_dict[key] = id(val)
@@ -130,25 +129,26 @@ def save_data(self, keys=None):
         keys : list
             Keys to the data objects specified when using "add_data". Default saves all specified data objects.
         """
-
-        # loop over all keys
-        if keys is None:
-            for key in self._dset_dict:
-                self._file[key].resize(self._file[key].shape[0] + 1, axis=0)
-                self._file[key][-1] = ctypes.cast(self._dset_dict[key], ctypes.py_object).value
-
-        # only loop over given keys
-        else:
-            for key in keys:
-                self._file[key].resize(self._file[key].shape[0] + 1, axis=0)
-                self._file[key][-1] = ctypes.cast(self._dset_dict[key], ctypes.py_object).value
+        with h5py.File(self.file_path, "a") as file:
+            # loop over all keys
+            if keys is None:
+                for key in self._dset_dict:
+                    file[key].resize(file[key].shape[0] + 1, axis=0)
+                    file[key][-1] = ctypes.cast(self._dset_dict[key], ctypes.py_object).value
+
+            # only loop over given keys
+            else:
+                for key in keys:
+                    file[key].resize(file[key].shape[0] + 1, axis=0)
+                    file[key][-1] = ctypes.cast(self._dset_dict[key], ctypes.py_object).value
 
     def info(self):
         """Print info of data sets to screen."""
 
         for key in self._dset_dict:
-            print(f"\nData set name: {key}")
-            print("Shape:", self._file[key].shape)
-            print("Attributes:")
-            for attr, val in self._file[key].attrs.items():
-                print(attr, val)
+            with h5py.File(self.file_path, "a") as file:
+                print(f"\nData set name: {key}")
+                print("Shape:", file[key].shape)
+                print("Attributes:")
+                for attr, val in file[key].attrs.items():
+                    print(attr, val)
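DataContainer now stores only `file_path` and reopens the HDF5 file per operation instead of keeping `self._file` open for the object's lifetime. Appending still relies on chunked datasets created with `maxshape=(None, ...)`, which h5py can grow along axis 0. A minimal standalone sketch of that append pattern (the file name `demo.hdf5` and helper `append_row` are illustrative, not from the PR; plain numpy stands in for the cunumpy `xp` used in struphy):

    import h5py
    import numpy as np

    def append_row(file_path, key, arr):
        with h5py.File(file_path, "a") as file:
            if key not in file:
                # first call: resizable, chunked dataset holding one row
                file.create_dataset(
                    key,
                    shape=(1,) + arr.shape,
                    maxshape=(None,) + arr.shape,  # unlimited along axis 0
                    dtype=arr.dtype,
                    chunks=True,
                )
            else:
                # later calls: grow by one row along the time axis
                file[key].resize(file[key].shape[0] + 1, axis=0)
            file[key][-1] = arr

    append_row("demo.hdf5", "feec/e_field", np.zeros(8))
    append_row("demo.hdf5", "feec/e_field", np.ones(8))

The trade-off is one open/close per save instead of a persistent handle; in exchange, no stale handle can survive a crash or restart.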
65 changes: 31 additions & 34 deletions src/struphy/main.py
@@ -280,9 +280,10 @@ def run(
     if restart:
         model.initialize_from_restart(data)
 
-        time_state["value"][0] = data.file["restart/time/value"][-1]
-        time_state["value_sec"][0] = data.file["restart/time/value_sec"][-1]
-        time_state["index"][0] = data.file["restart/time/index"][-1]
+        with h5py.File(data.file_path, "a") as file:
+            time_state["value"][0] = file["restart/time/value"][-1]
+            time_state["value_sec"][0] = file["restart/time/value_sec"][-1]
+            time_state["index"][0] = file["restart/time/index"][-1]
 
         total_steps = str(int(round((Tend - time_state["value"][0]) / dt)))
     else:
@@ -317,7 +318,6 @@ def run(
         if break_cond_1 or break_cond_2:
             # save restart data (other data already saved below)
             data.save_data(keys=save_keys_end)
-            data.file.close()
             end_simulation = time.time()
             if rank == 0:
                 print(f"\nTime steps done: {time_state['index'][0]}")
@@ -474,37 +474,34 @@ def pproc(
         return
 
     # check for fields and kinetic data in hdf5 file that need post processing
-    file = h5py.File(os.path.join(path, "data/", "data_proc0.hdf5"), "r")
-
-    # save time grid at which post-processing data is created
-    xp.save(os.path.join(path_pproc, "t_grid.npy"), file["time/value"][::step].copy())
+    with h5py.File(os.path.join(path, "data/", "data_proc0.hdf5"), "r") as file:
+        # save time grid at which post-processing data is created
+        xp.save(os.path.join(path_pproc, "t_grid.npy"), file["time/value"][::step].copy())
 
-    if "feec" in file.keys():
-        exist_fields = True
-    else:
-        exist_fields = False
-
-    if "kinetic" in file.keys():
-        exist_kinetic = {"markers": False, "f": False, "n_sph": False}
-        kinetic_species = []
-        kinetic_kinds = []
-        for name in file["kinetic"].keys():
-            kinetic_species += [name]
-            kinetic_kinds += [next(iter(model.species[name].variables.values())).space]
-
-            # check for saved markers
-            if "markers" in file["kinetic"][name]:
-                exist_kinetic["markers"] = True
-            # check for saved distribution function
-            if "f" in file["kinetic"][name]:
-                exist_kinetic["f"] = True
-            # check for saved sph density
-            if "n_sph" in file["kinetic"][name]:
-                exist_kinetic["n_sph"] = True
-    else:
-        exist_kinetic = None
-
-    file.close()
+        if "feec" in file.keys():
+            exist_fields = True
+        else:
+            exist_fields = False
+
+        if "kinetic" in file.keys():
+            exist_kinetic = {"markers": False, "f": False, "n_sph": False}
+            kinetic_species = []
+            kinetic_kinds = []
+            for name in file["kinetic"].keys():
+                kinetic_species += [name]
+                kinetic_kinds += [next(iter(model.species[name].variables.values())).space]
+
+                # check for saved markers
+                if "markers" in file["kinetic"][name]:
+                    exist_kinetic["markers"] = True
+                # check for saved distribution function
+                if "f" in file["kinetic"][name]:
+                    exist_kinetic["f"] = True
+                # check for saved sph density
+                if "n_sph" in file["kinetic"][name]:
+                    exist_kinetic["n_sph"] = True
+        else:
+            exist_kinetic = None
 
     # field post-processing
     if exist_fields:
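The pproc hunk above only inspects the proc-0 output file to decide which post-processing passes to run; because the results are plain Python values built inside the with-block and used after it, the old explicit `file.close()` becomes unnecessary. The scan boils down to the following sketch (the helper `scan_outputs` is hypothetical; the group names "feec" and "kinetic" and the markers/f/n_sph sub-groups are the ones the PR checks):

    import h5py

    def scan_outputs(file_path):
        with h5py.File(file_path, "r") as file:
            exist_fields = "feec" in file
            kinetic = {}
            if "kinetic" in file:
                for name in file["kinetic"]:
                    # record which kinds of kinetic output each species saved
                    kinetic[name] = [k for k in ("markers", "f", "n_sph") if k in file["kinetic"][name]]
        return exist_fields, kinetic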