diff --git a/matflow/data/scripts/damask/high_concurrency/process_HDF5.py b/matflow/data/scripts/damask/high_concurrency/process_HDF5.py index ed90665b..b492ee5e 100644 --- a/matflow/data/scripts/damask/high_concurrency/process_HDF5.py +++ b/matflow/data/scripts/damask/high_concurrency/process_HDF5.py @@ -9,6 +9,7 @@ def process_HDF5( damask_post_processing: list[dict], VE_response_data: dict, damask_viz: list[dict] | dict | bool | None, + remove_damask_hdf5: bool, ): """ Operate on and extract data from an HDF5 file generated by a DAMASK run. @@ -165,4 +166,8 @@ def process_HDF5( ) generate_viz(hdf5_path=damask_hdf5_file, viz_spec=damask_viz, parsed_outs=VE_response) + # remove damask HDF5 file (TODO: needs to be a better way to do this in hpcflow) + if remove_damask_hdf5: + damask_hdf5_file.unlink() + return VE_response diff --git a/matflow/data/scripts/damask/high_concurrency/write_input_files.py b/matflow/data/scripts/damask/high_concurrency/write_input_files.py index 2e98b6d9..4dfe40ca 100644 --- a/matflow/data/scripts/damask/high_concurrency/write_input_files.py +++ b/matflow/data/scripts/damask/high_concurrency/write_input_files.py @@ -1,7 +1,7 @@ from __future__ import annotations import copy from pathlib import Path -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from damask import __version__ as damask_version from damask_parse.writers import write_geom, write_load_case, write_material @@ -20,6 +20,7 @@ def write_input_files( damask_phases: dict, single_crystal_parameters: dict | None, damask_numerics: dict | None, + initial_conditions: dict[str, Any] | None, ): """ Write all the input files to DAMASK. 
@@ -45,7 +46,7 @@ def write_input_files( https://damask-multiphysics.org/documentation/file_formats/numerics.html """ geom_path = Path(path) - _write_geom(geom_path, volume_element) + _write_geom(geom_path, volume_element, initial_conditions=initial_conditions) _write_load(load_case, damask_solver) _write_material( volume_element, @@ -57,8 +58,15 @@ def write_input_files( _write_numerics(damask_numerics) -def _write_geom(path: Path, volume_element: dict): - write_geom(dir_path=path.parent, volume_element=volume_element, name=path.name) +def _write_geom( + path: Path, volume_element: dict, initial_conditions: dict[str, Any] | None +): + write_geom( + dir_path=path.parent, + volume_element=volume_element, + name=path.name, + initial_conditions=initial_conditions, + ) def _write_load(load_case: LoadCase, damask_solver: dict[str, str]): diff --git a/matflow/data/template_components/task_schemas.yaml b/matflow/data/template_components/task_schemas.yaml index c1568ede..3092324c 100644 --- a/matflow/data/template_components/task_schemas.yaml +++ b/matflow/data/template_components/task_schemas.yaml @@ -706,9 +706,31 @@ condition: { value.not_equal_to: null } abortable: true commands: + - command: <> --jobname geom_load --load load.yaml --geom geom.vti --material material.yaml --numerics numerics.yaml + stdout: stdout.log + stderr: stderr.log + rules: + - path: inputs.damask_numerics + condition: { value.not_equal_to: null } + - path: resources.environments.damask_env.version + default: true + condition: { value.not_equal_to: 3.0.0-alpha7 } + - command: <> --jobname geom_load --load load.yaml --geom geom.vti --material material.yaml + stdout: stdout.log + stderr: stderr.log + rules: + - path: inputs.damask_numerics + condition: { value.equal_to: null } + - path: resources.environments.damask_env.version + default: true + condition: { value.not_equal_to: 3.0.0-alpha7 } - command: <> --load load.yaml --geom geom.vti stdout: stdout.log stderr: stderr.log + rules: + - path: 
resources.environments.damask_env.version + default: false + condition: { value.equal_to: 3.0.0-alpha7 } output_file_parsers: damask_log: from_files: [damask_stdout_file] @@ -761,6 +783,10 @@ default_value: null - parameter: damask_numerics default_value: null + - parameter: remove_damask_hdf5 + default_value: false + - parameter: initial_conditions + default_value: null outputs: - parameter: VE_response actions: @@ -781,18 +807,33 @@ - damask_phases - single_crystal_parameters - damask_numerics + - initial_conditions script: <> abortable: true commands: - - command: <> --load load.yaml --geom geom.vti + - command: <> --jobname geom_load --load load.yaml --geom geom.vti --material material.yaml --numerics numerics.yaml + stdout: stdout.log + stderr: stderr.log + rules: + - path: inputs.damask_numerics + condition: { value.not_equal_to: null } + - command: <> --jobname geom_load --load load.yaml --geom geom.vti --material material.yaml stdout: stdout.log stderr: stderr.log + rules: + - path: inputs.damask_numerics + condition: { value.equal_to: null } output_file_parsers: VE_response: # this generates VTK files as well if requested from_files: [damask_hdf5_file] - save_files: false + save_files: [damask_viz_files] + clean_up: [damask_viz_files] script: <> - inputs: [damask_post_processing, VE_response_data, damask_viz] + inputs: + - damask_post_processing + - VE_response_data + - damask_viz + - remove_damask_hdf5 - objective: read_tensile_test doc: Read tensile test data from CSV. 
diff --git a/matflow/param_classes/load.py b/matflow/param_classes/load.py index 375927cb..16025456 100644 --- a/matflow/param_classes/load.py +++ b/matflow/param_classes/load.py @@ -833,6 +833,47 @@ def random_3D(
             )
         return obj._remember_name_args(_method_name, _method_args)
 
+    @classmethod
+    def random_inc(
+        cls,
+        total_time: Union[int, float],
+        num_increments: int,
+        target_def_grad: float,
+        start_def_grad: Optional[np.typing.ArrayLike] = None,
+        dump_frequency: Optional[int] = 1,
+    ) -> LoadStep:
+        """Random load step continuing from a start point.
+
+        Parameters
+        ----------
+        total_time : float or int
+            Total simulation time.
+        num_increments
+            Number of simulation increments.
+        target_def_grad : float
+            Maximum of each deformation gradient component
+        start_def_grad : numpy.ndarray of shape (3, 3), optional
+            Starting deformation gradient of load step. Identity if not given.
+        dump_frequency : int, optional
+            By default, 1, meaning results are written out every increment.
+        """
+        if start_def_grad is None:
+            start_def_grad = np.eye(3)
+        if start_def_grad.shape != (3, 3):
+            msg = "start_def_grad must be an array of shape (3, 3)"
+            raise ValueError(msg)
+
+        dg_arr = np.copy(start_def_grad)
+        dg_arr += target_def_grad * np.where(np.random.random((3, 3)) > 0.5, 1.0, -1.0)
+        dg_arr /= np.cbrt(np.linalg.det(dg_arr))
+
+        return cls(
+            total_time=total_time,
+            num_increments=num_increments,
+            target_def_grad=dg_arr,
+            dump_frequency=dump_frequency,
+        )
+
     @classmethod
     def uniaxial_cyclic(
         cls,
@@ -1160,3 +1201,50 @@ def from_npz_file(cls, **kwargs) -> Self:
         """
         See :py:meth:`~LoadStep.from_npz_file` for argument documentation.
         """
         return cls(steps=LoadStep.from_npz_file(**kwargs))
+
+    @classmethod
+    def multistep_random_inc(
+        cls,
+        steps: List[Dict],
+        interpolate_steps: int,
+        interpolate_kind: Optional[Union[str, int]] = 3,
+    ) -> LoadCase:
+        """A load case with multiple steps.
+ + Parameters + ---------- + + """ + from scipy.interpolate import interp1d + + step_objs = [] + dg_arr = [np.eye(3)] + for step_i in steps: + step_i = copy.deepcopy(step_i) # don't mutate + repeats = step_i.pop("repeats", 1) + method = LoadStep.random_inc + for _ in range(repeats): + step_obj = method(**step_i, start_def_grad=dg_arr[-1]) + dg_arr.append(step_obj.target_def_grad) + step_objs.append(step_obj) + dg_arr = np.array(dg_arr) + + dg_interp = interp1d( + np.arange(len(dg_arr)) * interpolate_steps, + dg_arr, + kind=interpolate_kind, + axis=0, + ) + + step_objs_full = [] + for i, step_obj_i in enumerate(step_objs): + step_i = { + "total_time": step_obj_i.total_time / interpolate_steps, + "num_increments": int(step_obj_i.num_increments / interpolate_steps), + "dump_frequency": step_obj_i.dump_frequency, + } + for j in range(interpolate_steps): + dg = dg_interp(i * interpolate_steps + j + 1) + step_objs_full.append(LoadStep(**step_i, target_def_grad=dg)) + + return cls(steps=step_objs_full) diff --git a/poetry.lock b/poetry.lock index 4240918d..ce07925c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1313,14 +1313,14 @@ numpy = ">=1.19.3" [[package]] name = "hpcflow-new2" -version = "0.2.0a261" +version = "0.2.0a262" description = "Computational workflow management" optional = false python-versions = "<3.14,>=3.9" groups = ["main"] files = [ - {file = "hpcflow_new2-0.2.0a261-py3-none-any.whl", hash = "sha256:0ea203bb2be7e0a3aa8b531eff62b7f104f78782235f39e56f97a1b9f2e0df71"}, - {file = "hpcflow_new2-0.2.0a261.tar.gz", hash = "sha256:224d66daa5df50ebd4d8e609c466681514e978b0bcec341618ccff62b70f50a9"}, + {file = "hpcflow_new2-0.2.0a262-py3-none-any.whl", hash = "sha256:61e97fa149c37129c2fba26504d526c4c024ec857b20d5391836ceaa9c314cfb"}, + {file = "hpcflow_new2-0.2.0a262.tar.gz", hash = "sha256:3db7d58b15848d3f6dbb12687aaceb2cb8120f7e3df50c460e93e8af3a8307df"}, ] [package.dependencies] @@ -4896,4 +4896,4 @@ test = ["pytest"] [metadata] lock-version = "2.1" 
python-versions = ">=3.9,<3.14" -content-hash = "f14863ad8e34346ca0340c9403192a92eb9d3490d1dcefc74e480b7e8184bc55" +content-hash = "26f1a692685c342da36245e2c591899cb46886253d06e2f3636b971c487dea39" diff --git a/pyproject.toml b/pyproject.toml index affeebeb..858a9613 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,7 @@ numpy = [ # copied from hpcflow, shouldn't really be needed? pytest = {version = "^7.2.0", optional = true} matplotlib = "^3.7" typing-extensions = "^4.12.2" -hpcflow-new2 = "0.2.0a261" +hpcflow-new2 = "0.2.0a262" [tool.poetry.group.dev.dependencies] pylint = "^2.12.2"