Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ def process_HDF5(
damask_post_processing: list[dict],
VE_response_data: dict,
damask_viz: list[dict] | dict | bool | None,
remove_damask_hdf5: bool,
):
"""
Operate on and extract data from an HDF5 file generated by a DAMASK run.
Expand Down Expand Up @@ -165,4 +166,8 @@ def process_HDF5(
)
generate_viz(hdf5_path=damask_hdf5_file, viz_spec=damask_viz, parsed_outs=VE_response)

# remove damask HDF5 file (TODO: needs to be a better way to do this in hpcflow)
if remove_damask_hdf5:
damask_hdf5_file.unlink()

return VE_response
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from __future__ import annotations
import copy
from pathlib import Path
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Any

from damask import __version__ as damask_version
from damask_parse.writers import write_geom, write_load_case, write_material
Expand All @@ -20,6 +20,7 @@ def write_input_files(
damask_phases: dict,
single_crystal_parameters: dict | None,
damask_numerics: dict | None,
initial_conditions: dict[str, Any] | None,
):
"""
Write all the input files to DAMASK.
Expand All @@ -45,7 +46,7 @@ def write_input_files(
https://damask-multiphysics.org/documentation/file_formats/numerics.html
"""
geom_path = Path(path)
_write_geom(geom_path, volume_element)
_write_geom(geom_path, volume_element, initial_conditions=initial_conditions)
_write_load(load_case, damask_solver)
_write_material(
volume_element,
Expand All @@ -57,8 +58,15 @@ def write_input_files(
_write_numerics(damask_numerics)


def _write_geom(path: Path, volume_element: dict):
write_geom(dir_path=path.parent, volume_element=volume_element, name=path.name)
def _write_geom(
    path: Path, volume_element: dict, initial_conditions: dict[str, Any] | None
):
    """Write the DAMASK geometry (VTI) file for `volume_element`.

    The file is written as `path.name` inside `path.parent`; any
    `initial_conditions` are forwarded unchanged to `damask_parse.write_geom`.
    """
    write_geom(
        name=path.name,
        dir_path=path.parent,
        volume_element=volume_element,
        initial_conditions=initial_conditions,
    )


def _write_load(load_case: LoadCase, damask_solver: dict[str, str]):
Expand Down
47 changes: 44 additions & 3 deletions matflow/data/template_components/task_schemas.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -706,9 +706,31 @@
condition: { value.not_equal_to: null }
abortable: true
commands:
- command: <<executable:damask_grid>> --jobname geom_load --load load.yaml --geom geom.vti --material material.yaml --numerics numerics.yaml
stdout: stdout.log
stderr: stderr.log
rules:
- path: inputs.damask_numerics
condition: { value.not_equal_to: null }
- path: resources.environments.damask_env.version
default: true
condition: { value.not_equal_to: 3.0.0-alpha7 }
- command: <<executable:damask_grid>> --jobname geom_load --load load.yaml --geom geom.vti --material material.yaml
stdout: stdout.log
stderr: stderr.log
rules:
- path: inputs.damask_numerics
condition: { value.equal_to: null }
- path: resources.environments.damask_env.version
default: true
condition: { value.not_equal_to: 3.0.0-alpha7 }
- command: <<executable:damask_grid>> --load load.yaml --geom geom.vti
stdout: stdout.log
stderr: stderr.log
rules:
- path: resources.environments.damask_env.version
default: false
condition: { value.equal_to: 3.0.0-alpha7 }
output_file_parsers:
damask_log:
from_files: [damask_stdout_file]
Expand Down Expand Up @@ -761,6 +783,10 @@
default_value: null
- parameter: damask_numerics
default_value: null
- parameter: remove_damask_hdf5
default_value: false
- parameter: initial_conditions
default_value: null
outputs:
- parameter: VE_response
actions:
Expand All @@ -781,18 +807,33 @@
- damask_phases
- single_crystal_parameters
- damask_numerics
- initial_conditions
script: <<script:damask/high_concurrency/write_input_files.py>>
abortable: true
commands:
- command: <<executable:damask_grid>> --load load.yaml --geom geom.vti
- command: <<executable:damask_grid>> --jobname geom_load --load load.yaml --geom geom.vti --material material.yaml --numerics numerics.yaml
stdout: stdout.log
stderr: stderr.log
rules:
- path: inputs.damask_numerics
condition: { value.not_equal_to: null }
- command: <<executable:damask_grid>> --jobname geom_load --load load.yaml --geom geom.vti --material material.yaml
stdout: stdout.log
stderr: stderr.log
rules:
- path: inputs.damask_numerics
condition: { value.equal_to: null }
output_file_parsers:
VE_response: # this generates VTK files as well if requested
from_files: [damask_hdf5_file]
save_files: false
save_files: [damask_viz_files]
clean_up: [damask_viz_files]
script: <<script:damask/high_concurrency/process_HDF5.py>>
inputs: [damask_post_processing, VE_response_data, damask_viz]
inputs:
- damask_post_processing
- VE_response_data
- damask_viz
- remove_damask_hdf5

- objective: read_tensile_test
doc: Read tensile test data from CSV.
Expand Down
129 changes: 129 additions & 0 deletions matflow/param_classes/load.py
Original file line number Diff line number Diff line change
Expand Up @@ -833,6 +833,88 @@ def random_3D(
)
return obj._remember_name_args(_method_name, _method_args)

@classmethod
def random_inc(
cls,
total_time: Union[int, float],
num_increments: int,
target_def_grad: float,
start_def_grad: Optional[np.typing.ArrayLike] = None,
dump_frequency: Optional[int] = 1,
) -> LoadStep:
"""Random load step continuing from a start point.

Parameters
----------
total_time : float or int
Total simulation time.
num_increments
Number of simulation increments.
target_def_grad : float
Maximum of each deformation gradient component
start_def_grad : numpy.ndarray of shape (3, 3), optional
Starting deformation gradient of load step. Identity if not given.
dump_frequency : int, optional
By default, 1, meaning results are written out every increment.
"""
if start_def_grad is None:
start_def_grad = np.eye(3)
if start_def_grad.shape != (3, 3):
msg = "start_def_grad must be an array of shape (3, 3)"
raise ValueError(msg)

dg_arr = np.copy(start_def_grad)
dg_arr += target_def_grad * np.where(np.random.random((3, 3)) > 0.5, 1.0, -1.0)
dg_arr /= np.cbrt(np.linalg.det(dg_arr))

return cls(
total_time=total_time,
num_increments=num_increments,
target_def_grad=dg_arr,
dump_frequency=dump_frequency,
)

@classmethod
def random_inc(
cls,
total_time: Union[int, float],
num_increments: int,
target_def_grad: float,
start_def_grad: Optional[np.typing.ArrayLike] = None,
dump_frequency: Optional[int] = 1,
) -> LoadStep:
"""Random load step continuing from a start point.

Parameters
----------
total_time : float or int
Total simulation time.
num_increments
Number of simulation increments.
target_def_grad : float
Maximum of each deformation gradient component
start_def_grad : numpy.ndarray of shape (3, 3), optional
Starting deformation gradient of load step. Identity if not given.
dump_frequency : int, optional
By default, 1, meaning results are written out every increment.
"""
if start_def_grad is None:
start_def_grad = np.eye(3)
if start_def_grad.shape != (3, 3):
msg = "start_def_grad must be an array of shape (3, 3)"
raise ValueError(msg)

dg_arr = np.copy(start_def_grad)
dg_arr += target_def_grad * np.where(np.random.random((3, 3)) > 0.5, 1.0, -1.0)
dg_arr /= np.cbrt(np.linalg.det(dg_arr))

return cls(
total_time=total_time,
num_increments=num_increments,
target_def_grad=dg_arr,
dump_frequency=dump_frequency,
)

@classmethod
def uniaxial_cyclic(
cls,
Expand Down Expand Up @@ -1160,3 +1242,50 @@ def from_npz_file(cls, **kwargs) -> Self:
See :py:meth:`~LoadStep.from_npz_file` for argument documentation.
"""
return cls(steps=LoadStep.from_npz_file(**kwargs))

    @classmethod
    def multistep_random_inc(
        cls,
        steps: List[Dict],
        interpolate_steps: int,
        interpolate_kind: Optional[Union[str, int]] = 3,
    ) -> LoadCase:
        """A load case with multiple steps.

        Chains random-increment load steps (each starting from the previous
        step's final deformation gradient), then interpolates the resulting
        gradient sequence so each original step is expanded into
        `interpolate_steps` smaller steps.

        Parameters
        ----------
        steps
            List of keyword-argument dicts for `LoadStep.random_inc`
            (`start_def_grad` is supplied automatically). Each dict may also
            include a ``repeats`` key (default 1) giving how many consecutive
            random steps to generate from that dict.
        interpolate_steps
            Number of sub-steps each generated step is split into. Each
            sub-step receives ``total_time / interpolate_steps`` of the
            original step's time and ``int(num_increments /
            interpolate_steps)`` increments.
        interpolate_kind
            ``kind`` argument passed to `scipy.interpolate.interp1d`
            (a string such as ``"cubic"``, or an integer spline order;
            default 3).
        """
        from scipy.interpolate import interp1d

        step_objs = []
        # Chain of target deformation gradients, seeded with identity:
        dg_arr = [np.eye(3)]
        for step_i in steps:
            step_i = copy.deepcopy(step_i)  # don't mutate the caller's dicts
            repeats = step_i.pop("repeats", 1)
            method = LoadStep.random_inc
            for _ in range(repeats):
                # Each random step continues from the previous final gradient:
                step_obj = method(**step_i, start_def_grad=dg_arr[-1])
                dg_arr.append(step_obj.target_def_grad)
                step_objs.append(step_obj)
        dg_arr = np.array(dg_arr)

        # Interpolate component-wise along the step axis; node k of the
        # original chain sits at x = k * interpolate_steps, so integer x
        # values in between are the sub-step targets:
        dg_interp = interp1d(
            np.arange(len(dg_arr)) * interpolate_steps,
            dg_arr,
            kind=interpolate_kind,
            axis=0,
        )

        step_objs_full = []
        for i, step_obj_i in enumerate(step_objs):
            # Divide the original step's duration/increments across sub-steps
            # (NOTE(review): num_increments < interpolate_steps truncates to
            # zero increments per sub-step — presumably callers avoid this;
            # confirm):
            step_i = {
                "total_time": step_obj_i.total_time / interpolate_steps,
                "num_increments": int(step_obj_i.num_increments / interpolate_steps),
                "dump_frequency": step_obj_i.dump_frequency,
            }
            for j in range(interpolate_steps):
                dg = dg_interp(i * interpolate_steps + j + 1)
                step_objs_full.append(LoadStep(**step_i, target_def_grad=dg))

        return cls(steps=step_objs_full)
8 changes: 4 additions & 4 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ numpy = [ # copied from hpcflow, shouldn't really be needed?
pytest = {version = "^7.2.0", optional = true}
matplotlib = "^3.7"
typing-extensions = "^4.12.2"
hpcflow-new2 = "0.2.0a261"
hpcflow-new2 = "0.2.0a262"

[tool.poetry.group.dev.dependencies]
pylint = "^2.12.2"
Expand Down
Loading