Commit

Support subcycling, add metadata in result csv.
BenjaminRodenberg committed Oct 25, 2023
1 parent 47fde42 commit ae9417a
Showing 2 changed files with 44 additions and 18 deletions.
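
In this commit, subcycling means a participant may take several solver time steps inside one coupling time window: do_run() gains an n_substeps argument, forwards it to heat.py via the new -s flag, and records the resulting step size in the summary. A small numeric illustration of the relation introduced below (values chosen freely):

    dt = 0.1                          # coupling time window size passed to do_run
    n_substeps = 4                    # e.g. do_run(dt, n_substeps=4); forwarded to heat.py as "-s 4"
    time_step_size = dt / n_substeps  # 0.025, as computed inside do_run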
60 changes: 43 additions & 17 deletions partitioned-heat-conduction/doConvergenceStudy.py
@@ -29,9 +29,12 @@ def render(precice_config_params):
         file.write(precice_config_template.render(precice_config_params))


-def do_run(dt, error_tol=10e-3, precice_config_params=default_precice_config_params):
+def do_run(dt, n_substeps = 1, error_tol=10e-3, precice_config_params=default_precice_config_params):
+    time_window_size = dt
+    time_step_size = time_window_size / n_substeps
+
     fenics = Path(__file__).parent.absolute() / "fenics"
-    precice_config_params['time_window_size'] = dt
+    precice_config_params['time_window_size'] = time_window_size
     render(precice_config_params)
     print(f"{datetime.datetime.now()}: Start run with parameters {precice_config_params}")
     print("Running...")
@@ -51,21 +54,22 @@ def do_run(dt, error_tol=10e-3, precice_config_params=default_precice_config_params):

     for participant in participants:
         with open(fenics / participant['logfile'], "w") as outfile:
-            p = subprocess.Popen(["python3", fenics / "heat.py", participant["cmd"], f"-e {error_tol}"], cwd=fenics, stdout=outfile)
+            p = subprocess.Popen(["python3", fenics / "heat.py", participant["cmd"], f"-e {error_tol}", f"-s {n_substeps}"], cwd=fenics, stdout=outfile)
             participant["proc"] = p

     for participant in participants:
         participant["proc"].wait()

     for participant in participants:
         if participant["proc"].returncode != 0:
-            raise Exception(f'Experiment with dt={dt} failed. See logs {[p["logfile"] for p in participants]}')
+            raise Exception(f'Experiment failed. See logs {[p["logfile"] for p in participants]}')

     print("Done.")
     print("Postprocessing...")
-    summary = {"dt":dt}
+    summary = {"time window size": time_window_size}
     for participant in participants:
         df = pd.read_csv(fenics / f"errors-{participant['name']}.csv", comment="#")
+        summary[f"time step size {participant['name']}"] = time_step_size
         summary[f"error {participant['name']}"] = df["errors"].abs().max()
     print("Done.")

Expand All @@ -76,30 +80,52 @@ def do_run(dt, error_tol=10e-3, precice_config_params=default_precice_config_par
min_dt = 0.1
dts = [min_dt * 0.5**i for i in range(5)]

df = pd.DataFrame(columns=["dt", "error Dirichlet", "error Neumann"])
df = pd.DataFrame()

precice_config_params = {
'max_used_iterations': 10,
'time_windows_reused': 5,
}

summary_file = f"convergence-studies/{uuid.uuid4()}.csv"
summary_file = Path("convergence-studies") / f"{uuid.uuid4()}.csv"

for dt in dts:
summary = do_run(dt, error_tol=10e10, precice_config_params=precice_config_params)
df = pd.concat([df, pd.DataFrame(summary, index=[0])], ignore_index=True)
for n in [1]:
summary = do_run(dt, n_substeps=n, error_tol=10e10, precice_config_params=precice_config_params)
df = pd.concat([df, pd.DataFrame(summary, index=[0])], ignore_index=True)

print(f"Write preliminary output to {summary_file}")
df.to_csv(summary_file)
print(f"Write preliminary output to {summary_file}")
df.to_csv(summary_file)

term_size = os.get_terminal_size()
print('-' * term_size.columns)
print(df)
print('-' * term_size.columns)
term_size = os.get_terminal_size()
print('-' * term_size.columns)
print(df)
print('-' * term_size.columns)

df = df.set_index('dt')
df = df.set_index(['time window size', 'time step size Dirichlet', 'time step size Neumann'])
print(f"Write final output to {summary_file}")
df.to_csv(summary_file)

import git

repo_base = Path(__file__).parent / ".."
repo = git.Repo(repo_base)
chash = str(repo.head.commit)[:7]
if repo.is_dirty():
chash += "-dirty"

metadata={
"git repository": repo.remotes.origin.url,
"git commit": chash,
"precice_config_params": precice_config_params,
}

summary_file.unlink()

with open(summary_file, 'a') as f:
for key, value in metadata.items():
f.write(f"# {key}:{value}\n")
df.to_csv(f)

print('-' * term_size.columns)
print(df)
print('-' * term_size.columns)
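
The metadata added here ends up as plain "# key:value" comment lines ahead of the CSV table, so the summary file stays readable by pandas (the script itself already reads the per-participant error files with comment="#"). A minimal sketch of reading such a summary back, assuming pandas and the three-column index written above; the helper name is made up:

    from pathlib import Path

    import pandas as pd


    def read_summary(summary_file: Path):
        """Split a convergence-study CSV into its '# key:value' header and the data table."""
        metadata = {}
        with open(summary_file) as f:
            for line in f:
                if not line.startswith("#"):
                    break
                key, _, value = line.lstrip("#").strip().partition(":")
                metadata[key] = value
        # pandas ignores the '#' comment lines and parses the remaining CSV body
        df = pd.read_csv(summary_file, comment="#", index_col=[0, 1, 2])
        return metadata, df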
2 changes: 1 addition & 1 deletion partitioned-heat-conduction/precice-config-template.xml
@@ -67,8 +67,8 @@
     <acceleration:IQN-ILS>
       <data name="Temperature" mesh="Neumann-Mesh" />
       <initial-relaxation value="0.1" />
+      <!-- Quasi-Newton can run into errors for small time steps. Commenting out the PRECICE_CHECK can help. See https://github.com/precice/tutorials/issues/385#issuecomment-1779178024 -->
       <max-used-iterations value="{{max_used_iterations}}" />
-      <!-- Don't reuse windows, for g_poly because this can lead to failures, especially for small time steps. Reuse for g_tri -->
       <time-windows-reused value="{{time_windows_reused}}" />
       <filter type="QR2" limit="1e-3" />
     </acceleration:IQN-ILS>
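For context, the {{...}} placeholders in this template are filled by the render() step of doConvergenceStudy.py (the script calls precice_config_template.render(precice_config_params)). A minimal sketch of that rendering, assuming Jinja2 and the parameter names used in this commit; the output file name is an assumption:

    from pathlib import Path

    import jinja2

    # Values as assembled by doConvergenceStudy.py: do_run() sets 'time_window_size'
    # per run, 10 and 5 are taken from precice_config_params in the study script.
    params = {
        "time_window_size": 0.1,
        "max_used_iterations": 10,
        "time_windows_reused": 5,
    }

    base_path = Path(__file__).parent.absolute()
    env = jinja2.Environment(loader=jinja2.FileSystemLoader(base_path))
    template = env.get_template("precice-config-template.xml")

    # Substitute the {{...}} placeholders and write a concrete preCICE config next to the template
    (base_path / "precice-config.xml").write_text(template.render(params))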
