Modify case for convergence studies via runner script.
BenjaminRodenberg committed Oct 20, 2023
1 parent c3ccfb6 commit 82ff0b3
Showing 4 changed files with 113 additions and 6 deletions.
81 changes: 81 additions & 0 deletions partitioned-heat-conduction/doConvergenceStudy.py
@@ -0,0 +1,81 @@
from jinja2 import Environment, select_autoescape, FileSystemLoader
import pandas as pd
from pathlib import Path
import subprocess
import datetime
import shutil


def render(dt):
    base_path = Path(__file__).parent.absolute()

    env = Environment(
        loader=FileSystemLoader(base_path),
        autoescape=select_autoescape(['xml'])
    )

    precice_config_template = env.get_template('precice-config-template.xml')

    precice_config_name = base_path / "precice-config.xml"

    with open(precice_config_name, "w") as file:
        file.write(precice_config_template.render(time_window_size=dt))


def do_run(dt, error_tol=10e-3):
    fenics = Path(__file__).parent.absolute() / "fenics"
    render(dt=dt)
    print(f"Start run with dt={dt} at {datetime.datetime.now()} ...")
    participants = [
        {
            "name": "Dirichlet",
            "cmd": "-d",
        },
        {
            "name": "Neumann",
            "cmd": "-n",
        },
    ]

    for participant in participants:
        with open(fenics / f"stdout-{participant['name']}.log", "w") as outfile:
            p = subprocess.Popen(
                ["python3", fenics / "heat.py", participant["cmd"], "-e", str(error_tol)],
                cwd=fenics, stdout=outfile)
            participant["proc"] = p

    for participant in participants:
        participant["proc"].wait()

    for participant in participants:
        if participant["proc"].returncode != 0:
            raise Exception(f"Experiment with dt={dt} failed for participant {participant['name']}!")

    print("Done.")
    print("Postprocessing...")
    summary = {"dt": dt}
    for participant in participants:
        df = pd.read_csv(fenics / f"errors-{participant['name']}.csv", comment="#")
        summary[f"error {participant['name']}"] = df["errors"].abs().max()
    print("Done.")

    return summary


if __name__ == "__main__":
    min_dt = 0.01
    dts = [min_dt * 0.5**i for i in range(10)]

    df = pd.DataFrame(columns=["dt", "error Dirichlet", "error Neumann"])

    summary_file = "convergence_study.csv"
    for dt in dts:
        summary = do_run(dt, error_tol=10e10)
        df = pd.concat([df, pd.DataFrame(summary, index=[0])], ignore_index=True)

        print(f"Write output to {summary_file}")
        df.to_csv(summary_file)

        term_size = shutil.get_terminal_size()
        print('-' * term_size.columns)
        print("Preliminary results:")
        print(df)
        print('-' * term_size.columns)
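Not part of this commit, but a minimal post-processing sketch for the study above: since the dt values are halved in each refinement step, the observed convergence order follows from the ratio of consecutive errors in convergence_study.csv (assuming the runner script was executed from the tutorial directory and the CSV keeps the column names used above).

# Post-processing sketch (assumption: convergence_study.csv as written by doConvergenceStudy.py).
# With dt halved between rows, error ~ C * dt^p gives p = log2(error(dt) / error(dt/2)).
import numpy as np
import pandas as pd

df = pd.read_csv("convergence_study.csv", index_col=0)
for col in ["error Dirichlet", "error Neumann"]:
    errors = df[col].to_numpy(dtype=float)
    orders = np.log2(errors[:-1] / errors[1:])
    print(f"{col}: observed orders per refinement step: {orders}")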
24 changes: 24 additions & 0 deletions partitioned-heat-conduction/fenics/heat.py
@@ -36,6 +36,7 @@
from problem_setup import get_geometry, get_manufactured_solution
import dolfin
from dolfin import FacetNormal, dot
import pandas as pd


def determine_gradient(V_g, u, flux):
@@ -109,6 +110,7 @@ def determine_gradient(V_g, u, flux):
precice_dt = precice.initialize(coupling_boundary, read_function_space=W, write_object=u_D_function)

dt = Constant(0)
window_dt = precice_dt  # keep the time window size provided by preCICE for the error-file metadata
fenics_dt = precice_dt # use window size provided by preCICE as time step size
dt.assign(np.min([fenics_dt, precice_dt]))

@@ -166,6 +168,8 @@ def determine_gradient(V_g, u, flux):

error_total, error_pointwise = compute_errors(u_n, u_ref, V)
error_out << error_pointwise
errors = []
times = []

# set t_1 = t_0 + dt, this gives u_D^1
# call dt(0) to evaluate FEniCS Constant. Todo: is there a better way?
@@ -225,10 +229,30 @@ def determine_gradient(V_g, u, flux):
temperature_out << u_n
ref_out << u_ref
error_out << error_pointwise
errors.append(error)
times.append(t)

# Update Dirichlet BC
u_D.t = t + float(dt)
f.t = t + float(dt)

# Hold plot
precice.finalize()

df = pd.DataFrame()
df["times"] = times
df["errors"] = errors
df = df.set_index('times')
metadata = f'''# time_window_size: {window_dt}
# time_step_size: {fenics_dt}
'''

filename = f"errors-{problem.value}.csv"

# Overwrite any error file left over from a previous run: write the metadata first, then the data.
with open(filename, 'w') as file:
    file.write(metadata)
    df.to_csv(file)
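For orientation, the error file written above then has roughly the following layout (the numbers shown are illustrative placeholders, not results); the two leading metadata lines are why the runner script reads it with pd.read_csv(..., comment="#").

# time_window_size: 0.01
# time_step_size: 0.01
times,errors
0.01,<error at t=0.01>
0.02,<error at t=0.02>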
4 changes: 2 additions & 2 deletions partitioned-heat-conduction/fenics/my_enums.py
@@ -5,8 +5,8 @@ class ProblemType(Enum):
"""
Enum defines problem type. Details see above.
"""
DIRICHLET = 1 # Dirichlet problem
NEUMANN = 2 # Neumann problem
DIRICHLET = "Dirichlet" # Dirichlet problem
NEUMANN = "Neumann" # Neumann problem


class DomainPart(Enum):
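A small illustration (not from this commit) of why the enum members switch to string values: heat.py can now derive the error-file name directly from the problem type, matching the names doConvergenceStudy.py expects.

from my_enums import ProblemType

# Before this change, ProblemType.DIRICHLET.value was 1; now it is the string "Dirichlet",
# so the file name below matches the "errors-Dirichlet.csv" read by the runner script.
filename = f"errors-{ProblemType.DIRICHLET.value}.csv"
print(filename)  # errors-Dirichlet.csv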
10 changes: 6 additions & 4 deletions partitioned-heat-conduction/precice-config-template.xml
Expand Up @@ -52,7 +52,7 @@
<coupling-scheme:serial-implicit>
<participants first="Dirichlet" second="Neumann" />
<max-time value="1.0" />
<time-window-size value="0.1" />
<time-window-size value="{{time_window_size}}" />
<max-iterations value="100" />
<exchange data="Heat-Flux" mesh="Dirichlet-Mesh" from="Dirichlet" to="Neumann" />
<exchange
@@ -61,13 +61,15 @@
from="Neumann"
to="Dirichlet"
initialize="true" />
<relative-convergence-measure data="Heat-Flux" mesh="Dirichlet-Mesh" limit="1e-5" />
<relative-convergence-measure data="Temperature" mesh="Neumann-Mesh" limit="1e-5" />
<!-- Use a relatively loose convergence limit, because the coupling residual is not the accuracy bottleneck with a first-order time stepping scheme -->
<relative-convergence-measure data="Heat-Flux" mesh="Dirichlet-Mesh" limit="1e-3" />
<relative-convergence-measure data="Temperature" mesh="Neumann-Mesh" limit="1e-3" />
<acceleration:IQN-ILS>
<data name="Temperature" mesh="Neumann-Mesh" />
<initial-relaxation value="0.1" />
<max-used-iterations value="10" />
<time-windows-reused value="5" />
<!-- Don't reuse windows, because this can lead to failures, especially for small time steps -->
<time-windows-reused value="0" />
<filter type="QR2" limit="1e-3" />
</acceleration:IQN-ILS>
</coupling-scheme:serial-implicit>
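For illustration only: with render(dt=0.005) in doConvergenceStudy.py, Jinja2 substitutes the placeholder so the generated precice-config.xml contains a concrete window size, e.g.

<time-window-size value="0.005" />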
