Commit 99879a3

From jobflow to pyiron_base
1 parent: bc77c7e

File tree: 3 files changed (+273, -1 lines changed)

.github/workflows/jobflow.yml

Lines changed: 2 additions & 1 deletion
@@ -25,4 +25,5 @@ jobs:
 pip install -e python_workflow_definition
 conda install -c conda-forge jupyter papermill
 export ESPRESSO_PSEUDO=$(pwd)/espresso/pseudo
-papermill universal_qe_to_jobflow.ipynb universal_qe_to_jobflow_out.ipynb -k "python3"
+papermill universal_qe_to_jobflow.ipynb universal_qe_to_jobflow_out.ipynb -k "python3"
+papermill jobflow_to_pyiron_base.ipynb jobflow_to_pyiron_base_out.ipynb -k "python3"

jobflow_to_pyiron_base.ipynb

Lines changed: 247 additions & 0 deletions
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "000bbd4a-f53c-4eea-9d85-76f0aa2ca10b",
   "metadata": {},
   "outputs": [],
   "source": [
    "from jobflow import job, Flow"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b4a78447-e87c-4fb4-8d17-d9a280eb7254",
   "metadata": {},
   "outputs": [],
   "source": [
    "from pyiron_base import Project"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "06c2bd9e-b2ac-4b88-9158-fa37331c3418",
   "metadata": {},
   "outputs": [],
   "source": [
    "from python_workflow_definition.jobflow import write_workflow_json"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "fb6dbdaa-8cab-48b2-8307-448003eca3f5",
   "metadata": {},
   "outputs": [],
   "source": [
    "from python_workflow_definition.pyiron_base import load_workflow_json"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "fb847d49-7bf9-4839-9b99-c116d1b0e9ee",
   "metadata": {},
   "outputs": [],
   "source": [
    "from quantum_espresso_workflow import (\n",
    "    calculate_qe as _calculate_qe,\n",
    "    generate_structures as _generate_structures,\n",
    "    get_bulk_structure as _get_bulk_structure,\n",
    "    plot_energy_volume_curve as _plot_energy_volume_curve,\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "07598344-0f75-433b-8902-bea21a42088c",
   "metadata": {},
   "outputs": [],
   "source": [
    "calculate_qe = job(_calculate_qe, data=[\"energy\", \"volume\", \"structure\"])\n",
    "generate_structures = job(_generate_structures, data=[str(i) for i in range(100)])\n",
    "plot_energy_volume_curve = job(_plot_energy_volume_curve)\n",
    "get_bulk_structure = job(_get_bulk_structure)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e1ce0a51-0ab9-456c-81f9-d39875c3b709",
   "metadata": {},
   "outputs": [],
   "source": [
    "pseudopotentials = {\"Al\": \"Al.pbe-n-kjpaw_psl.1.0.0.UPF\"}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c03753c7-9936-4a80-9e4e-2ac56a7fc114",
   "metadata": {},
   "outputs": [],
   "source": [
    "structure = get_bulk_structure(\n",
    "    name=\"Al\",\n",
    "    a=4.05,\n",
    "    cubic=True,\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ecef1ed5-a8d3-48c3-9e01-4a40e55c1153",
   "metadata": {},
   "outputs": [],
   "source": [
    "calc_mini = calculate_qe(\n",
    "    working_directory=\"mini\",\n",
    "    input_dict={\n",
    "        \"structure\": structure.output,\n",
    "        \"pseudopotentials\": pseudopotentials,\n",
    "        \"kpts\": (3, 3, 3),\n",
    "        \"calculation\": \"vc-relax\",\n",
    "        \"smearing\": 0.02,\n",
    "    },\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2b88a30a-e26b-4802-89b7-79ca08cc0af9",
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "\n",
    "number_of_strains = 5\n",
    "structure_lst = generate_structures(\n",
    "    structure=calc_mini.output.structure,\n",
    "    strain_lst=np.linspace(0.9, 1.1, number_of_strains),\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "53e979ac-21db-4aa5-ae58-7cfc08dfa87b",
   "metadata": {},
   "outputs": [],
   "source": [
    "job_strain_lst = []\n",
    "for i in range(number_of_strains):\n",
    "    calc_strain = calculate_qe(\n",
    "        working_directory=\"strain_\" + str(i),\n",
    "        input_dict={\n",
    "            \"structure\": getattr(structure_lst.output, str(i)),\n",
    "            \"pseudopotentials\": pseudopotentials,\n",
    "            \"kpts\": (3, 3, 3),\n",
    "            \"calculation\": \"scf\",\n",
    "            \"smearing\": 0.02,\n",
    "        },\n",
    "    )\n",
    "    job_strain_lst.append(calc_strain)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "fbc5285c-7cc5-4318-acf8-06a48a4e2031",
   "metadata": {},
   "outputs": [],
   "source": [
    "plot = plot_energy_volume_curve(\n",
    "    volume_lst=[job.output.volume for job in job_strain_lst],\n",
    "    energy_lst=[job.output.energy for job in job_strain_lst],\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "299aef9c-7ae7-46f9-a66f-521b05b7fa1c",
   "metadata": {},
   "outputs": [],
   "source": [
    "flow = Flow([structure, calc_mini, structure_lst] + job_strain_lst + [plot])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e464da97-16a1-4772-9a07-0a47f152781d",
   "metadata": {},
   "outputs": [],
   "source": [
    "write_workflow_json(flow=flow, file_name=\"workflow_jobflow.json\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "bca646b2-0a9a-4271-966a-e5903a8c9031",
   "metadata": {},
   "outputs": [],
   "source": [
    "!cat workflow_jobflow.json"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f45684a8-2613-415a-ab0a-5cb2bafaffea",
   "metadata": {},
   "outputs": [],
   "source": [
    "pr = Project(\"test\")\n",
    "pr.remove_jobs(recursive=True, silently=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "8f2a621d-b533-4ddd-8bcd-c22db2f922ec",
   "metadata": {},
   "outputs": [],
   "source": [
    "delayed_object = load_workflow_json(project=pr, file_name=\"workflow_jobflow.json\")\n",
    "delayed_object.draw()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "cf80267d-c2b0-4236-bf1d-a57596985fc1",
   "metadata": {},
   "outputs": [],
   "source": [
    "delayed_object.pull()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
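
For readability, the new notebook in plain-script form: it builds the Al energy-volume workflow with jobflow (vc-relax of a cubic cell, SCF runs on five strained copies, energy-volume plot), exports the Flow with write_workflow_json, and re-executes the exported workflow with pyiron_base. The sketch below condenses the notebook cells and assumes the repository's quantum_espresso_workflow helper module and the ESPRESSO_PSEUDO pseudopotential setup from the CI workflow are in place:

import numpy as np
from jobflow import job, Flow
from pyiron_base import Project
from python_workflow_definition.jobflow import write_workflow_json
from python_workflow_definition.pyiron_base import load_workflow_json
from quantum_espresso_workflow import (
    calculate_qe as _calculate_qe,
    generate_structures as _generate_structures,
    get_bulk_structure as _get_bulk_structure,
    plot_energy_volume_curve as _plot_energy_volume_curve,
)

# Wrap the plain functions as jobflow jobs; the listed keys go to the data store.
calculate_qe = job(_calculate_qe, data=["energy", "volume", "structure"])
generate_structures = job(_generate_structures, data=[str(i) for i in range(100)])
get_bulk_structure = job(_get_bulk_structure)
plot_energy_volume_curve = job(_plot_energy_volume_curve)

pseudopotentials = {"Al": "Al.pbe-n-kjpaw_psl.1.0.0.UPF"}

# Relax a cubic Al cell, then run SCF calculations on strained copies.
structure = get_bulk_structure(name="Al", a=4.05, cubic=True)
calc_mini = calculate_qe(
    working_directory="mini",
    input_dict={
        "structure": structure.output,
        "pseudopotentials": pseudopotentials,
        "kpts": (3, 3, 3),
        "calculation": "vc-relax",
        "smearing": 0.02,
    },
)
number_of_strains = 5
structure_lst = generate_structures(
    structure=calc_mini.output.structure,
    strain_lst=np.linspace(0.9, 1.1, number_of_strains),
)
job_strain_lst = [
    calculate_qe(
        working_directory="strain_" + str(i),
        input_dict={
            "structure": getattr(structure_lst.output, str(i)),
            "pseudopotentials": pseudopotentials,
            "kpts": (3, 3, 3),
            "calculation": "scf",
            "smearing": 0.02,
        },
    )
    for i in range(number_of_strains)
]
plot = plot_energy_volume_curve(
    volume_lst=[j.output.volume for j in job_strain_lst],
    energy_lst=[j.output.energy for j in job_strain_lst],
)

# Export the jobflow Flow to the framework-neutral JSON format ...
flow = Flow([structure, calc_mini, structure_lst] + job_strain_lst + [plot])
write_workflow_json(flow=flow, file_name="workflow_jobflow.json")

# ... then load and execute the same workflow with pyiron_base.
pr = Project("test")
pr.remove_jobs(recursive=True, silently=True)
delayed_object = load_workflow_json(project=pr, file_name="workflow_jobflow.json")
delayed_object.draw()   # visualize the task graph
delayed_object.pull()   # run the workflow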

python_workflow_definition/src/python_workflow_definition/jobflow.py

Lines changed: 24 additions & 0 deletions
@@ -2,6 +2,7 @@
 from importlib import import_module
 from inspect import isfunction
 
+import numpy as np
 from jobflow import job, Flow
 
 
@@ -215,3 +216,26 @@ def load_workflow_json(file_name):
         source_handles_dict=source_handles_dict,
     )
     return Flow(task_lst)
+
+
+def write_workflow_json(flow, file_name="workflow.json"):
+    flow_dict = flow.as_dict()
+    function_dict = get_function_dict(flow=flow)
+    nodes_dict, nodes_mapping_dict = get_nodes_dict(function_dict=function_dict)
+    edges_lst, nodes_dict = get_edges_and_extend_nodes(
+        flow_dict=flow_dict,
+        nodes_mapping_dict=nodes_mapping_dict,
+        nodes_dict=nodes_dict,
+    )
+
+    nodes_store_dict = {}
+    for k, v in nodes_dict.items():
+        if isfunction(v):
+            nodes_store_dict[k] = v.__module__ + "." + v.__name__
+        elif isinstance(v, np.ndarray):
+            nodes_store_dict[k] = v.tolist()
+        else:
+            nodes_store_dict[k] = v
+
+    with open(file_name, "w") as f:
+        json.dump({"nodes": nodes_store_dict, "edges": edges_lst}, f)
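
The new write_workflow_json complements the module's existing load_workflow_json: it flattens a jobflow Flow into the framework-neutral {"nodes": ..., "edges": ...} document, storing Python functions as dotted "module.name" strings and converting numpy arrays to plain lists so the result is JSON-serializable. A minimal usage sketch follows; the add function is a hypothetical stand-in, not part of the repository:

from jobflow import job, Flow

from python_workflow_definition.jobflow import write_workflow_json


def add(x, y):
    # hypothetical example function, used only to build a tiny Flow
    return x + y


add_job = job(add)(x=1, y=2)
write_workflow_json(flow=Flow([add_job]), file_name="add_workflow.json")
# add_workflow.json now holds {"nodes": ..., "edges": ...}; the function node is
# stored as a dotted import path (e.g. "__main__.add"), so for a reloadable
# workflow the function should live in an importable module.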
