From ef4b0fbdae32e9b2619b3b0a4fb99592941720a4 Mon Sep 17 00:00:00 2001 From: Wanrun Jiang <58099845+Vibsteamer@users.noreply.github.com> Date: Tue, 31 Aug 2021 10:26:00 +0800 Subject: [PATCH 01/28] Update gen.py | compatibility with VASP (#529) for INCAR param of VASP, NSW = 0 and 1 both lead to the output of 1 converged SCF, thus 1 valid frame of labeled data. NSW = 0 (actually single-point calculation) was not supported to be used as the "md_incar". Though, the previous limitation might be reasonable in semantics, this update just support the practical branching in VASP settings and eliminates the annoying exception for users. --- dpgen/data/gen.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dpgen/data/gen.py b/dpgen/data/gen.py index 25c610c61..0437c5368 100644 --- a/dpgen/data/gen.py +++ b/dpgen/data/gen.py @@ -534,6 +534,8 @@ def coll_vasp_md(jdata) : #dlog.info("md_nstep", md_nstep) if nforce == md_nstep : valid_outcars.append(outcar) + elif md_nstep == 0 and nforce == 1 : + valid_outcars.append(outcar) else: dlog.info("WARNING : in directory %s nforce in OUTCAR is not equal to settings in INCAR"%(os.getcwd())) arg_cvt = " " From 6d3f187205fbbb07fef48069789f5e2535c8e4f3 Mon Sep 17 00:00:00 2001 From: Wanrun Jiang <58099845+Vibsteamer@users.noreply.github.com> Date: Tue, 31 Aug 2021 10:26:38 +0800 Subject: [PATCH 02/28] bug_fix | init_bulk | stages 3 | lost the last pert-dir in stages 2 (#530) pert_numb = jdata['pert_numb'] stages 2 pert makes 000000+pert_numb structures (dirs), pert_numb + 1 in total stages 3 was copying those in range(pert_numb), the first pert_numb structures(dir), lost the last one. --- dpgen/data/gen.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dpgen/data/gen.py b/dpgen/data/gen.py index 0437c5368..923deecb6 100644 --- a/dpgen/data/gen.py +++ b/dpgen/data/gen.py @@ -464,7 +464,7 @@ def make_vasp_md(jdata, mdata) : for ii in sys_ps : for jj in scale : - for kk in range(pert_numb) : + for kk in range(pert_numb+1) : path_work = path_md path_work = os.path.join(path_work, ii) path_work = os.path.join(path_work, "scale-%.3f" % jj) From b0f7eee953078f00fffdbc2c92fcf57c84d008e2 Mon Sep 17 00:00:00 2001 From: Jinzhe Zeng Date: Thu, 23 Sep 2021 16:03:16 -0400 Subject: [PATCH 03/28] ssh: archieve files the symlink points to instead of symlink iteself (#552) see deepmodeling/dpdispatcher#132 --- dpgen/dispatcher/SSHContext.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dpgen/dispatcher/SSHContext.py b/dpgen/dispatcher/SSHContext.py index 1ce2e5520..9e355b5ed 100644 --- a/dpgen/dispatcher/SSHContext.py +++ b/dpgen/dispatcher/SSHContext.py @@ -307,18 +307,18 @@ def _get_files(self, per_nfile = 100 ntar = len(files) // per_nfile + 1 if ntar <= 1: - self.block_checkcall('tar czf %s %s' % (of, " ".join(files))) + self.block_checkcall('tar czfh %s %s' % (of, " ".join(files))) else: of_tar = self.job_uuid + '.tar' for ii in range(ntar): ff = files[per_nfile * ii : per_nfile * (ii+1)] if ii == 0: # tar cf for the first time - self.block_checkcall('tar cf %s %s' % (of_tar, " ".join(ff))) + self.block_checkcall('tar cfh %s %s' % (of_tar, " ".join(ff))) else: # append using tar rf # -r, --append append files to the end of an archive - self.block_checkcall('tar rf %s %s' % (of_tar, " ".join(ff))) + self.block_checkcall('tar rfh %s %s' % (of_tar, " ".join(ff))) # compress the tar file using gzip, and will get a tar.gz file # overwrite considering dpgen may stop and restart # -f, --force force overwrite of output file and 
compress links From 62399d31716d9d51379abb3d35fd20bc209a98e6 Mon Sep 17 00:00:00 2001 From: Yingze Wang Date: Sun, 26 Sep 2021 08:03:08 +0800 Subject: [PATCH 04/28] Several enhancement in gromacs engine (#553) * Add type.raw for gromacs engine * Add restarting from .cpt in gmx calculation * Remove catch UnboundLocalError * Resolve conflicts --- dpgen/generator/run.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/dpgen/generator/run.py b/dpgen/generator/run.py index ab77da8f9..054692fd7 100644 --- a/dpgen/generator/run.py +++ b/dpgen/generator/run.py @@ -1140,7 +1140,7 @@ def _make_model_devi_native_gromacs(iter_index, jdata, mdata, conf_systems): create_path(task_path) gromacs_settings = jdata.get("gromacs_settings" , "") for key,file in gromacs_settings.items(): - if key != "traj_filename" and key != "mdp_filename" and key != "group_name": + if key != "traj_filename" and key != "mdp_filename" and key != "group_name" and key != "maxwarn": os.symlink(os.path.join(cc,file), os.path.join(task_path, file)) # input.json for DP-Gromacs with open(os.path.join(cc, "input.json")) as f: @@ -1160,6 +1160,8 @@ def _make_model_devi_native_gromacs(iter_index, jdata, mdata, conf_systems): mdp['nstenergy'] = trj_freq # dt mdp['dt'] = model_devi_dt + # nsteps + mdp['nsteps'] = nsteps # temps if "ref_t" in list(mdp.keys()): mdp["ref_t"] = tt @@ -1238,6 +1240,8 @@ def run_model_devi (iter_index, topol_filename = gromacs_settings.get("topol_filename", "processed.top") conf_filename = gromacs_settings.get("conf_filename", "conf.gro") index_filename = gromacs_settings.get("index_filename", "index.raw") + type_filename = gromacs_settings.get("type_filename", "type.raw") + ndx_filename = gromacs_settings.get("ndx_filename", "") # Initial reference to process pbc condition. # Default is em.tpr ref_filename = gromacs_settings.get("ref_filename", "em.tpr") @@ -1245,18 +1249,21 @@ def run_model_devi (iter_index, maxwarn = gromacs_settings.get("maxwarn", 1) traj_filename = gromacs_settings.get("traj_filename", "deepmd_traj.gro") grp_name = gromacs_settings.get("group_name", "Other") - nsteps = cur_job["nsteps"] trj_freq = cur_job.get("trj_freq", 10) command = "%s grompp -f %s -p %s -c %s -o %s -maxwarn %d" % (model_devi_exec, mdp_filename, topol_filename, conf_filename, deffnm, maxwarn) - command += "&& %s mdrun -deffnm %s -nsteps %d" %(model_devi_exec, deffnm, nsteps) - command += "&& echo -e \"%s\n%s\n\" | %s trjconv -s %s -f %s.trr -o %s -pbc mol -ur compact -center" % (grp_name, grp_name, model_devi_exec, ref_filename, deffnm, traj_filename) + command += "&& %s mdrun -deffnm %s -cpi" %(model_devi_exec, deffnm) + if ndx_filename: + command += f"&& echo -e \"{grp_name}\\n{grp_name}\\n\" | {model_devi_exec} trjconv -s {ref_filename} -f {deffnm}.trr -n {ndx_filename} -o {traj_filename} -pbc mol -ur compact -center" + else: + command += "&& echo -e \"%s\\n%s\\n\" | %s trjconv -s %s -f %s.trr -o %s -pbc mol -ur compact -center" % (grp_name, grp_name, model_devi_exec, ref_filename, deffnm, traj_filename) command += "&& if [ ! 
-d traj ]; then \n mkdir traj; fi\n" command += f"python -c \"import dpdata;system = dpdata.System('{traj_filename}', fmt='gromacs/gro'); [system.to_gromacs_gro('traj/%d.gromacstrj' % (i * {trj_freq}), frame_idx=i) for i in range(system.get_nframes())]; system.to_deepmd_npy('traj_deepmd')\"" command += f"&& dp model-devi -m ../graph.000.pb ../graph.001.pb ../graph.002.pb ../graph.003.pb -s traj_deepmd -o model_devi.out -f {trj_freq}" commands = [command] - forward_files = [mdp_filename, topol_filename, conf_filename, index_filename, ref_filename, "input.json", "job.json" ] + forward_files = [mdp_filename, topol_filename, conf_filename, index_filename, ref_filename, type_filename, "input.json", "job.json" ] + if ndx_filename: forward_files.append(ndx_filename) backward_files = ["%s.tpr" % deffnm, "%s.log" %deffnm , traj_filename, 'model_devi.out', "traj", "traj_deepmd" ] @@ -2693,7 +2700,6 @@ def post_fp_gaussian (iter_index, sys_output = glob.glob(os.path.join(work_path, "task.%s.*/output"%ss)) sys_output.sort() for idx,oo in enumerate(sys_output) : - # TODO : UnboundLocalError sometimes occurs when parsing gaussian log sys = dpdata.LabeledSystem(oo, fmt = 'gaussian/log') if len(sys) > 0: sys.check_type_map(type_map = jdata['type_map']) From a07b6c69455c34a13af3ba90a78981eb54c4a5a5 Mon Sep 17 00:00:00 2001 From: AnguseZhang <529133328@qq.com> Date: Wed, 6 Oct 2021 22:05:28 +0800 Subject: [PATCH 05/28] Change mv to cp when downloading softlinks. (#562) --- dpgen/dispatcher/LocalContext.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/dpgen/dispatcher/LocalContext.py b/dpgen/dispatcher/LocalContext.py index 4c341f7aa..136dea4b0 100644 --- a/dpgen/dispatcher/LocalContext.py +++ b/dpgen/dispatcher/LocalContext.py @@ -117,7 +117,13 @@ def download(self, pass elif (os.path.exists(rfile)) and (not os.path.exists(lfile)) : # trivial case, download happily - shutil.move(rfile, lfile) + # If the file to be downloaded is a softlink, `cp` should be performed instead of `mv`. + # Otherwise, `lfile` is still a file linked to some original file, + # and when this file's removed, `lfile` will be invalid. + if os.path.islink(rfile): + shutil.copyfile(rfile,lfile) + else: + shutil.move(rfile, lfile) elif (os.path.exists(rfile)) and (os.path.exists(lfile)) : # both exists, replace! dlog.info('find existing %s, replacing by %s' % (lfile, rfile)) From d498c6f65a7f8301b86a73892b7c6b72db681bef Mon Sep 17 00:00:00 2001 From: Jinzhe Zeng Date: Mon, 11 Oct 2021 19:43:29 -0400 Subject: [PATCH 06/28] fix the usage of `pair_coeff` (#567) The bare `pair_coeff` is not supported in the latest version of LAMMPS. `pair_coeff * *` should be used instead. Fix deepmodeling/deepmd-kit#1201. 
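
For reference, with four graph files the pair-style section emitted by make_lammps_input now reads roughly as follows (a sketch only; the actual graph list, optional keywords and output frequency depend on the job settings):

    pair_style deepmd graph.000.pb graph.001.pb graph.002.pb graph.003.pb out_freq ${THERMO_FREQ} out_file model_devi.out
    pair_coeff * *
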
--- dpgen/generator/lib/lammps.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dpgen/generator/lib/lammps.py b/dpgen/generator/lib/lammps.py index 3190d9f01..3b80b8d10 100644 --- a/dpgen/generator/lib/lammps.py +++ b/dpgen/generator/lib/lammps.py @@ -85,7 +85,7 @@ def make_lammps_input(ensemble, if ele_temp_a is not None: keywords += "aparam ${ELE_TEMP}" ret+= "pair_style deepmd %s out_freq ${THERMO_FREQ} out_file model_devi.out %s\n" % (graph_list, keywords) - ret+= "pair_coeff \n" + ret+= "pair_coeff * *\n" ret+= "\n" ret+= "thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz\n" ret+= "thermo ${THERMO_FREQ}\n" From eacf7bc2744c3b62dfc05c84fb4e059427446305 Mon Sep 17 00:00:00 2001 From: AnguseZhang <529133328@qq.com> Date: Tue, 12 Oct 2021 07:45:28 +0800 Subject: [PATCH 07/28] Fix examples in INCAR_md and POTCAR (#570) * Change mv to cp when downloading softlinks. * Add explanations relevant to POTCAR's examples. * Fix CH4's vasp-md INCAR example. --- examples/init/INCAR_methane.md | 30 ++++++------------- examples/run/dp1.x-lammps-vasp/CH4/POT_C | 2 +- examples/run/dp1.x-lammps-vasp/CH4/POT_H | 2 +- .../CH4/param_CH4_deepmd-kit-1.1.0.json | 1 + 4 files changed, 12 insertions(+), 23 deletions(-) diff --git a/examples/init/INCAR_methane.md b/examples/init/INCAR_methane.md index 9831387aa..a0e3ca29b 100644 --- a/examples/init/INCAR_methane.md +++ b/examples/init/INCAR_methane.md @@ -1,33 +1,21 @@ PREC=A -ENCUT=400.000000 +ENCUT=400 ISYM=0 -ALGO=fast -EDIFF=1E-6 -LREAL=F +ALGO=Fast +EDIFF=1.000000e-06 +LREAL=False NPAR=4 KPAR=1 - -NELM=200 -ISTART=0 -ICHARG=2 +NELM=120 +NELMIN=4 ISIF=2 ISMEAR=0 -SIGMA=0.200000 +SIGMA=0.20000 IBRION=0 -MAXMIX=50 -NBLOCK=1 -KBLOCK=100 - -SMASS=0 -POTIM=2g -TEBEG=50 -TEEND=50 - +POTIM=0.5 NSW=10 - LWAVE=F LCHARG=F PSTRESS=0 - KSPACING=0.500000 -KGAMMA=F +KGAMMA=.FALSE. diff --git a/examples/run/dp1.x-lammps-vasp/CH4/POT_C b/examples/run/dp1.x-lammps-vasp/CH4/POT_C index 8b1378917..4c6399dc0 100644 --- a/examples/run/dp1.x-lammps-vasp/CH4/POT_C +++ b/examples/run/dp1.x-lammps-vasp/CH4/POT_C @@ -1 +1 @@ - +# We can only provide empty files in public, you should provide valid POTCARS yourself when running DP-GEN. diff --git a/examples/run/dp1.x-lammps-vasp/CH4/POT_H b/examples/run/dp1.x-lammps-vasp/CH4/POT_H index 8b1378917..81ae32edd 100644 --- a/examples/run/dp1.x-lammps-vasp/CH4/POT_H +++ b/examples/run/dp1.x-lammps-vasp/CH4/POT_H @@ -1 +1 @@ - +# We can only provide empty files in public, you should provide valid POTCARS yourself when running DP-GEN. 
\ No newline at end of file diff --git a/examples/run/dp1.x-lammps-vasp/CH4/param_CH4_deepmd-kit-1.1.0.json b/examples/run/dp1.x-lammps-vasp/CH4/param_CH4_deepmd-kit-1.1.0.json index b80946710..e63c6236b 100644 --- a/examples/run/dp1.x-lammps-vasp/CH4/param_CH4_deepmd-kit-1.1.0.json +++ b/examples/run/dp1.x-lammps-vasp/CH4/param_CH4_deepmd-kit-1.1.0.json @@ -134,5 +134,6 @@ "POTCAR_H", "POTCAR_C" ], + "_comments" : "We can only provide empty files in public, you should provide valid POTCARS yourself when running DP-GEN", "fp_incar": "/data1/yfb222333/2_dpgen_gpu_multi/INCAR_methane" } From cb677742dfdb900bb1d07402aa12fbc516d02d6b Mon Sep 17 00:00:00 2001 From: shazj99 Date: Tue, 12 Oct 2021 07:50:44 +0800 Subject: [PATCH 08/28] fix dispatcher bugs in autotest run (#568) * fix typo in github actions release to conda * Get from_poscar_path only when from_poscar is true (#537) Change-Id: I17774bee345634e4e72bd783e8112eefaaf9f0d3 Co-authored-by: Zhengju Sha * fix dispatcher bugs in autotest run Change-Id: I0e48ec74d0a0d0d6c26988c6e8c28428a362b5d9 Co-authored-by: Han Wang Co-authored-by: felix5572 Co-authored-by: Zhengju Sha --- .github/workflows/release.yml | 2 +- dpgen/auto_test/common_equi.py | 58 +++++++++++++++++----------------- dpgen/data/gen.py | 2 +- 3 files changed, 31 insertions(+), 31 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 939a76699..714f75ee7 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -6,7 +6,7 @@ on: - 'v*' jobs: - publish-conda: + conda-publish: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 diff --git a/dpgen/auto_test/common_equi.py b/dpgen/auto_test/common_equi.py index 103e16dcc..437178309 100644 --- a/dpgen/auto_test/common_equi.py +++ b/dpgen/auto_test/common_equi.py @@ -159,35 +159,35 @@ def run_equi(confs, disp = make_dispatcher(machine, resources, work_path, [run_tasks[ii]], group_size) print("%s --> Runing... "%(work_path)) - api_version = mdata.get('api_version', '0.9') - if LooseVersion(api_version) < LooseVersion('1.0'): - warnings.warn(f"the dpdispatcher will be updated to new version." - f"And the interface may be changed. Please check the documents for more details") - disp.run_jobs(resources, - command, - work_path, - [run_tasks[ii]], - group_size, - forward_common_files, - forward_files, - backward_files, - outlog='outlog', - errlog='errlog') - elif LooseVersion(api_version) >= LooseVersion('1.0'): - submission = make_submission( - mdata_machine=machine, - mdata_resource=resources, - commands=[command], - work_path=work_path, - run_tasks=run_tasks, - group_size=group_size, - forward_common_files=forward_common_files, - forward_files=forward_files, - backward_files=backward_files, - outlog = 'outlog', - errlog = 'errlog' - ) - submission.run_submission() + api_version = mdata.get('api_version', '0.9') + if LooseVersion(api_version) < LooseVersion('1.0'): + warnings.warn(f"the dpdispatcher will be updated to new version." + f"And the interface may be changed. 
Please check the documents for more details") + disp.run_jobs(resources, + command, + work_path, + [run_tasks[ii]], + group_size, + forward_common_files, + forward_files, + backward_files, + outlog='outlog', + errlog='errlog') + elif LooseVersion(api_version) >= LooseVersion('1.0'): + submission = make_submission( + mdata_machine=machine, + mdata_resource=resources, + commands=[command], + work_path=work_path, + run_tasks=run_tasks, + group_size=group_size, + forward_common_files=forward_common_files, + forward_files=forward_files, + backward_files=backward_files, + outlog = 'outlog', + errlog = 'errlog' + ) + submission.run_submission() def post_equi(confs, inter_param): diff --git a/dpgen/data/gen.py b/dpgen/data/gen.py index 923deecb6..1cac205d0 100644 --- a/dpgen/data/gen.py +++ b/dpgen/data/gen.py @@ -80,9 +80,9 @@ def out_dir_name(jdata) : from_poscar = False if 'from_poscar' in jdata : from_poscar = jdata['from_poscar'] - from_poscar_path = jdata['from_poscar_path'] if from_poscar: + from_poscar_path = jdata['from_poscar_path'] poscar_name = os.path.basename(from_poscar_path) cell_str = "%02d" % (super_cell[0]) for ii in range(1,len(super_cell)) : From f0d0701f3ef371890c80cdfe252f6e040e7ad39b Mon Sep 17 00:00:00 2001 From: shazj99 Date: Fri, 15 Oct 2021 08:03:08 +0800 Subject: [PATCH 09/28] autotest: fix early return and result checking (#574) Change-Id: I4430b2c2e20da6cb28640dbc6139e62bed5357a2 Co-authored-by: Zhengju Sha --- dpgen/auto_test/common_prop.py | 23 +++++++---------------- 1 file changed, 7 insertions(+), 16 deletions(-) diff --git a/dpgen/auto_test/common_prop.py b/dpgen/auto_test/common_prop.py index 00f439d37..92f802275 100644 --- a/dpgen/auto_test/common_prop.py +++ b/dpgen/auto_test/common_prop.py @@ -112,6 +112,7 @@ def run_property(confs, conf_dirs.sort() task_list = [] work_path_list = [] + multiple_ret = [] for ii in conf_dirs: sepline(ch=ii, screen=True) for jj in property_list: @@ -159,7 +160,7 @@ def run_property(confs, all_task = tmp_task_list run_tasks = util.collect_task(all_task, inter_type) if len(run_tasks) == 0: - return + continue else: ret = pool.apply_async(worker, (work_path, all_task, @@ -169,23 +170,13 @@ def run_property(confs, mdata, inter_type, )) - # run_tasks = [os.path.basename(ii) for ii in all_task] - # machine, resources, command, group_size = util.get_machine_info(mdata, inter_type) - # disp = make_dispatcher(machine, resources, work_path, run_tasks, group_size) - # disp.run_jobs(resources, - # command, - # work_path, - # run_tasks, - # group_size, - # forward_common_files, - # forward_files, - # backward_files, - # outlog='outlog', - # errlog='errlog') + multiple_ret.append(ret) pool.close() pool.join() - if ret.successful(): - print('finished') + for ii in range(len(multiple_ret)): + if not multiple_ret[ii].successful(): + raise RuntimeError("Job %d is not successful!" 
% ii) + print('%d jobs are finished' % len(multiple_ret)) def worker(work_path, From bb1277db9b248cd3803748cbf142a32e45073246 Mon Sep 17 00:00:00 2001 From: shazj99 Date: Fri, 15 Oct 2021 08:06:42 +0800 Subject: [PATCH 10/28] autotest: run relaxation tasks in parallel (#573) Change-Id: I9ef3f65729cc2e2f553ea7ac038e77d219ad4712 Co-authored-by: Zhengju Sha --- dpgen/auto_test/common_equi.py | 96 ++++++++++++++++++++++------------ 1 file changed, 63 insertions(+), 33 deletions(-) diff --git a/dpgen/auto_test/common_equi.py b/dpgen/auto_test/common_equi.py index 437178309..ec4010c70 100644 --- a/dpgen/auto_test/common_equi.py +++ b/dpgen/auto_test/common_equi.py @@ -2,6 +2,7 @@ import os import warnings from monty.serialization import dumpfn +from multiprocessing import Pool import dpgen.auto_test.lib.crys as crys import dpgen.auto_test.lib.util as util @@ -111,6 +112,47 @@ def make_equi(confs, inter.make_input_file(ii, 'relaxation', relax_param) +def worker(work_path, + run_task, + forward_common_files, + forward_files, + backward_files, + mdata, + inter_type): + machine, resources, command, group_size = util.get_machine_info(mdata, inter_type) + disp = make_dispatcher(machine, resources, work_path, [run_task], group_size) + print("%s --> Runing... " % (work_path)) + + api_version = mdata.get('api_version', '0.9') + if LooseVersion(api_version) < LooseVersion('1.0'): + warnings.warn(f"the dpdispatcher will be updated to new version." + f"And the interface may be changed. Please check the documents for more details") + disp.run_jobs(resources, + command, + work_path, + [run_task], + group_size, + forward_common_files, + forward_files, + backward_files, + outlog='outlog', + errlog='errlog') + elif LooseVersion(api_version) >= LooseVersion('1.0'): + submission = make_submission( + mdata_machine=machine, + mdata_resource=resources, + commands=[command], + work_path=work_path, + run_tasks=run_task, + group_size=group_size, + forward_common_files=forward_common_files, + forward_files=forward_files, + backward_files=backward_files, + outlog='outlog', + errlog='errlog' + ) + submission.run_submission() + def run_equi(confs, inter_param, mdata): @@ -120,6 +162,11 @@ def run_equi(confs, for conf in confs: conf_dirs.extend(glob.glob(conf)) conf_dirs.sort() + + processes = len(conf_dirs) + pool = Pool(processes=processes) + print("Submit job via %d processes" % processes) + # generate a list of task names like mp-xxx/relaxation/relax_task # ... work_path_list = [] @@ -150,45 +197,28 @@ def run_equi(confs, if len(run_tasks) == 0: return else: - # if LooseVersion() run_tasks = [os.path.basename(ii) for ii in all_task] machine, resources, command, group_size = util.get_machine_info(mdata, inter_type) print('%d tasks will be submited '%len(run_tasks)) + multiple_ret = [] for ii in range(len(work_path_list)): work_path = work_path_list[ii] - disp = make_dispatcher(machine, resources, work_path, [run_tasks[ii]], group_size) - print("%s --> Runing... "%(work_path)) - - api_version = mdata.get('api_version', '0.9') - if LooseVersion(api_version) < LooseVersion('1.0'): - warnings.warn(f"the dpdispatcher will be updated to new version." - f"And the interface may be changed. 
Please check the documents for more details") - disp.run_jobs(resources, - command, - work_path, - [run_tasks[ii]], - group_size, - forward_common_files, - forward_files, - backward_files, - outlog='outlog', - errlog='errlog') - elif LooseVersion(api_version) >= LooseVersion('1.0'): - submission = make_submission( - mdata_machine=machine, - mdata_resource=resources, - commands=[command], - work_path=work_path, - run_tasks=run_tasks, - group_size=group_size, - forward_common_files=forward_common_files, - forward_files=forward_files, - backward_files=backward_files, - outlog = 'outlog', - errlog = 'errlog' - ) - submission.run_submission() + ret = pool.apply_async(worker, (work_path, + run_tasks[ii], + forward_common_files, + forward_files, + backward_files, + mdata, + inter_type, + )) + multiple_ret.append(ret) + pool.close() + pool.join() + for ii in range(len(multiple_ret)): + if not multiple_ret[ii].successful(): + raise RuntimeError("Task %d is not successful! work_path: %s " % (ii, work_path_list[ii])) + print('finished') def post_equi(confs, inter_param): # find all POSCARs and their name like mp-xxx From 0cb32d6df80d65021cd59b963eec032c8b71383b Mon Sep 17 00:00:00 2001 From: shazj99 Date: Fri, 15 Oct 2021 08:07:22 +0800 Subject: [PATCH 11/28] autotest: rename task directory to avoid false result in a rerun (#572) Change-Id: Ia138ee7c31b6c41d9f41f5943affa9ebf8803c46 Co-authored-by: Zhengju Sha --- dpgen/auto_test/common_equi.py | 7 +++---- dpgen/auto_test/common_prop.py | 6 ++---- 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/dpgen/auto_test/common_equi.py b/dpgen/auto_test/common_equi.py index ec4010c70..8754bb2e6 100644 --- a/dpgen/auto_test/common_equi.py +++ b/dpgen/auto_test/common_equi.py @@ -13,6 +13,8 @@ from distutils.version import LooseVersion from dpgen.dispatcher.Dispatcher import make_submission from dpgen.remote.decide_machine import convert_mdata +from dpgen.auto_test.lib.utils import create_path + lammps_task_type = ['deepmd', 'meam', 'eam_fs', 'eam_alloy'] @@ -78,10 +80,7 @@ def make_equi(confs, if not os.path.exists(poscar): raise FileNotFoundError('no configuration for autotest') relax_dirs = os.path.abspath(os.path.join(ii, 'relaxation', 'relax_task')) # to be consistent with property in make dispatcher - if os.path.exists(relax_dirs): - dlog.warning('%s already exists' % relax_dirs) - else: - os.makedirs(relax_dirs) + create_path(relax_dirs) task_dirs.append(relax_dirs) os.chdir(relax_dirs) # copy POSCARs to mp-xxx/relaxation/relax_task diff --git a/dpgen/auto_test/common_prop.py b/dpgen/auto_test/common_prop.py index 92f802275..ffb767c58 100644 --- a/dpgen/auto_test/common_prop.py +++ b/dpgen/auto_test/common_prop.py @@ -15,6 +15,7 @@ from dpgen.dispatcher.Dispatcher import make_dispatcher from dpgen.dispatcher.Dispatcher import make_submission from dpgen.remote.decide_machine import convert_mdata +from dpgen.auto_test.lib.utils import create_path lammps_task_type = ['deepmd', 'meam', 'eam_fs', 'eam_alloy'] @@ -73,10 +74,7 @@ def make_property(confs, path_to_equi = os.path.join(ii, 'relaxation', 'relax_task') path_to_work = os.path.join(ii, property_type + '_' + suffix) - if os.path.exists(path_to_work): - dlog.warning('%s already exists' % path_to_work) - else: - os.makedirs(path_to_work) + create_path(path_to_work) prop = make_property_instance(jj) task_list = prop.make_confs(path_to_work, path_to_equi, do_refine) From 931f3e73ae5a18ae96493389e38099095c98f9fe Mon Sep 17 00:00:00 2001 From: AnguseZhang <529133328@qq.com> Date: Mon, 18 Oct 2021 
22:43:28 +0800 Subject: [PATCH 12/28] Fix bug in make_submission of Auto-test (#577) * Fix bug in make_submission of Auto-test --- dpgen/auto_test/common_equi.py | 2 +- dpgen/auto_test/common_prop.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dpgen/auto_test/common_equi.py b/dpgen/auto_test/common_equi.py index 8754bb2e6..d5dba8227 100644 --- a/dpgen/auto_test/common_equi.py +++ b/dpgen/auto_test/common_equi.py @@ -139,7 +139,7 @@ def worker(work_path, elif LooseVersion(api_version) >= LooseVersion('1.0'): submission = make_submission( mdata_machine=machine, - mdata_resource=resources, + mdata_resources=resources, commands=[command], work_path=work_path, run_tasks=run_task, diff --git a/dpgen/auto_test/common_prop.py b/dpgen/auto_test/common_prop.py index ffb767c58..bda655cf3 100644 --- a/dpgen/auto_test/common_prop.py +++ b/dpgen/auto_test/common_prop.py @@ -204,7 +204,7 @@ def worker(work_path, elif LooseVersion(api_version) >= LooseVersion('1.0'): submission = make_submission( mdata_machine=machine, - mdata_resource=resources, + mdata_resources=resources, commands=[command], work_path=work_path, run_tasks=run_tasks, From a28f26e7ec7544e34b829fad6fa40df5bce0c51f Mon Sep 17 00:00:00 2001 From: cyFortneu <33978601+maki49@users.noreply.github.com> Date: Wed, 20 Oct 2021 08:02:49 +0800 Subject: [PATCH 13/28] update abacus-scf generator (#575) * update abacus_scf * add new parameters in test cases * add assertion info --- dpgen/generator/lib/abacus_pw_scf.py | 95 --------------- dpgen/generator/lib/abacus_scf.py | 141 ++++++++++++++++++++++ dpgen/generator/run.py | 20 +-- tests/generator/param-methane-abacus.json | 18 ++- tests/generator/test_make_fp.py | 15 ++- tests/generator/test_post_fp.py | 2 +- 6 files changed, 179 insertions(+), 112 deletions(-) delete mode 100644 dpgen/generator/lib/abacus_pw_scf.py create mode 100644 dpgen/generator/lib/abacus_scf.py diff --git a/dpgen/generator/lib/abacus_pw_scf.py b/dpgen/generator/lib/abacus_pw_scf.py deleted file mode 100644 index 5db58cc10..000000000 --- a/dpgen/generator/lib/abacus_pw_scf.py +++ /dev/null @@ -1,95 +0,0 @@ -import numpy as np -bohr2ang = 0.52917721067 -def make_abacus_pw_scf_kpt(fp_params): - # Make KPT file for abacus pw scf calculation. - # KPT file is the file containing k points infomation in ABACUS scf calculation. - k_points = [1, 1, 1, 0, 0, 0] - if "k_points" in fp_params: - k_points = fp_params["k_points"] - if len(k_points) != 6: - raise RuntimeError("k_points has to be a list containig 6 integers specifying MP k points generation.") - ret = "K_POINTS\n0\nGamma\n" - for i in range(6): - ret += str(k_points[i]) + " " - return ret - -def make_abacus_pw_scf_input(fp_params): - # Make INPUT file for abacus pw scf calculation. 
- ret = "INPUT_PARAMETERS\n" - assert(fp_params['ntype'] >= 0 and type(fp_params["ntype"]) == int) - ret += "ntype %d\n" % fp_params['ntype'] - ret += "pseudo_dir ./\n" - if "ecutwfc" in fp_params: - assert(fp_params["ecutwfc"] >= 0) - ret += "ecutwfc %f\n" % fp_params["ecutwfc"] - if "mixing_type" in fp_params: - assert(fp_params["mixing_type"] in ["plain", "kerker", "pulay", "pulay-kerker", "broyden"]) - ret += "mixing_type %s\n" % fp_params["mixing_type"] - if "mixing_beta" in fp_params: - assert(fp_params["mixing_beta"] >= 0 and fp_params["mixing_beta"] < 1) - ret += "mixing_beta %f\n" % fp_params["mixing_beta"] - if "symmetry" in fp_params: - assert(fp_params["symmetry"] == 0 or fp_params["symmetry"] == 1) - ret += "symmetry %d\n" % fp_params["symmetry"] - if "nbands" in fp_params: - assert(fp_params["nbands"] > 0 and type(fp_params["nbands"]) == int) - ret += "nbands %f\n" % fp_params["nbands"] - if "nspin" in fp_params: - assert(fp_params["nspin"] == 1 or fp_params["nspin"] == 2 or fp_params["nspin"] == 4) - ret += "nspin %d\n" % fp_params["nspin"] - if "ks_solver" in fp_params: - assert(fp_params["ks_solver"] in ["cg", "dav", "lapack", "genelpa", "hpseps", "scalapack_gvx"]) - ret += "ks_solver %s\n" % fp_params["ks_solver"] - if "smearing" in fp_params: - assert(fp_params["smearing"] in ["gauss", "fd", "fixed", "mp", "mp2", "mv"]) - ret += "smearing %s\n" % fp_params["smearing"] - if "sigma" in fp_params: - assert(fp_params["sigma"] >= 0) - ret += "sigma %f\n" % fp_params["sigma"] - ret += "force 1\nstress 1\n" - return ret - -def make_abacus_pw_scf_stru(sys_data, fp_pp_files): - atom_names = sys_data['atom_names'] - atom_numbs = sys_data['atom_numbs'] - assert(len(atom_names) == len(fp_pp_files)) - assert(len(atom_names) == len(atom_numbs)) - cell = sys_data["cells"][0].reshape([3, 3]) - coord = sys_data['coords'][0] - #volume = np.linalg.det(cell) - #lattice_const = np.power(volume, 1/3) - lattice_const = 1/bohr2ang # in Bohr, in this way coord and cell are in Angstrom - - ret = "ATOMIC_SPECIES\n" - for iatom in range(len(atom_names)): - ret += atom_names[iatom] + " 1.00 " + fp_pp_files[iatom] + "\n" - - ret += "\nLATTICE_CONSTANT\n" - ret += str(lattice_const) + "\n\n" - - ret += "LATTICE_VECTORS\n" - for ix in range(3): - for iy in range(3): - ret += str(cell[ix][iy]) + " " - ret += "\n" - ret += "\n" - - ret += "ATOMIC_POSITIONS\n" - ret += "Cartesian # Cartesian(Unit is LATTICE_CONSTANT)\n" - natom_tot = 0 - for iele in range(len(atom_names)): - ret += atom_names[iele] + "\n" - ret += "0.0\n" - ret += str(atom_numbs[iele]) + "\n" - for iatom in range(atom_numbs[iele]): - ret += "%.12f %.12f %.12f %d %d %d\n" % (coord[natom_tot, 0], coord[natom_tot, 1], coord[natom_tot, 2], 0, 0, 0) - natom_tot += 1 - assert(natom_tot == sum(atom_numbs)) - - return ret - - -if __name__ == "__main__": - fp_params = {"k_points": [1, 1, 1, 0, 0, 0]} - ret = make_abacus_pw_scf_kpt(fp_params) - print(ret) \ No newline at end of file diff --git a/dpgen/generator/lib/abacus_scf.py b/dpgen/generator/lib/abacus_scf.py new file mode 100644 index 000000000..8e68f91aa --- /dev/null +++ b/dpgen/generator/lib/abacus_scf.py @@ -0,0 +1,141 @@ +import numpy as np +bohr2ang = 0.52917721067 +def make_abacus_scf_kpt(fp_params): + # Make KPT file for abacus pw scf calculation. + # KPT file is the file containing k points infomation in ABACUS scf calculation. 
+ k_points = [1, 1, 1, 0, 0, 0] + if "k_points" in fp_params: + k_points = fp_params["k_points"] + if len(k_points) != 6: + raise RuntimeError("k_points has to be a list containig 6 integers specifying MP k points generation.") + ret = "K_POINTS\n0\nGamma\n" + for i in range(6): + ret += str(k_points[i]) + " " + return ret + +def make_abacus_scf_input(fp_params): + # Make INPUT file for abacus pw scf calculation. + ret = "INPUT_PARAMETERS\n" + ret += "calculation scf\n" + assert(fp_params['ntype'] >= 0 and type(fp_params["ntype"]) == int), "'ntype' should be a positive integer." + ret += "ntype %d\n" % fp_params['ntype'] + #ret += "pseudo_dir ./\n" + if "ecutwfc" in fp_params: + assert(fp_params["ecutwfc"] >= 0) , "'ntype' should be non-negative." + ret += "ecutwfc %f\n" % fp_params["ecutwfc"] + if "dr2" in fp_params: + ret += "dr2 %e\n" % fp_params["dr2"] + if "niter" in fp_params: + assert(fp_params['niter'] >= 0 and type(fp_params["niter"])== int), "'niter' should be a positive integer." + ret += "niter %d\n" % fp_params["niter"] + if "basis_type" in fp_params: + assert(fp_params["basis_type"] in ["pw", "lcao", "lcao_in_pw"]) , "'basis_type' must in 'pw', 'lcao' or 'lcao_in_pw'." + ret+= "basis_type %s\n" % fp_params["basis_type"] + if "dft_functional" in fp_params: + ret += "dft_functional %s\n" % fp_params["dft_functional"] + if "gamma_only" in fp_params: + assert(fp_params["gamma_only"] ==1 ) , "'gamma_only' should be 1. Multi-k algorithm will be supported after the KPT generator is completed." + ret+= "gamma_only %d\n" % fp_params["gamma_only"] + if "mixing_type" in fp_params: + assert(fp_params["mixing_type"] in ["plain", "kerker", "pulay", "pulay-kerker", "broyden"]) + ret += "mixing_type %s\n" % fp_params["mixing_type"] + if "mixing_beta" in fp_params: + assert(fp_params["mixing_beta"] >= 0 and fp_params["mixing_beta"] < 1), "'mixing_beta' should between 0 and 1." + ret += "mixing_beta %f\n" % fp_params["mixing_beta"] + if "symmetry" in fp_params: + assert(fp_params["symmetry"] == 0 or fp_params["symmetry"] == 1), "'symmetry' should be either 0 or 1." + ret += "symmetry %d\n" % fp_params["symmetry"] + if "nbands" in fp_params: + assert(fp_params["nbands"] > 0 and type(fp_params["nbands"]) == int), "'nbands' should be a positive integer." + ret += "nbands %d\n" % fp_params["nbands"] + if "nspin" in fp_params: + assert(fp_params["nspin"] == 1 or fp_params["nspin"] == 2 or fp_params["nspin"] == 4), "'nspin' can anly take 1, 2 or 4" + ret += "nspin %d\n" % fp_params["nspin"] + if "ks_solver" in fp_params: + assert(fp_params["ks_solver"] in ["cg", "dav", "lapack", "genelpa", "hpseps", "scalapack_gvx"]), "'ks_sover' should in 'cgx', 'dav', 'lapack', 'genelpa', 'hpseps', 'scalapack_gvx'." + ret += "ks_solver %s\n" % fp_params["ks_solver"] + if "smearing" in fp_params: + assert(fp_params["smearing"] in ["gaussian", "fd", "fixed", "mp", "mp2", "mv"]), "'smearing' should in 'gaussian', 'fd', 'fixed', 'mp', 'mp2', 'mv'. " + ret += "smearing %s\n" % fp_params["smearing"] + if "sigma" in fp_params: + assert(fp_params["sigma"] >= 0), "'sigma' should be non-negative." + ret += "sigma %f\n" % fp_params["sigma"] + if "force" in fp_params: + assert(fp_params["force"] == 0 or fp_params["force"] == 1), "'force' should be either 0 or 1." + ret += "force %d\n" % fp_params["force"] + if "stress" in fp_params: + assert(fp_params["stress"] == 0 or fp_params["stress"] == 1), "'stress' should be either 0 or 1." 
+ ret += "stress %d\n" % fp_params["stress"] + #paras for deepks + if "out_descriptor" in fp_params: + assert(fp_params["out_descriptor"] == 0 or fp_params["out_descriptor"] == 1), "'out_descriptor' should be either 0 or 1." + ret += "out_descriptor %d\n" % fp_params["out_descriptor"] + if "lmax_descriptor" in fp_params: + assert(fp_params["lmax_descriptor"] >= 0), "'lmax_descriptor' should be a positive integer." + ret += "lmax_descriptor %d\n" % fp_params["lmax_descriptor"] + if "deepks_scf" in fp_params: + assert(fp_params["deepks_scf"] == 0 or fp_params["deepks_scf"] == 1), "'deepks_scf' should be either 0 or 1." + ret += "deepks_scf %d\n" % fp_params["deepks_scf"] + if "model_file" in fp_params: + ret += "model_file %s\n" % fp_params["model_file"] + return ret + +def make_abacus_scf_stru(sys_data, fp_pp_files, fp_params): + atom_names = sys_data['atom_names'] + atom_numbs = sys_data['atom_numbs'] + assert(len(atom_names) == len(fp_pp_files)), "the number of pp_files must be equal to the number of atom types. " + assert(len(atom_names) == len(atom_numbs)), "Please check the name of atoms. " + cell = sys_data["cells"][0].reshape([3, 3]) + coord = sys_data['coords'][0] + #volume = np.linalg.det(cell) + #lattice_const = np.power(volume, 1/3) + + ret = "ATOMIC_SPECIES\n" + for iatom in range(len(atom_names)): + ret += atom_names[iatom] + " 1.00 " + fp_pp_files[iatom] + "\n" + ret += "\n" + + if "lattice_constant" in fp_params: + ret += "\nLATTICE_CONSTANT\n" + ret += str(fp_params["lattice_constant"]) + "\n\n" # in Bohr, in this way coord and cell are in Angstrom + else: + ret += "\nLATTICE_CONSTANT\n" + ret += str(1/bohr2ang) + "\n\n" + + ret += "LATTICE_VECTORS\n" + for ix in range(3): + for iy in range(3): + ret += str(cell[ix][iy]) + " " + ret += "\n" + ret += "\n" + + ret += "ATOMIC_POSITIONS\n" + ret += "Cartesian # Cartesian(Unit is LATTICE_CONSTANT)\n" + ret += "\n" + natom_tot = 0 + for iele in range(len(atom_names)): + ret += atom_names[iele] + "\n" + ret += "0.0\n" + ret += str(atom_numbs[iele]) + "\n" + for iatom in range(atom_numbs[iele]): + ret += "%.12f %.12f %.12f %d %d %d\n" % (coord[natom_tot, 0], coord[natom_tot, 1], coord[natom_tot, 2], 0, 0, 0) + natom_tot += 1 + assert(natom_tot == sum(atom_numbs)) + + if "basis_type" in fp_params and fp_params["basis_type"]=="lcao": + ret +="\nNUMERICAL_ORBITAL\n" + assert(len(fp_params["orb_files"])==len(atom_names)) + for iatom in range(len(atom_names)): + ret += fp_params["orb_files"][iatom] +"\n" + + if "deepks_scf" in fp_params and fp_params["out_descriptor"]==1: + ret +="\nNUMERICAL_DESCRIPTOR\n" + ret +=fp_params["proj_file"][0]+"\n" + + return ret + + +if __name__ == "__main__": + fp_params = {"k_points": [1, 1, 1, 0, 0, 0]} + ret = make_abacus_scf_kpt(fp_params) + print(ret) \ No newline at end of file diff --git a/dpgen/generator/run.py b/dpgen/generator/run.py index 054692fd7..6c5530a63 100644 --- a/dpgen/generator/run.py +++ b/dpgen/generator/run.py @@ -45,7 +45,7 @@ from dpgen.generator.lib.vasp import make_vasp_incar_user_dict from dpgen.generator.lib.vasp import incar_upper from dpgen.generator.lib.pwscf import make_pwscf_input -from dpgen.generator.lib.abacus_pw_scf import make_abacus_pw_scf_stru, make_abacus_pw_scf_input, make_abacus_pw_scf_kpt +from dpgen.generator.lib.abacus_scf import make_abacus_scf_stru, make_abacus_scf_input, make_abacus_scf_kpt #from dpgen.generator.lib.pwscf import cvt_1frame from dpgen.generator.lib.pwmat import make_pwmat_input_dict from dpgen.generator.lib.pwmat import write_input_dict @@ 
-2063,7 +2063,7 @@ def make_fp_pwscf(iter_index, # link pp files _link_fp_vasp_pp(iter_index, jdata) -def make_fp_abacus_pw_scf(iter_index, +def make_fp_abacus_scf(iter_index, jdata) : # make config fp_tasks = _make_fp_vasp_configs(iter_index, jdata) @@ -2084,13 +2084,13 @@ def make_fp_abacus_pw_scf(iter_index, sys_data = dpdata.System('POSCAR').data if 'mass_map' in jdata: sys_data['atom_masses'] = jdata['mass_map'] - ret_input = make_abacus_pw_scf_input(fp_params) + ret_input = make_abacus_scf_input(fp_params) with open('INPUT', 'w') as fp: fp.write(ret_input) - ret_kpt = make_abacus_pw_scf_kpt(fp_params) + ret_kpt = make_abacus_scf_kpt(fp_params) with open("KPT", "w") as fp: fp.write(ret_kpt) - ret_stru = make_abacus_pw_scf_stru(sys_data, fp_pp_files) + ret_stru = make_abacus_scf_stru(sys_data, fp_pp_files, fp_params) with open("STRU", "w") as fp: fp.write(ret_stru) @@ -2224,7 +2224,7 @@ def make_fp (iter_index, elif fp_style == "pwscf" : make_fp_pwscf(iter_index, jdata) elif fp_style == "abacus/scf" : - make_fp_abacus_pw_scf(iter_index, jdata) + make_fp_abacus_scf(iter_index, jdata) elif fp_style == "siesta" : make_fp_siesta(iter_index, jdata) elif fp_style == "gaussian" : @@ -2262,7 +2262,7 @@ def _qe_check_fin(ii) : return False return True -def _abacus_pw_scf_check_fin(ii) : +def _abacus_scf_check_fin(ii) : if os.path.isfile(os.path.join(ii, 'OUT.ABACUS/running_scf.log')) : with open(os.path.join(ii, 'OUT.ABACUS/running_scf.log'), 'r') as fp : content = fp.read() @@ -2407,7 +2407,7 @@ def run_fp (iter_index, elif fp_style == "abacus/scf": forward_files = ["INPUT", "STRU", "KPT"] + fp_pp_files backward_files = ["output", "OUT.ABACUS"] - run_fp_inner(iter_index, jdata, mdata, forward_files, backward_files, _abacus_pw_scf_check_fin, log_file = 'output') + run_fp_inner(iter_index, jdata, mdata, forward_files, backward_files, _abacus_scf_check_fin, log_file = 'output') elif fp_style == "siesta": forward_files = ['input'] + fp_pp_files backward_files = ['output'] @@ -2582,7 +2582,7 @@ def post_fp_pwscf (iter_index, all_sys.to_deepmd_raw(sys_data_path) all_sys.to_deepmd_npy(sys_data_path, set_size = len(sys_output)) -def post_fp_abacus_pw_scf (iter_index, +def post_fp_abacus_scf (iter_index, jdata): model_devi_jobs = jdata['model_devi_jobs'] assert (iter_index < len(model_devi_jobs)) @@ -2828,7 +2828,7 @@ def post_fp (iter_index, elif fp_style == "pwscf" : post_fp_pwscf(iter_index, jdata) elif fp_style == "abacus/scf": - post_fp_abacus_pw_scf(iter_index, jdata) + post_fp_abacus_scf(iter_index, jdata) elif fp_style == "siesta": post_fp_siesta(iter_index, jdata) elif fp_style == 'gaussian' : diff --git a/tests/generator/param-methane-abacus.json b/tests/generator/param-methane-abacus.json index 2fca7d924..ab8c4d34e 100644 --- a/tests/generator/param-methane-abacus.json +++ b/tests/generator/param-methane-abacus.json @@ -131,17 +131,29 @@ "fp_task_max": 30, "fp_task_min": 8, "fp_pp_path": ".", - "fp_pp_files": [ "H_HSCV_PBE-1.0.UPF","C_HSCV_PBE-1.0.UPF"], + "fp_pp_files": [ "./H_HSCV_PBE-1.0.UPF","./C_HSCV_PBE-1.0.UPF"], "user_fp_params":{ + "lattice_constant":1, "ntype": 2, "ecutwfc": 80, - "mixing_type": "pulay", + "dr2": 1e-7, + "niter": 50, + "basis_type": "pw", + "gamma_only": true, + "dft_functional": "pbe", + "mixing_type": "pulay", "mixing_beta": 0.4, "symmetry": 1, "nbands": 5, "nspin": 1, "ks_solver": "cg", "smearing": "fixed", - "sigma": 0.001 + "sigma": 0.001, + "force":1, + "stress":1, + "out_descriptor":0, + "lmax_descriptor":0, + "deepks_scf":0, + "model_file":"model.ptg" } } 
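
Besides the plane-wave settings exercised in this example, the updated generator also recognizes LCAO- and DeePKS-related keys (basis_type, orb_files, out_descriptor, lmax_descriptor, proj_file, deepks_scf, model_file). A hypothetical user_fp_params fragment for an LCAO run could look as follows; the orbital and projector file names are placeholders, not files shipped with dpgen:

    "user_fp_params":{
        "_comment": "orbital/projector file names below are placeholders",
        "ntype": 2,
        "ecutwfc": 80,
        "basis_type": "lcao",
        "gamma_only": 1,
        "orb_files": ["H_orbital.orb", "C_orbital.orb"],
        "out_descriptor": 1,
        "lmax_descriptor": 2,
        "proj_file": ["jle.orb"],
        "deepks_scf": 1,
        "model_file": "model.ptg",
        "force": 1,
        "stress": 1
    }
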
diff --git a/tests/generator/test_make_fp.py b/tests/generator/test_make_fp.py index 914c9b149..1687b695d 100644 --- a/tests/generator/test_make_fp.py +++ b/tests/generator/test_make_fp.py @@ -150,19 +150,28 @@ IN.PSP3 = N.SG15.PBE.UPF\n"; abacus_input_ref = "INPUT_PARAMETERS\n\ +calculation scf\n\ ntype 2\n\ -pseudo_dir ./\n\ ecutwfc 80.000000\n\ +dr2 1.000000e-07\n\ +niter 50\n\ +basis_type pw\n\ +dft_functional pbe\n\ +gamma_only 1\n\ mixing_type pulay\n\ mixing_beta 0.400000\n\ symmetry 1\n\ -nbands 5.000000\n\ +nbands 5\n\ nspin 1\n\ ks_solver cg\n\ smearing fixed\n\ sigma 0.001000\n\ force 1\n\ -stress 1\n" +stress 1\n\ +out_descriptor 0\n\ +lmax_descriptor 0\n\ +deepks_scf 0\n\ +model_file model.ptg\n" abacus_kpt_ref = "K_POINTS\n\ 0\n\ diff --git a/tests/generator/test_post_fp.py b/tests/generator/test_post_fp.py index 8c14889f6..f0028db92 100644 --- a/tests/generator/test_post_fp.py +++ b/tests/generator/test_post_fp.py @@ -7,7 +7,7 @@ __package__ = 'generator' from .context import post_fp from .context import post_fp_pwscf -from .context import post_fp_abacus_pw_scf +from .context import post_fp_abacus_scf from .context import post_fp_siesta from .context import post_fp_vasp from .context import post_fp_gaussian From cceeba74c912ef1c6a33d79ea5494ddc8db02fa0 Mon Sep 17 00:00:00 2001 From: AnguseZhang <529133328@qq.com> Date: Thu, 21 Oct 2021 16:18:59 +0800 Subject: [PATCH 14/28] Add ase, custodian, GromacsWrapper in conda packages. (#578) * Add ase, custodian, GromacsWrapper in conda packages. --- conda/meta.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/conda/meta.yaml b/conda/meta.yaml index c599c642f..e5480ba7a 100644 --- a/conda/meta.yaml +++ b/conda/meta.yaml @@ -25,6 +25,9 @@ requirements: - requests - dpdata - dpdispatcher + - ase + - GromacsWrapper + - custodian run: - python >=3.6 @@ -34,6 +37,9 @@ requirements: - dpdata - dpdispatcher - pymatgen + - ase + - GromacsWrapper + - custodian test: imports: From 90ea59ea451cf5834af1b1ebf538df8a1103593d Mon Sep 17 00:00:00 2001 From: Zhiwei Zhang <43613336+KZHIWEI@users.noreply.github.com> Date: Thu, 21 Oct 2021 16:19:20 +0800 Subject: [PATCH 15/28] add lebesgue ch4 example (#571) * add lebesgue ch4 example * fix readme nad machine file * fix machine.json --- examples/CH4-lebesgue/CH4.POSCAR | 1 + examples/CH4-lebesgue/INCAR_methane | 1 + examples/CH4-lebesgue/INCAR_methane.md | 1 + examples/CH4-lebesgue/INCAR_methane.rlx | 1 + examples/CH4-lebesgue/README.md | 1 + examples/CH4-lebesgue/init.json | 1 + .../CH4-lebesgue/lebesgue_v2_machine.json | 1 + .../param_CH4_deepmd-kit-2.0.1.json | 1 + .../DeePMD-kit-2.x/lebesgue_v2_machine.json | 108 ++++++++++++++ .../lebesgue_v2_machine_README.md | 24 ++++ .../param_CH4_deepmd-kit-2.0.1.json | 136 ++++++++++++++++++ 11 files changed, 276 insertions(+) create mode 100644 examples/CH4-lebesgue/CH4.POSCAR create mode 100644 examples/CH4-lebesgue/INCAR_methane create mode 100644 examples/CH4-lebesgue/INCAR_methane.md create mode 100644 examples/CH4-lebesgue/INCAR_methane.rlx create mode 100644 examples/CH4-lebesgue/README.md create mode 100644 examples/CH4-lebesgue/init.json create mode 100644 examples/CH4-lebesgue/lebesgue_v2_machine.json create mode 100644 examples/CH4-lebesgue/param_CH4_deepmd-kit-2.0.1.json create mode 100644 examples/machine/DeePMD-kit-2.x/lebesgue_v2_machine.json create mode 100644 examples/machine/DeePMD-kit-2.x/lebesgue_v2_machine_README.md create mode 100644 examples/run/dp2.x-lammps-vasp/param_CH4_deepmd-kit-2.0.1.json diff --git 
a/examples/CH4-lebesgue/CH4.POSCAR b/examples/CH4-lebesgue/CH4.POSCAR new file mode 100644 index 000000000..2f9def780 --- /dev/null +++ b/examples/CH4-lebesgue/CH4.POSCAR @@ -0,0 +1 @@ +../init/CH4.POSCAR \ No newline at end of file diff --git a/examples/CH4-lebesgue/INCAR_methane b/examples/CH4-lebesgue/INCAR_methane new file mode 100644 index 000000000..b946fb7e5 --- /dev/null +++ b/examples/CH4-lebesgue/INCAR_methane @@ -0,0 +1 @@ +../run/dp1.x-lammps-vasp/CH4/INCAR_methane \ No newline at end of file diff --git a/examples/CH4-lebesgue/INCAR_methane.md b/examples/CH4-lebesgue/INCAR_methane.md new file mode 100644 index 000000000..4c48cdd86 --- /dev/null +++ b/examples/CH4-lebesgue/INCAR_methane.md @@ -0,0 +1 @@ +../init/INCAR_methane.md \ No newline at end of file diff --git a/examples/CH4-lebesgue/INCAR_methane.rlx b/examples/CH4-lebesgue/INCAR_methane.rlx new file mode 100644 index 000000000..e44202f43 --- /dev/null +++ b/examples/CH4-lebesgue/INCAR_methane.rlx @@ -0,0 +1 @@ +../init/INCAR_methane.rlx \ No newline at end of file diff --git a/examples/CH4-lebesgue/README.md b/examples/CH4-lebesgue/README.md new file mode 100644 index 000000000..d550d16f0 --- /dev/null +++ b/examples/CH4-lebesgue/README.md @@ -0,0 +1 @@ +../machine/DeePMD-kit-2.x/lebesgue_v2_machine_README.md \ No newline at end of file diff --git a/examples/CH4-lebesgue/init.json b/examples/CH4-lebesgue/init.json new file mode 100644 index 000000000..72cc77ef5 --- /dev/null +++ b/examples/CH4-lebesgue/init.json @@ -0,0 +1 @@ +../init/ch4.json \ No newline at end of file diff --git a/examples/CH4-lebesgue/lebesgue_v2_machine.json b/examples/CH4-lebesgue/lebesgue_v2_machine.json new file mode 100644 index 000000000..02f838b49 --- /dev/null +++ b/examples/CH4-lebesgue/lebesgue_v2_machine.json @@ -0,0 +1 @@ +../machine/DeePMD-kit-2.x/lebesgue_v2_machine.json \ No newline at end of file diff --git a/examples/CH4-lebesgue/param_CH4_deepmd-kit-2.0.1.json b/examples/CH4-lebesgue/param_CH4_deepmd-kit-2.0.1.json new file mode 100644 index 000000000..1b19d3d66 --- /dev/null +++ b/examples/CH4-lebesgue/param_CH4_deepmd-kit-2.0.1.json @@ -0,0 +1 @@ +../run/dp2.x-lammps-vasp/param_CH4_deepmd-kit-2.0.1.json \ No newline at end of file diff --git a/examples/machine/DeePMD-kit-2.x/lebesgue_v2_machine.json b/examples/machine/DeePMD-kit-2.x/lebesgue_v2_machine.json new file mode 100644 index 000000000..6b9ead467 --- /dev/null +++ b/examples/machine/DeePMD-kit-2.x/lebesgue_v2_machine.json @@ -0,0 +1,108 @@ +{ + "api_version": "1.0", + "deepmd_version": "2.0.1", + "train" :[ + { + "command": "dp", + "machine": { + "batch_type": "DpCloudServer", + "context_type": "DpCloudServerContext", + "local_root" : "./", + "remote_profile":{ + "email": "your lebesgue register email", + "password": "your lebesgue password", + "program_id": 0, + "input_data":{ + "api_version":2, + "job_type": "indicate", + "log_file": "00*/train.log", + "grouped":true, + "job_name": "dpgen_train_job", + "disk_size": 100, + "scass_type":"c8_m60_1 * NVIDIA P100", + "platform": "ali", + "image_name":"deepmd-kit:2.0.1", + "on_demand":0 + } + } + }, + "resources": { + "number_node": 1, + "local_root":"./", + "cpu_per_node": 4, + "gpu_per_node": 1, + "queue_name": "GPU", + "group_size": 1 + } + }], + "model_devi": + [{ + "command": "lmp -i input.lammps -v restart 0", + "machine": { + "batch_type": "DpCloudServer", + "context_type": "DpCloudServerContext", + "local_root" : "./", + "remote_profile":{ + "email": "your lebesgue register email", + "password": "your lebesgue 
password", + "program_id": 0, + "input_data":{ + "api_version":2, + "job_type": "indicate", + "log_file": "*/model_devi.log", + "grouped":true, + "job_name": "dpgen_model_devi_job", + "disk_size": 200, + "scass_type":"c8_m60_1 * NVIDIA P100", + "platform": "ali", + "image_name":"deepmd-kit:2.0.1", + "on_demand":0 + } + } + }, + "resources": { + "number_node": 1, + "local_root":"./", + "cpu_per_node": 4, + "gpu_per_node": 1, + "queue_name": "GPU", + "group_size": 5 + } + }], + "fp": + [{ + "command": "mpirun -n 16 vasp_std", + "machine": { + "batch_type": "DpCloudServer", + "context_type": "DpCloudServerContext", + "local_root" : "./", + "remote_profile":{ + "email": "your lebesgue register email", + "password": "your lebesgue password", + "program_id": 0, + "input_data":{ + "api_version":2, + "job_type": "indicate", + "log_file": "task*/fp.log", + "grouped":true, + "job_name": "dpgen_fp_job", + "disk_size": 100, + "scass_type":"c8_m8_cpu", + "platform": "ali", + "image_name":"vasp:5.4.4", + "on_demand":0 + } + } + }, + "resources": { + "number_node": 1, + "cpu_per_node": 32, + "gpu_per_node": 0, + "queue_name": "CPU", + "group_size": 5, + "local_root":"./", + "source_list": ["/opt/intel/oneapi/setvars.sh"] + } + } + ] +} diff --git a/examples/machine/DeePMD-kit-2.x/lebesgue_v2_machine_README.md b/examples/machine/DeePMD-kit-2.x/lebesgue_v2_machine_README.md new file mode 100644 index 000000000..3c81f5d04 --- /dev/null +++ b/examples/machine/DeePMD-kit-2.x/lebesgue_v2_machine_README.md @@ -0,0 +1,24 @@ +# Config machine.json file in order to submit task to lebesgue platform. + +You can login to lebesgue official website http://lebesgue.dp.tech/ . Then click [Function]-[DPGEN]-[manual]\(On the top left corner of the function panel\) from left navigator bar http://lebesgue.dp.tech/#/jobs/dpgen. + +Below is the description of each json fields, please visit official documentation for more information and update. + +| field | optional | type | description | +| --- | --- | --- | --- | +| email | false | string | your lebesgue login email | +| password | false | string | your lebesgue login password (note this is not your remote machine login password) | +| program_id | false | int | your program id(int) you can find it in [Lebesgue Official website]-[Programs]-[Program ID] to view your program id | +| api_version | true | int| (default 1) the api_version inside input_data is different from the outside one, which is used to decide which api version will be called to lebesgue. lebesgue currently support version 1 and 2, and version 1 will be deprecate in the future. | +| job_group_id | true | int | config this to specific job_group so submitted jobs can be view as a whole group in the webpage. +| rerun | true | int | if the submitted job terminate unsuccessfully, does it need to be rerun. 
+| job_type | false | string | job type, should be indicate | +| log_file | true | string | the location of log file, where you can view the log in webpage | +| job_name | false | string | job group name | +| on_demand | true | int | default:0, 0:use spot machine 1:use ondemand machine | +| image_name | true/false | int | image name, necessary when platform is ali or aws, optional when platform is sugon | +| disk_size | true/false | int | disk size (GB), necessary when platform is ali or aws, optional when platform is sugon | +| scass_type | false | string | machine configuration, about scass_type, you can find them on [lebesgue official website] - [Finance]-[Price calculator] to select disire machine configuration. invalid when instance_group_id is present | +| instance_group_id | true | int | group of scass type | +| platform | false | string | avaliable platform: "aws" "ali" "sugon" | +| grouped | false | bool | weather group same task in to one job group. | \ No newline at end of file diff --git a/examples/run/dp2.x-lammps-vasp/param_CH4_deepmd-kit-2.0.1.json b/examples/run/dp2.x-lammps-vasp/param_CH4_deepmd-kit-2.0.1.json new file mode 100644 index 000000000..15f682cdc --- /dev/null +++ b/examples/run/dp2.x-lammps-vasp/param_CH4_deepmd-kit-2.0.1.json @@ -0,0 +1,136 @@ +{ + "type_map": [ + "H", + "C" + ], + "mass_map": [ + 1, + 12 + ], + "init_data_prefix": "./", + "init_data_sys": [ + "CH4.POSCAR.01x01x01/02.md/sys-0004-0001/deepmd" + ], + "sys_configs_prefix": "./", + "sys_configs": [ + [ + "CH4.POSCAR.01x01x01/01.scale_pert/sys-0004-0001/scale*/00000*/POSCAR" + ], + [ + "CH4.POSCAR.01x01x01/01.scale_pert/sys-0004-0001/scale*/00001*/POSCAR" + ] + ], + "_comment": " that's all ", + "numb_models": 4, + "default_training_param": { + "model": { + "type_map": [ + "H", + "C" + ], + "descriptor": { + "type": "se_a", + "sel": [ + 16, + 4 + ], + "rcut_smth": 0.5, + "rcut": 5.0, + "neuron": [ + 120, + 120, + 120 + ], + "resnet_dt": true, + "axis_neuron": 12, + "seed": 1 + }, + "fitting_net": { + "neuron": [ + 25, + 50, + 100 + ], + "resnet_dt": false, + "seed": 1 + } + }, + "learning_rate": { + "type": "exp", + "start_lr": 0.001, + "decay_steps": 100 + }, + "loss": { + "start_pref_e": 0.02, + "limit_pref_e": 2, + "start_pref_f": 1000, + "limit_pref_f": 1, + "start_pref_v": 0.0, + "limit_pref_v": 0.0 + }, + "training": { + "set_prefix": "set", + "stop_batch": 2000, + "batch_size": 1, + "disp_file": "lcurve.out", + "disp_freq": 1000, + "numb_test": 4, + "save_freq": 1000, + "save_ckpt": "model.ckpt", + "disp_training": true, + "time_training": true, + "profiling": false, + "profiling_file": "timeline.json", + "_comment": "that's all" + } + }, + "model_devi_dt": 0.002, + "model_devi_skip": 0, + "model_devi_f_trust_lo": 0.05, + "model_devi_f_trust_hi": 0.15, + "model_devi_e_trust_lo": 10000000000.0, + "model_devi_e_trust_hi": 10000000000.0, + "model_devi_clean_traj": true, + "model_devi_jobs": [ + { + "sys_idx": [ + 0 + ], + "temps": [ + 100 + ], + "press": [ + 1.0 + ], + "trj_freq": 10, + "nsteps": 300, + "ensemble": "nvt", + "_idx": "00" + }, + { + "sys_idx": [ + 1 + ], + "temps": [ + 100 + ], + "press": [ + 1.0 + ], + "trj_freq": 10, + "nsteps": 3000, + "ensemble": "nvt", + "_idx": "01" + } + ], + "fp_style": "vasp", + "shuffle_poscar": false, + "fp_task_max": 20, + "fp_task_min": 5, + "fp_pp_path": "./", + "fp_pp_files": [ + "POTCAR_H", + "POTCAR_C" + ], + "fp_incar": "./INCAR_methane" +} From 2fd6c7e74b14a249a2138448ed95692f428481cd Mon Sep 17 00:00:00 2001 From: shazj99 Date: Tue, 26 Oct 
2021 08:20:29 +0800 Subject: [PATCH 16/28] remove jr.json to allow rerun in autotest make (#579) Change-Id: I26e8f7120703f131ffb005e4c0c927c108821ae0 Co-authored-by: Zhengju Sha --- dpgen/auto_test/common_equi.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dpgen/auto_test/common_equi.py b/dpgen/auto_test/common_equi.py index d5dba8227..ce5df94e8 100644 --- a/dpgen/auto_test/common_equi.py +++ b/dpgen/auto_test/common_equi.py @@ -79,6 +79,9 @@ def make_equi(confs, poscar = os.path.abspath(os.path.join(ii, 'POSCAR')) if not os.path.exists(poscar): raise FileNotFoundError('no configuration for autotest') + if os.path.exists(os.path.join(ii, 'relaxation', 'jr.json')): + os.remove(os.path.join(ii, 'relaxation', 'jr.json')) + relax_dirs = os.path.abspath(os.path.join(ii, 'relaxation', 'relax_task')) # to be consistent with property in make dispatcher create_path(relax_dirs) task_dirs.append(relax_dirs) From 7aec0915ca630987db4509f1fde814c6ee261825 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Yuan=20Fengbo=20=28=E8=A2=81=E5=A5=89=E5=8D=9A=29?= <757627927@qq.com> Date: Wed, 3 Nov 2021 07:51:21 +0800 Subject: [PATCH 17/28] will convert local_root's './' to abspath to avoid hash conflict in new dpdispatcher (#582) --- dpgen/dispatcher/Dispatcher.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/dpgen/dispatcher/Dispatcher.py b/dpgen/dispatcher/Dispatcher.py index 655761816..239654b26 100644 --- a/dpgen/dispatcher/Dispatcher.py +++ b/dpgen/dispatcher/Dispatcher.py @@ -346,11 +346,17 @@ def make_dispatcher(mdata, mdata_resource=None, work_path=None, run_tasks=None, def make_submission(mdata_machine, mdata_resources, commands, work_path, run_tasks, group_size, forward_common_files, forward_files, backward_files, outlog, errlog): - machine = Machine.load_from_dict(mdata_machine) - resources = Resources.load_from_dict(mdata_resources) - if mdata_machine['local_root'] != './': raise RuntimeError(f"local_root must be './' in dpgen's machine.json.") + + abs_local_root = os.path.abspath('./') + + abs_mdata_machine = mdata_machine.copy() + abs_mdata_machine['local_root'] = abs_local_root + + machine = Machine.load_from_dict(abs_mdata_machine) + resources = Resources.load_from_dict(mdata_resources) + command = "&&".join(commands) From 5ce256ca4edc46b3bca1c9a4b235c2807664ef83 Mon Sep 17 00:00:00 2001 From: Zhiwei Zhang <43613336+KZHIWEI@users.noreply.github.com> Date: Fri, 5 Nov 2021 17:05:13 +0800 Subject: [PATCH 18/28] model_devi_clean_traj support int type (#583) * model_devi_clean_traj support buffer * fix clean index * update readme --- README.md | 2 +- dpgen/generator/run.py | 17 +++++++++++++---- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 52d608313..120578145 100644 --- a/README.md +++ b/README.md @@ -557,7 +557,7 @@ The bold notation of key (such aas **type_map**) means that it's a necessary key | model_devi_perc_candi_f | Float | 0.0 | See `model_devi_adapt_trust_lo`.| | model_devi_perc_candi_v | Float | 0.0 | See `model_devi_adapt_trust_lo`.| | model_devi_f_avg_relative | Boolean | False | Normalized the force model deviations by the RMS force magnitude along the trajectory. This key should not be used with `use_relative`. | -| **model_devi_clean_traj** | Boolean | true | Deciding whether to clean traj folders in MD since they are too large. 
| +| **model_devi_clean_traj** | Boolean or Int | true | If it is a Boolean, it denotes whether to clean traj folders in MD since they are too large. If it is an Int, only the traj folders of the most recent n iterations are retained; the others are removed. | +| **model_devi_nopbc** | Boolean | False | Assume open boundary condition in MD simulations. | +| model_devi_activation_func | List of list of string | [["tanh","tanh"],["tanh","gelu"],["gelu","tanh"],["gelu","gelu"]] | Set activation functions for models; the length of the List should be the same as `numb_models`, and the two elements in each list of strings respectively assign activation functions to the embedding and fitting nets within each model. *Backward compatibility*: the original "List of String" format is still supported, where embedding and fitting nets of one model use the same activation function, and the length of the List should be the same as `numb_models`| +| **model_devi_jobs** | [
{
"sys_idx": [0],
"temps":
[100],
"press":
[1],
"trj_freq":
10,
"nsteps":
1000,
"ensembles":
"nvt"
},
...
] | List of dict | Settings for exploration in `01.model_devi`. Each dict in the list corresponds to one iteration. The index of `model_devi_jobs` exactly accord with index of iterations | diff --git a/dpgen/generator/run.py b/dpgen/generator/run.py index 6c5530a63..f6185d36d 100644 --- a/dpgen/generator/run.py +++ b/dpgen/generator/run.py @@ -2840,15 +2840,24 @@ def post_fp (iter_index, else : raise RuntimeError ("unsupported fp style") # clean traj - iter_name = make_iter_name(iter_index) clean_traj = True if 'model_devi_clean_traj' in jdata : clean_traj = jdata['model_devi_clean_traj'] - if clean_traj: - modd_path = os.path.join(iter_name, model_devi_name) + modd_path = None + if isinstance(clean_traj, bool): + iter_name = make_iter_name(iter_index) + if clean_traj: + modd_path = os.path.join(iter_name, model_devi_name) + elif isinstance(clean_traj, int): + clean_index = iter_index - clean_traj + if clean_index >= 0: + modd_path = os.path.join(make_iter_name(clean_index), model_devi_name) + if modd_path is not None: md_trajs = glob.glob(os.path.join(modd_path, 'task*/traj')) - for ii in md_trajs : + for ii in md_trajs: shutil.rmtree(ii) + + def set_version(mdata): From 23054a10a99821f25ab46280606f2b8c09dbc627 Mon Sep 17 00:00:00 2001 From: Jinzhe Zeng Date: Thu, 11 Nov 2021 18:44:24 -0500 Subject: [PATCH 19/28] add a citation badge (#589) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 120578145..fe06f8cd9 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ [![GitHub release](https://img.shields.io/github/release/deepmodeling/dpgen.svg?maxAge=86400)](https://github.com/deepmodeling/dpgen/releases/) [![doi:10.1016/j.cpc.2020.107206](https://img.shields.io/badge/DOI-10.1016%2Fj.cpc.2020.107206-blue)](https://doi.org/10.1016/j.cpc.2020.107206) +![Citations](https://citations.njzjz.win/10.1016/j.cpc.2020.107206) [![conda install](https://img.shields.io/conda/dn/conda-forge/dpgen?label=conda%20install)](https://anaconda.org/conda-forge/dpgen) [![pip install](https://img.shields.io/pypi/dm/dpgen?label=pip%20install)](https://pypi.org/project/dpgen) From c6ce120f3fb550a4b5ba91d61180d6bde9b4aee5 Mon Sep 17 00:00:00 2001 From: Yongbin Zhuang <38876805+robinzyb@users.noreply.github.com> Date: Thu, 25 Nov 2021 13:38:28 +0800 Subject: [PATCH 20/28] fix bug issue:597 (#598) * fix bug issue:597 * fix energies mismatch bug for dpdata cp2k unittest * fix issue 597 --- .../out_data_post_fp_cp2k/02.fp/task.000.000000/output | 2 +- tests/generator/out_data_post_fp_cp2k/orig/energy.raw | 2 +- tests/generator/out_data_post_fp_cp2k/orig/force.raw | 2 +- tests/generator/out_data_post_fp_cp2k/orig/type_map.raw | 4 +++- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/generator/out_data_post_fp_cp2k/02.fp/task.000.000000/output b/tests/generator/out_data_post_fp_cp2k/02.fp/task.000.000000/output index 03543b509..9a5bdd19e 100644 --- a/tests/generator/out_data_post_fp_cp2k/02.fp/task.000.000000/output +++ b/tests/generator/out_data_post_fp_cp2k/02.fp/task.000.000000/output @@ -877,7 +877,7 @@ Total energy: -31.04389446913810 - *** WARNING in qs_scf.F:542 :: SCF run NOT converged *** + *** SCF run converged in 29 steps *** !-----------------------------------------------------------------------------! 
diff --git a/tests/generator/out_data_post_fp_cp2k/orig/energy.raw b/tests/generator/out_data_post_fp_cp2k/orig/energy.raw index 48fcacaef..362e9a65e 100644 --- a/tests/generator/out_data_post_fp_cp2k/orig/energy.raw +++ b/tests/generator/out_data_post_fp_cp2k/orig/energy.raw @@ -1 +1 @@ --8.447581865210245269e+02 +-8.447582606992446017e+02 diff --git a/tests/generator/out_data_post_fp_cp2k/orig/force.raw b/tests/generator/out_data_post_fp_cp2k/orig/force.raw index 6fa5a5550..0ad41126e 100644 --- a/tests/generator/out_data_post_fp_cp2k/orig/force.raw +++ b/tests/generator/out_data_post_fp_cp2k/orig/force.raw @@ -1 +1 @@ -6.917655894184945309e-01 7.553510274301609151e-01 2.274964525840567742e+00 -3.209682908751355646e-01 2.775192185863639693e-01 9.094058723076186013e-01 -6.332575659257858036e-02 -6.449617417274867703e-01 -8.145295946268114040e-01 -1.756150875299649639e-01 -3.121818029385921012e-01 -2.260577661002467487e+00 1.112470571386751095e+00 1.688658903147764123e+00 -1.721519658405964881e+00 -1.208800036573186576e+00 -1.778025306758221902e+00 1.601097413955125504e+00 +6.917656471387700901e-01 7.553510904559461725e-01 2.274964715661485837e+00 -3.209683176564304130e-01 2.775192417423087421e-01 9.094059481875964579e-01 -6.332576187642065257e-02 -6.449617955424929994e-01 -8.145296625904011600e-01 -1.756151021831240944e-01 -3.121818289867506202e-01 -2.260577849622959601e+00 1.112470664210258198e+00 1.688659044047888358e+00 -1.721519802047959624e+00 -1.208800137434334454e+00 -1.778025455114995435e+00 1.601097547549200639e+00 diff --git a/tests/generator/out_data_post_fp_cp2k/orig/type_map.raw b/tests/generator/out_data_post_fp_cp2k/orig/type_map.raw index dc4df7f2f..101da0cbf 100644 --- a/tests/generator/out_data_post_fp_cp2k/orig/type_map.raw +++ b/tests/generator/out_data_post_fp_cp2k/orig/type_map.raw @@ -1 +1,3 @@ -C H N +C +H +N From 08cf615b93b27c8c7babe5c91db63646ffa90a74 Mon Sep 17 00:00:00 2001 From: Han Wang <92130845+wanghan-iapcm@users.noreply.github.com> Date: Thu, 25 Nov 2021 14:57:34 +0800 Subject: [PATCH 21/28] bump dpdata version to 0.2.4 (#595) * bump dpdata version to 0.2.4 * fix AssertionErrors in TestPostGaussian (#5) * fix bug issue:597 (#598) (#6) * fix bug issue:597 * fix energies mismatch bug for dpdata cp2k unittest * fix issue 597 Co-authored-by: Yongbin Zhuang <38876805+robinzyb@users.noreply.github.com> Co-authored-by: Han Wang Co-authored-by: Jinzhe Zeng Co-authored-by: Han Wang Co-authored-by: Yongbin Zhuang <38876805+robinzyb@users.noreply.github.com> --- setup.py | 2 +- tests/generator/out_data_post_fp_gaussian/orig/energy.raw | 2 +- tests/generator/out_data_post_fp_gaussian/orig/nopbc | 0 3 files changed, 2 insertions(+), 2 deletions(-) create mode 100644 tests/generator/out_data_post_fp_gaussian/orig/nopbc diff --git a/setup.py b/setup.py index f86886544..e051389bf 100755 --- a/setup.py +++ b/setup.py @@ -17,7 +17,7 @@ with open(path.join('dpgen', '_date.py'), 'w') as fp : fp.write('date = \'%s\'' % today) -install_requires=['numpy>=1.14.3', 'dpdata>=0.1.14', 'pymatgen>=2019.1.13', 'ase', 'monty>2.0.0', 'paramiko', 'custodian','GromacsWrapper>=0.8.0', 'dpdispatcher>=0.3.11'] +install_requires=['numpy>=1.14.3', 'dpdata>=0.2.4', 'pymatgen>=2019.1.13', 'ase', 'monty>2.0.0', 'paramiko', 'custodian','GromacsWrapper>=0.8.0', 'dpdispatcher>=0.3.11'] setuptools.setup( name=NAME, diff --git a/tests/generator/out_data_post_fp_gaussian/orig/energy.raw b/tests/generator/out_data_post_fp_gaussian/orig/energy.raw index fd4a649a7..a0dea2026 100644 --- 
a/tests/generator/out_data_post_fp_gaussian/orig/energy.raw +++ b/tests/generator/out_data_post_fp_gaussian/orig/energy.raw @@ -1 +1 @@ --1102.7145808882785 +-1102.7145898692427 diff --git a/tests/generator/out_data_post_fp_gaussian/orig/nopbc b/tests/generator/out_data_post_fp_gaussian/orig/nopbc new file mode 100644 index 000000000..e69de29bb From 58c76741261c000780c6de8bc6f869a288ce50ee Mon Sep 17 00:00:00 2001 From: shazj99 Date: Tue, 30 Nov 2021 13:30:07 +0800 Subject: [PATCH 22/28] init_bulk: skip relax if skip_relax is true (#594) Change-Id: I12cc467e5956a5deb29ac4bd43719db1c0ef8942 Co-authored-by: Zhengju Sha --- dpgen/data/gen.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dpgen/data/gen.py b/dpgen/data/gen.py index 1cac205d0..842752b88 100644 --- a/dpgen/data/gen.py +++ b/dpgen/data/gen.py @@ -769,13 +769,14 @@ def gen_init_bulk(args) : dlog.info("Current stage is 1, relax") create_path(out_dir) shutil.copy2(args.PARAM, os.path.join(out_dir, 'param.json')) + skip_relax = jdata['skip_relax'] if from_poscar : make_super_cell_poscar(jdata) else : make_unit_cell(jdata) make_super_cell(jdata) place_element(jdata) - if args.MACHINE is not None: + if args.MACHINE is not None and not skip_relax: make_vasp_relax(jdata, mdata) run_vasp_relax(jdata, mdata) else: From df9095d0c0f4ddc23ed7eb0ac9bce7edb01b2b26 Mon Sep 17 00:00:00 2001 From: TaipingHu <32788428+taipinghu@users.noreply.github.com> Date: Tue, 30 Nov 2021 13:31:15 +0800 Subject: [PATCH 23/28] change compress level to 6 from default 9 to accelerate compress (#593) --- dpgen/dispatcher/SSHContext.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dpgen/dispatcher/SSHContext.py b/dpgen/dispatcher/SSHContext.py index 9e355b5ed..35a7e5997 100644 --- a/dpgen/dispatcher/SSHContext.py +++ b/dpgen/dispatcher/SSHContext.py @@ -280,7 +280,7 @@ def _put_files(self, os.chdir(self.local_root) if os.path.isfile(of) : os.remove(of) - with tarfile.open(of, "w:gz", dereference = dereference) as tar: + with tarfile.open(of, "w:gz", dereference = dereference, compresslevel=6) as tar: for ii in files : tar.add(ii) os.chdir(cwd) From 43d1a0d3622a3c5dab18622f95a4ad02e8446ccf Mon Sep 17 00:00:00 2001 From: Jinzhe Zeng Date: Tue, 30 Nov 2021 18:59:06 -0500 Subject: [PATCH 24/28] support `dp compress` (#607) Set `dp_compress` to `true` in parameters will enable model compression. --- README.md | 1 + dpgen/generator/run.py | 10 +++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index fe06f8cd9..1fdd86ff7 100644 --- a/README.md +++ b/README.md @@ -545,6 +545,7 @@ The bold notation of key (such aas **type_map**) means that it's a necessary key | training_iter0_model_path | list of string | ["/path/to/model0_ckpt/", ...] | The model used to init the first iter training. Number of element should be equal to `numb_models` | | training_init_model | bool | False | Iteration > 0, the model parameters will be initilized from the model trained at the previous iteration. Iteration == 0, the model parameters will be initialized from `training_iter0_model_path`. | | **default_training_param** | Dict | | Training parameters for `deepmd-kit` in `00.train`.
You can find instructions here: https://github.com/deepmodeling/deepmd-kit.
| +| dp_compress | bool | false | Use `dp compress` to compress the model. Default is false. | | *#Exploration* | **model_devi_dt** | Float | 0.002 (recommend) | Timestep for MD | | **model_devi_skip** | Integer | 0 | Number of structures skipped for fp in each MD diff --git a/dpgen/generator/run.py b/dpgen/generator/run.py index f6185d36d..e2fbbfdd9 100644 --- a/dpgen/generator/run.py +++ b/dpgen/generator/run.py @@ -514,6 +514,8 @@ def run_train (iter_index, commands.append(command) command = '%s freeze' % train_command commands.append(command) + if jdata.get("dp_compress", False): + commands.append("%s compress" % train_command) else: raise RuntimeError("DP-GEN currently only supports for DeePMD-kit 1.x or 2.x version!" ) @@ -536,6 +538,8 @@ def run_train (iter_index, ] backward_files = ['frozen_model.pb', 'lcurve.out', 'train.log'] backward_files+= ['model.ckpt.meta', 'model.ckpt.index', 'model.ckpt.data-00000-of-00001', 'checkpoint'] + if jdata.get("dp_compress", False): + backward_files.append('frozen_model_compressed.pb') init_data_sys_ = jdata['init_data_sys'] init_data_sys = [] for ii in init_data_sys_ : @@ -621,7 +625,11 @@ def post_train (iter_index, return # symlink models for ii in range(numb_models) : - task_file = os.path.join(train_task_fmt % ii, 'frozen_model.pb') + if not jdata.get("dp_compress", False): + model_name = 'frozen_model.pb' + else: + model_name = 'frozen_model_compressed.pb' + task_file = os.path.join(train_task_fmt % ii, model_name) ofile = os.path.join(work_path, 'graph.%03d.pb' % ii) if os.path.isfile(ofile) : os.remove(ofile) From 33cf2459983c6eaed6dd7b69797452a99dc52b7f Mon Sep 17 00:00:00 2001 From: Futaki Hatuki <812556867@qq.com> Date: Thu, 2 Dec 2021 08:10:40 +0800 Subject: [PATCH 25/28] Adapt different trust level for different sys_configs (#609) * fix typo in github actions release to conda * Get from_poscar_path only when from_poscar is true (#537) Change-Id: I17774bee345634e4e72bd783e8112eefaaf9f0d3 Co-authored-by: Zhengju Sha * adapt different trust level for different sys_configs Co-authored-by: Han Wang Co-authored-by: felix5572 Co-authored-by: shazj99 Co-authored-by: Zhengju Sha --- README.md | 8 ++++---- dpgen/generator/run.py | 24 +++++++++++++++++++----- 2 files changed, 23 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 1fdd86ff7..897cd1cf0 100644 --- a/README.md +++ b/README.md @@ -549,10 +549,10 @@ The bold notation of key (such aas **type_map**) means that it's a necessary key | *#Exploration* | **model_devi_dt** | Float | 0.002 (recommend) | Timestep for MD | | **model_devi_skip** | Integer | 0 | Number of structures skipped for fp in each MD -| **model_devi_f_trust_lo** | Float | 0.05 | Lower bound of forces for the selection. - | **model_devi_f_trust_hi** | Float | 0.15 | Upper bound of forces for the selection -| **model_devi_v_trust_lo** | Float | 1e10 | Lower bound of virial for the selection. Should be used with DeePMD-kit v2.x | -| **model_devi_v_trust_hi** | Float | 1e10 | Upper bound of virial for the selection. Should be used with DeePMD-kit v2.x | +| **model_devi_f_trust_lo** | Float or List of float | 0.05 | Lower bound of forces for the selection. If List, should be set for each index in `sys_configs`, respectively. | +| **model_devi_f_trust_hi** | Float or List of float | 0.15 | Upper bound of forces for the selection. If List, should be set for each index in `sys_configs`, respectively. | +| **model_devi_v_trust_lo** | Float or List of float | 1e10 | Lower bound of virial for the selection. 
If List, should be set for each index in `sys_configs`, respectively. Should be used with DeePMD-kit v2.x. | +| **model_devi_v_trust_hi** | Float or List of float | 1e10 | Upper bound of virial for the selection. If List, should be set for each index in `sys_configs`, respectively. Should be used with DeePMD-kit v2.x. | | model_devi_adapt_trust_lo | Boolean | False | Adaptively determines the lower trust levels of force and virial. This option should be used together with `model_devi_numb_candi_f`, `model_devi_numb_candi_v` and optionally with `model_devi_perc_candi_f` and `model_devi_perc_candi_v`. `dpgen` will make two sets: 1. From the frames with force model deviation lower than `model_devi_f_trust_hi`, select `max(model_devi_numb_candi_f, model_devi_perc_candi_f*n_frames)` frames with largest force model deviation. 2. From the frames with virial model deviation lower than `model_devi_v_trust_hi`, select `max(model_devi_numb_candi_v, model_devi_perc_candi_v*n_frames)` frames with largest virial model deviation. The union of the two sets is made as candidate dataset| | model_devi_numb_candi_f | Int | 10 | See `model_devi_adapt_trust_lo`.| | model_devi_numb_candi_v | Int | 0 | See `model_devi_adapt_trust_lo`.| diff --git a/dpgen/generator/run.py b/dpgen/generator/run.py index e2fbbfdd9..05596b0f4 100644 --- a/dpgen/generator/run.py +++ b/dpgen/generator/run.py @@ -1593,18 +1593,32 @@ def _make_fp_vasp_inner (modd_path, skip_bad_box = jdata.get('fp_skip_bad_box') # skip discrete structure in cluster fp_cluster_vacuum = jdata.get('fp_cluster_vacuum',None) - for ss in system_index : + + def _trust_limitation_check(sys_idx, lim): + if isinstance(lim, list): + sys_lim = lim[sys_idx] + else: + sys_lim = lim + return sys_lim + + for ss in system_index: modd_system_glob = os.path.join(modd_path, 'task.' 
+ ss + '.*') modd_system_task = glob.glob(modd_system_glob) modd_system_task.sort() + # convert global trust limitations to local ones + f_trust_lo_sys = _trust_limitation_check(ss, f_trust_lo) + f_trust_hi_sys = _trust_limitation_check(ss, f_trust_hi) + v_trust_lo_sys = _trust_limitation_check(ss, v_trust_lo) + v_trust_hi_sys = _trust_limitation_check(ss, v_trust_hi) + # assumed e -> v if not model_devi_adapt_trust_lo: fp_rest_accurate, fp_candidate, fp_rest_failed, counter \ = _select_by_model_devi_standard( modd_system_task, - f_trust_lo, f_trust_hi, - v_trust_lo, v_trust_hi, + f_trust_lo_sys, f_trust_hi_sys, + v_trust_lo_sys, v_trust_hi_sys, cluster_cutoff, model_devi_skip, model_devi_f_avg_relative = model_devi_f_avg_relative, @@ -1618,8 +1632,8 @@ def _make_fp_vasp_inner (modd_path, fp_rest_accurate, fp_candidate, fp_rest_failed, counter, f_trust_lo_ad, v_trust_lo_ad \ = _select_by_model_devi_adaptive_trust_low( modd_system_task, - f_trust_hi, numb_candi_f, perc_candi_f, - v_trust_hi, numb_candi_v, perc_candi_v, + f_trust_hi_sys, numb_candi_f, perc_candi_f, + v_trust_hi_sys, numb_candi_v, perc_candi_v, model_devi_skip = model_devi_skip, model_devi_f_avg_relative = model_devi_f_avg_relative, ) From 5c8faa6b8f2f4a01b957812442eba20c083b0a9f Mon Sep 17 00:00:00 2001 From: Wanrun Jiang <58099845+Vibsteamer@users.noreply.github.com> Date: Thu, 2 Dec 2021 16:43:20 +0800 Subject: [PATCH 26/28] bug_fix | to support multi-element cases (#608) currently, only count the #atoms of the first element when evaluating E-per-atom; --- dpgen/auto_test/EOS.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dpgen/auto_test/EOS.py b/dpgen/auto_test/EOS.py index b69db6e1c..83a7482ab 100644 --- a/dpgen/auto_test/EOS.py +++ b/dpgen/auto_test/EOS.py @@ -169,8 +169,8 @@ def _compute_lower(self, # vol = self.vol_start + ii * self.vol_step vol = loadfn(os.path.join(all_tasks[ii], 'eos.json'))['volume'] task_result = loadfn(all_res[ii]) - res_data[vol] = task_result['energies'][-1] / task_result['atom_numbs'][0] - ptr_data += '%7.3f %8.4f \n' % (vol, task_result['energies'][-1] / task_result['atom_numbs'][0]) + res_data[vol] = task_result['energies'][-1] / task_result['atom_numbs'].sum() + ptr_data += '%7.3f %8.4f \n' % (vol, task_result['energies'][-1] / task_result['atom_numbs'].sum()) # res_data[vol] = all_res[ii]['energy'] / len(all_res[ii]['force']) # ptr_data += '%7.3f %8.4f \n' % (vol, all_res[ii]['energy'] / len(all_res[ii]['force'])) From 76928358e43c44da27d629c4802806db1161959c Mon Sep 17 00:00:00 2001 From: Wanrun Jiang <58099845+Vibsteamer@users.noreply.github.com> Date: Thu, 9 Dec 2021 07:42:08 +0800 Subject: [PATCH 27/28] bug_fix | typo in setting default KSPACING (#611) if kspacing is provided in json, will in fact use KSPACING=1 (default) to cal --- dpgen/auto_test/VASP.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dpgen/auto_test/VASP.py b/dpgen/auto_test/VASP.py index 410bde02c..61cbeb931 100644 --- a/dpgen/auto_test/VASP.py +++ b/dpgen/auto_test/VASP.py @@ -140,8 +140,8 @@ def make_input_file(self, incar['ENCUT'] = cal_setting['encut'] if 'kspacing' in cal_setting: - dlog.info("%s setting KSAPCING to %s" % (self.make_input_file.__name__, cal_setting['kspacing'])) - incar['KSAPCING'] = cal_setting['kspacing'] + dlog.info("%s setting KSPACING to %s" % (self.make_input_file.__name__, cal_setting['kspacing'])) + incar['KSPACING'] = cal_setting['kspacing'] if 'kgamma' in cal_setting: dlog.info("%s setting KGAMMA to %s" % 
(self.make_input_file.__name__, cal_setting['kgamma'])) From 6de10d0c22c9b53e167e809edb0e324ac19b13e7 Mon Sep 17 00:00:00 2001 From: Wanrun Jiang <58099845+Vibsteamer@users.noreply.github.com> Date: Thu, 9 Dec 2021 07:43:26 +0800 Subject: [PATCH 28/28] model_devi | sort models in default&support user-defined order (#610) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * model_devi | sort models in default&support user-defined order current behavior: 1)the list of models written in input.lammps for model_devi is created by "glob" without "sort" --> may give different (random) orders in different machine environments --> would randomly alter the model being used to sample configurations (always use the first model written in imput.lammps) ---> could lead to unexpected performance, such as, when using together with "model_devi_activation_func" that allows four models to be nonequivalent. 2)when preparing input.lammps from iuser-provided template, the line begin with "pair_style deepmd" will be overwritten by dpgen, thus overwrites the user defined order of models ---> could lead to unexpected performance, such as, when using together with "model_devi_activation_func" that allows four models to be nonequivalent and users indeed expected a specific order of models. changes: 1)sorted the list of models in default, thus always use graph.000.pb to sample configurations; 2)check weather user writes the full line of begin with "pair_style deepmd" (by checking the length), if yes, leave it be; if not, overwrites with the default settings (use graph.000.pb to sample); besides, the original error trigger is retained if key words "pair_style deepmd" are not provided in the template. * adjust space/tab * adjust annotation --- dpgen/generator/run.py | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/dpgen/generator/run.py b/dpgen/generator/run.py index 05596b0f4..1c4733037 100644 --- a/dpgen/generator/run.py +++ b/dpgen/generator/run.py @@ -804,7 +804,7 @@ def make_model_devi (iter_index, iter_name = make_iter_name(iter_index) train_path = os.path.join(iter_name, train_name) train_path = os.path.abspath(train_path) - models = glob.glob(os.path.join(train_path, "graph*pb")) + models = sorted(glob.glob(os.path.join(train_path, "graph*pb"))) work_path = os.path.join(iter_name, model_devi_name) create_path(work_path) for mm in models : @@ -890,7 +890,7 @@ def _make_model_devi_revmat(iter_index, jdata, mdata, conf_systems): iter_name = make_iter_name(iter_index) train_path = os.path.join(iter_name, train_name) train_path = os.path.abspath(train_path) - models = glob.glob(os.path.join(train_path, "graph*pb")) + models = sorted(glob.glob(os.path.join(train_path, "graph*pb"))) task_model_list = [] for ii in models: task_model_list.append(os.path.join('..', os.path.basename(ii))) @@ -947,7 +947,23 @@ def _make_model_devi_revmat(iter_index, jdata, mdata, conf_systems): # revise input of lammps with open('input.lammps') as fp: lmp_lines = fp.readlines() - lmp_lines = revise_lmp_input_model(lmp_lines, task_model_list, trj_freq, deepmd_version = deepmd_version) + # only revise the line "pair_style deepmd" if the user has not written the full line (checked by then length of the line) + template_has_pair_deepmd=1 + for line_idx,line_context in enumerate(lmp_lines): + if (line_context[0] != "#") and ("pair_style" in line_context) and ("deepmd" in line_context): + template_has_pair_deepmd=0 + template_pair_deepmd_idx=line_idx + if 
template_has_pair_deepmd == 0: + if LooseVersion(deepmd_version) < LooseVersion('1'): + if len(lmp_lines[template_pair_deepmd_idx].split()) != (len(models) + len(["pair_style","deepmd","10", "model_devi.out"])): + lmp_lines = revise_lmp_input_model(lmp_lines, task_model_list, trj_freq, deepmd_version = deepmd_version) + else: + if len(lmp_lines[template_pair_deepmd_idx].split()) != (len(models) + len(["pair_style","deepmd","out_freq", "10", "out_file", "model_devi.out"])): + lmp_lines = revise_lmp_input_model(lmp_lines, task_model_list, trj_freq, deepmd_version = deepmd_version) + #use revise_lmp_input_model to raise error message if "part_style" or "deepmd" not found + else: + lmp_lines = revise_lmp_input_model(lmp_lines, task_model_list, trj_freq, deepmd_version = deepmd_version) + lmp_lines = revise_lmp_input_dump(lmp_lines, trj_freq) lmp_lines = revise_by_keys( lmp_lines, total_rev_keys[:total_num_lmp], total_rev_item[:total_num_lmp]
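
The check added in PATCH 28/28 above can be summarized as follows: a user-written "pair_style deepmd" line in the template is kept only if it already lists every model plus the expected model-deviation output arguments; otherwise dpgen regenerates the line (and, with the model list now sorted, graph.000.pb is always the model that drives the sampling). Below is a minimal standalone sketch of that completeness test, assuming a hypothetical helper name; the real logic lives inline in _make_model_devi_revmat and branches on LooseVersion(deepmd_version).

# Sketch only: token-count test mirroring the patch above (helper name is illustrative, not part of dpgen).
def pair_style_line_is_complete(line, n_models, deepmd_ge_1=True):
    """Return True if a user-supplied 'pair_style deepmd' line already lists
    all models and the model-deviation output arguments."""
    # deepmd >= 1: pair_style deepmd <models...> out_freq 10 out_file model_devi.out
    # deepmd <  1: pair_style deepmd <models...> 10 model_devi.out
    n_fixed_tokens = 6 if deepmd_ge_1 else 4
    return len(line.split()) == n_models + n_fixed_tokens

line = ("pair_style deepmd graph.000.pb graph.001.pb graph.002.pb graph.003.pb "
        "out_freq 10 out_file model_devi.out")
print(pair_style_line_is_complete(line, n_models=4))                 # True  -> template line kept as-is
print(pair_style_line_is_complete("pair_style deepmd", n_models=4))  # False -> rewritten by dpgen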