
Commit 4a655ba

Merge pull request #262 from nlesc-nano/atom_order
BUG: Fix atom-pairs not being sorted if certain parameters are guessed
BvB93 committed Jan 24, 2022
2 parents e27b09a + 4256e17 commit 4a655ba
Showing 4 changed files with 38 additions and 37 deletions.
30 changes: 5 additions & 25 deletions FOX/armc/package_manager.py
@@ -266,7 +266,7 @@ def clear_jobs(self, **kwargs: Any) -> None:
raise NotImplementedError('Trying to call an abstract method')

@abstractmethod
def update_settings(self, dct: Any, **kwargs: Any) -> None:
def update_settings(self, dct_seq: Sequence[dict[str, pd.DataFrame]]) -> None:
"""Update the Settings embedded in this instance using **dct**."""
raise NotImplementedError('Trying to call an abstract method')

@@ -387,31 +387,11 @@ def clear_jobs() -> None:
job_manager.jobs = []
job_manager.names = {}

def update_settings(self, dct: Sequence[Tuple[str, Mapping]], new_keys: bool = True) -> None:
def update_settings(self, dct_seq: Sequence[dict[str, pd.DataFrame]]) -> None:
"""Update all forcefield parameter blocks in this instance's CP2K settings."""
iterator = (job['settings'] for job in chain.from_iterable(self.values()))
for settings in iterator:
for key_alias, sub_dict in dct:
param = sub_dict['param']

if key_alias not in settings:
settings[key_alias] = pd.DataFrame(sub_dict, index=[param])
continue

# Ensure all column-keys in **sub_dict** are also in **df**
df: pd.DataFrame = settings[key_alias]
if new_keys:
keys = set(sub_dict.keys()).difference(df.columns)
for k in keys:
df[k] = np.nan

# Ensure that the **param** index-key is in **df** and update
df_update = pd.DataFrame(sub_dict, index=[param])
if param not in df.index:
df.loc[param] = np.nan
if 'guess' in df.columns:
del df['guess']
df.update(df_update)
for job_list in self.values():
for job, dct in zip(job_list, dct_seq):
job['settings'].update(dct)

@overload
@staticmethod
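
Note: the new update_settings no longer merges parameter blocks column by column; the frames arrive fully built (see get_cp2k_dicts in the next file) and are applied with a plain dict update. A minimal, self-contained sketch of that per-job update, using invented example data (the "lennard-jones" block name and the "Cd Se" column are illustrative, not taken from the diff):

from typing import Dict, List, Sequence
import pandas as pd

def update_settings(job_lists: Sequence[List[dict]],
                    dct_seq: Sequence[Dict[str, pd.DataFrame]]) -> None:
    # Mirrors the simplified method: one {block-name: DataFrame} mapping per job,
    # so a plain dict.update suffices.
    for job_list in job_lists:
        for job, dct in zip(job_list, dct_seq):
            job['settings'].update(dct)

# One job whose settings receive a single "lennard-jones" block
block = pd.DataFrame({"param": ["epsilon"], "unit": ["kcalmol"], "Cd Se": [0.25]},
                     index=["epsilon"])
jobs = [[{'settings': {}}]]
update_settings(jobs, [{"lennard-jones": block}])
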
22 changes: 21 additions & 1 deletion FOX/armc/param_mapping.py
@@ -24,9 +24,10 @@
from types import MappingProxyType
from logging import Logger
from functools import wraps, partial
from collections import defaultdict
from typing import (
Any, TypeVar, Optional, Tuple, Mapping, Iterable, ClassVar, Union,
Callable, FrozenSet, cast, MutableMapping, TYPE_CHECKING, Dict
Callable, FrozenSet, cast, MutableMapping, TYPE_CHECKING, Dict,
)

import h5py
@@ -37,6 +38,7 @@

from ..type_hints import ArrayLike
from ..functions.charge_utils import update_charge, get_net_charge, ChargeError
from ..functions.cp2k_utils import UNIT_MAP_REVERSED

if TYPE_CHECKING:
from pandas.core.generic import NDFrame
@@ -556,6 +558,24 @@ def to_yaml_dict(self) -> Dict[str, Any]:
ret[key][i]['unit'] = unit or None
return ret

def get_cp2k_dicts(self) -> list[defaultdict[str, pd.DataFrame]]:
"""Get dictionaries with CP2K parameters that are parsable by QMFlows."""
ret = []
df_template = pd.DataFrame(columns=["param", "unit"], dtype=object)
for i, series in self.param.items():
dct: defaultdict[str, pd.DataFrame] = defaultdict(df_template.copy)
visited = set()
for (k, param, atom), v in series.items():
visited_key = (k, param)
if visited_key not in visited:
unit = self.metadata.at[(k, param, atom), (i, "unit")] or None
visited.add(visited_key)
dct[k].at[param, "param"] = param
dct[k].at[param, "unit"] = UNIT_MAP_REVERSED.get(unit, unit)
dct[k].at[param, atom] = v
ret.append(dct)
return ret


MOVE_RANGE = np.array([[
0.900, 0.905, 0.910, 0.915, 0.920, 0.925, 0.930, 0.935, 0.940,
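
Note: a rough illustration of the data layout produced by the new get_cp2k_dicts, i.e. one {block-name: DataFrame} mapping per parameter set, with a "param" and "unit" column plus one column per atom pair. The block name, parameter, unit and atom pairs below are invented for the example; only the layout follows the code above.

from collections import defaultdict
import pandas as pd

df_template = pd.DataFrame(columns=["param", "unit"], dtype=object)
dct = defaultdict(df_template.copy)

# One row per parameter name; one extra column per atom pair
dct["lennard-jones"].at["epsilon", "param"] = "epsilon"
dct["lennard-jones"].at["epsilon", "unit"] = "kcalmol"  # CP2K spelling, as UNIT_MAP_REVERSED returns for 'kcal/mol'
dct["lennard-jones"].at["epsilon", "Cd Se"] = 0.0262
dct["lennard-jones"].at["epsilon", "Cd Cd"] = 0.0741

print(dct["lennard-jones"])
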
12 changes: 4 additions & 8 deletions FOX/armc/sanitization.py
@@ -97,11 +97,6 @@ def dict_to_armc(input_dict: MainMapping) -> Tuple[MonteCarloABC, RunDict]:
param, _param, _param_frozen, validation_dict = get_param(dct['param'])
mc, run_kwargs = get_armc(dct['monte_carlo'], package, param, phi, mol_list)

# Update the job Settings
if _param_frozen is not None:
package.update_settings(list(prm_iter(_param_frozen)), new_keys=True)
package.update_settings(list(prm_iter(_param)), new_keys=True)

# Handle psf stuff
psf_list: Optional[List[PSFContainer]] = get_psf(dct['psf'], mol_list)
run_kwargs['psf'] = psf_list
@@ -126,10 +121,14 @@ def dict_to_armc(input_dict: MainMapping) -> Tuple[MonteCarloABC, RunDict]:
validate_constraints(param, enforce_constraints=validation_dict['enforce_constraints'])
param._net_charge_to_integer()

# Sort the index
mc.param.param.sort_index(inplace=True)
mc.param.param_old.sort_index(inplace=True)
mc.param.metadata.sort_index(inplace=True)

# Update the job settings
mc.package_manager.update_settings(mc.param.get_cp2k_dicts())

# Add PES evaluators
pes = get_pes(dct['pes'], len(mol_list))
for name, kwargs in pes.items():
@@ -252,9 +251,6 @@ def _guess_param(mc: MonteCarloABC, prm: dict,
prm_dict['param'] = param
seq.append((k, prm_dict))

# Update the constant parameters
package.update_settings(seq, new_keys=True)

# Update the variable parameters
metadata = {'min': -np.inf, 'max': np.inf, 'count': 0, 'guess': True}
_prm = mc.param.param
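
Note: the added sort_index calls are what address the reported bug (the atom-pair index being left unsorted when some parameters are guessed). A stand-alone sketch of the effect on a parameter-style MultiIndex; the entries are invented for the example:

import pandas as pd

idx = pd.MultiIndex.from_tuples([
    ("lennard-jones", "epsilon", "Cd Se"),
    ("lennard-jones", "epsilon", "Cd Cd"),   # e.g. a guessed pair appended last
], names=["key", "param", "atoms"])
param = pd.Series([0.26, 0.07], index=idx)

param.sort_index(inplace=True)   # "Cd Cd" now precedes "Cd Se"
print(param)
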
11 changes: 8 additions & 3 deletions FOX/functions/cp2k_utils.py
@@ -7,33 +7,35 @@
parse_cp2k_value
get_xyz_path
UNIT_MAP
UNIT_MAP_REVERSED
API
---
.. autofunction:: parse_cp2k_value
.. autofunction:: update_charge
.. autodata:: UNIT_MAP
.. autodata:: UNIT_MAP_REVERSED
"""

import os
from types import MappingProxyType
from typing import Mapping, Union, Optional, TypeVar
from typing import Union, Optional, TypeVar

import numpy as np
from scipy import constants

from scm.plams import Units

__all__ = ['UNIT_MAP', 'parse_cp2k_value', 'get_xyz_path']
__all__ = ['UNIT_MAP', 'UNIT_MAP_REVERSED', 'parse_cp2k_value', 'get_xyz_path']

# Multiplicative factor for converting Hartree into Kelvin
Units.energy['k'] = Units.energy['kelvin'] = (
constants.physical_constants['Hartree energy'][0] / constants.Boltzmann
)

#: Map CP2K units to PLAMS units.
UNIT_MAP: Mapping[str, str] = MappingProxyType({
UNIT_MAP = MappingProxyType({
'hartree': 'hartree',
'ev': 'eV',
'kcalmol': 'kcal/mol',
@@ -49,6 +51,9 @@
'deg': 'degree'
})

#: Map PLAMS units to CP2K units.
UNIT_MAP_REVERSED = {k: v for v, k in UNIT_MAP.items()}

T = TypeVar('T', float, np.ndarray)


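
Note: a short sketch of the round trip between the two unit mappings, restricted to entries visible in the diff above:

from types import MappingProxyType

# Subset of UNIT_MAP: CP2K spelling -> PLAMS spelling
UNIT_MAP = MappingProxyType({'ev': 'eV', 'kcalmol': 'kcal/mol', 'deg': 'degree'})
# Reversed mapping: PLAMS spelling -> CP2K spelling
UNIT_MAP_REVERSED = {k: v for v, k in UNIT_MAP.items()}

plams_unit = UNIT_MAP['kcalmol']                           # 'kcal/mol'
cp2k_unit = UNIT_MAP_REVERSED.get(plams_unit, plams_unit)  # back to 'kcalmol'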
