
Commit

breaking: change the default value of rcond from 1e-3 to None (#2688)

This is a breaking change: the previous default, `rcond=1e-3`, was found to be
inaccurate. Using NumPy's default value is more accurate and not too slow.

---------

Signed-off-by: Jinzhe Zeng <jinzhe.zeng@rutgers.edu>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
njzjz and pre-commit-ci[bot] committed Jul 20, 2023
1 parent 6389891 commit 3b79fa9
Showing 7 changed files with 14 additions and 12 deletions.
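
Why the default matters: `rcond` is the relative cutoff on singular values in `numpy.linalg.lstsq`; with `rcond=1e-3`, any singular value below one thousandth of the largest one is discarded, which can bias the fitted coefficients, while `rcond=None` uses NumPy's machine-precision-based cutoff. The following is a minimal, illustrative sketch (not part of the commit; the arrays are made up) of the effect:

import numpy as np

rng = np.random.default_rng(0)
# Two columns of very different scale, so the smaller singular value falls
# below 1e-3 of the larger one.
A = np.column_stack([rng.normal(size=200), 1e-4 * rng.normal(size=200)])
x_true = np.array([1.0, 5.0])
b = A @ x_true

x_cut, *_ = np.linalg.lstsq(A, b, rcond=1e-3)      # small singular value dropped
x_default, *_ = np.linalg.lstsq(A, b, rcond=None)  # machine-precision cutoff
print(x_cut)      # second coefficient is pushed toward 0 -> inaccurate
print(x_default)  # recovers [1.0, 5.0] up to floating-point error
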
2 changes: 1 addition & 1 deletion deepmd/fit/dos.py
@@ -97,7 +97,7 @@ def __init__(
     numb_fparam: int = 0,
     numb_aparam: int = 0,
     numb_dos: int = 300,
-    rcond: float = 1e-3,
+    rcond: Optional[float] = None,
     trainable: List[bool] = None,
     seed: int = None,
     activation_function: str = "tanh",
2 changes: 1 addition & 1 deletion deepmd/fit/ener.py
@@ -135,7 +135,7 @@ def __init__(
     resnet_dt: bool = True,
     numb_fparam: int = 0,
     numb_aparam: int = 0,
-    rcond: float = 1e-3,
+    rcond: Optional[float] = None,
     tot_ener_zero: bool = False,
     trainable: Optional[List[bool]] = None,
     seed: Optional[int] = None,
2 changes: 1 addition & 1 deletion deepmd/fit/polar.py
@@ -216,7 +216,7 @@ def compute_input_stats(self, all_stat, protection=1e-2):
     matrix, bias = np.concatenate(sys_matrix, axis=0), np.concatenate(
         polar_bias, axis=0
     )
-    atom_polar, _, _, _ = np.linalg.lstsq(matrix, bias, rcond=1e-3)
+    atom_polar, _, _, _ = np.linalg.lstsq(matrix, bias, rcond=None)
     for itype in range(len(self.sel_type)):
         self.constant_matrix[self.sel_type[itype]] = np.mean(
             np.diagonal(atom_polar[itype].reshape((3, 3)))
12 changes: 8 additions & 4 deletions deepmd/utils/argcheck.py
@@ -487,7 +487,7 @@ def fitting_ener():
     doc_trainable = "Whether the parameters in the fitting net are trainable. This option can be\n\n\
 - bool: True if all parameters of the fitting net are trainable, False otherwise.\n\n\
 - list of bool: Specifies if each layer is trainable. Since the fitting net is composed by hidden layers followed by a output layer, the length of tihs list should be equal to len(`neuron`)+1."
-    doc_rcond = "The condition number used to determine the inital energy shift for each type of atoms."
+    doc_rcond = "The condition number used to determine the inital energy shift for each type of atoms. See `rcond` in :py:meth:`numpy.linalg.lstsq` for more details."
     doc_seed = "Random seed for parameter initialization of the fitting net"
     doc_atom_ener = "Specify the atomic energy in vacuum for each type"
     doc_layer_name = (
@@ -526,7 +526,9 @@ def fitting_ener():
         Argument(
             "trainable", [list, bool], optional=True, default=True, doc=doc_trainable
         ),
-        Argument("rcond", float, optional=True, default=1e-3, doc=doc_rcond),
+        Argument(
+            "rcond", [float, type(None)], optional=True, default=None, doc=doc_rcond
+        ),
         Argument("seed", [int, None], optional=True, doc=doc_seed),
         Argument("atom_ener", list, optional=True, default=[], doc=doc_atom_ener),
         Argument("layer_name", list, optional=True, doc=doc_layer_name),
@@ -550,7 +552,7 @@ def fitting_dos():
     doc_trainable = "Whether the parameters in the fitting net are trainable. This option can be\n\n\
 - bool: True if all parameters of the fitting net are trainable, False otherwise.\n\n\
 - list of bool: Specifies if each layer is trainable. Since the fitting net is composed by hidden layers followed by a output layer, the length of tihs list should be equal to len(`neuron`)+1."
-    doc_rcond = "The condition number used to determine the inital energy shift for each type of atoms."
+    doc_rcond = "The condition number used to determine the inital energy shift for each type of atoms. See `rcond` in :py:meth:`numpy.linalg.lstsq` for more details."
     doc_seed = "Random seed for parameter initialization of the fitting net"
     doc_numb_dos = (
         "The number of gridpoints on which the DOS is evaluated (NEDOS in VASP)"
@@ -574,7 +576,9 @@ def fitting_dos():
         Argument(
             "trainable", [list, bool], optional=True, default=True, doc=doc_trainable
         ),
-        Argument("rcond", float, optional=True, default=1e-3, doc=doc_rcond),
+        Argument(
+            "rcond", [float, type(None)], optional=True, default=None, doc=doc_rcond
+        ),
         Argument("seed", [int, None], optional=True, doc=doc_seed),
         Argument("numb_dos", int, optional=True, default=300, doc=doc_numb_dos),
     ]
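
For users, the practical effect of the `argcheck` change is that `rcond` in the fitting-net section of the training JSON may now be a float, `null`, or simply omitted. A hedged sketch of a configuration fragment (the surrounding keys are illustrative, taken from the options listed in the diff above):

# Illustrative fitting_net fragment; only "rcond" is the point here.
fitting_net = {
    "trainable": True,
    # "rcond": 1e-3,  # the old default; set it explicitly to keep the old behaviour
    "rcond": None,     # new default, equivalent to leaving the key out
    "seed": 1,
}
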
2 changes: 1 addition & 1 deletion deepmd/utils/data_system.py
@@ -236,7 +236,7 @@ def default_mesh(self) -> List[np.ndarray]:
             for ii in range(self.nsystems)
         ]
 
-    def compute_energy_shift(self, rcond=1e-3, key="energy"):
+    def compute_energy_shift(self, rcond=None, key="energy"):
         sys_ener = []
         for ss in self.data_systems:
             sys_ener.append(ss.avg(key))
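
`compute_energy_shift` is where the new default is consumed: the per-type energy shifts are the least-squares solution of the systems' average energies against their per-type atom counts (the same pattern appears in `source/tests/common.py` below). A minimal sketch with hypothetical numbers, not the actual DeePMD-kit data structures:

import numpy as np

# 4 hypothetical systems, 2 atom types: counts of each type per system.
natoms_per_type = np.array([[32, 0], [16, 16], [0, 32], [24, 8]], dtype=float)
true_shift = np.array([-93.2, -187.5])     # per-type shift to recover
sys_energy = natoms_per_type @ true_shift  # average energy of each system

# rcond=None keeps all well-conditioned singular values (the new default).
energy_shift, _, _, _ = np.linalg.lstsq(natoms_per_type, sys_energy, rcond=None)
print(energy_shift)  # ~[-93.2, -187.5]
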
4 changes: 1 addition & 3 deletions deepmd/utils/multi_init.py
@@ -169,7 +169,5 @@ def _change_sub_config(jdata: Dict[str, Any], src_jdata: Dict[str, Any], sub_key
     # keep some params that are irrelevant to model structures (need to discuss) TODO
     if "trainable" in cur_para.keys():
         target_para["trainable"] = cur_para["trainable"]
-    log.info(
-        f"Change the '{sub_key}' from {str(cur_para)} to {str(target_para)}."
-    )
+    log.info(f"Change the '{sub_key}' from {str(cur_para)} to {str(target_para)}.")
     jdata[sub_key] = target_para
2 changes: 1 addition & 1 deletion source/tests/common.py
@@ -964,7 +964,7 @@ def compute_energy_shift(self):
         sys_tynatom = np.reshape(sys_tynatom, [self.nsystems, -1])
         sys_tynatom = sys_tynatom[:, 2:]
         energy_shift, resd, rank, s_value = np.linalg.lstsq(
-            sys_tynatom, sys_ener, rcond=1e-3
+            sys_tynatom, sys_ener, rcond=None
         )
         return energy_shift
