Commit

[pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

pre-commit-ci[bot] committed Sep 15, 2023
1 parent 7e6385e commit c6a79aa

Showing 23 changed files with 43 additions and 45 deletions.
2 changes: 1 addition & 1 deletion deepmd/calculator.py
@@ -75,7 +75,7 @@ def __init__(
         self,
         model: Union[str, "Path"],
         label: str = "DP",
-        type_dict: Dict[str, int] = None,
+        type_dict: Optional[Dict[str, int]] = None,
         **kwargs,
     ) -> None:
         Calculator.__init__(self, label=label, **kwargs)
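
This hunk is the PEP 484 implicit-Optional fix (presumably Ruff's RUF013, applied by the pre-commit.ci run): a parameter whose default is None must be annotated Optional[...] explicitly. The hunks in deepmd/fit/dos.py, deepmd/loss/ener.py, and deepmd/utils/spin.py below apply the same rule (spin.py also gains the required typing import). A minimal sketch of the pattern, using a hypothetical function name:

    from typing import Dict, Optional

    # Before: `type_dict: Dict[str, int] = None` -- the annotation forbids the
    # default, so strict type checkers reject it (PEP 484 implicit Optional).
    def resolve_types(type_dict: Optional[Dict[str, int]] = None) -> Dict[str, int]:
        # Treat a missing mapping as empty; annotation and default now agree.
        return {} if type_dict is None else type_dict
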
4 changes: 2 additions & 2 deletions deepmd/descriptor/se_a.py
@@ -890,7 +890,7 @@ def _filter_lower(
         suffix="",
     ):
         """Input env matrix, returns R.G."""
-        outputs_size = [1] + self.filter_neuron
+        outputs_size = [1, *self.filter_neuron]
         # cut-out inputs
         # with natom x (nei_type_i x 4)
         inputs_i = tf.slice(inputs, [0, start_index * 4], [-1, incrs_index * 4])
@@ -1006,7 +1006,7 @@ def _filter(
         nframes = tf.shape(tf.reshape(inputs, [-1, natoms[0], self.ndescrpt]))[0]
         # natom x (nei x 4)
         shape = inputs.get_shape().as_list()
-        outputs_size = [1] + self.filter_neuron
+        outputs_size = [1, *self.filter_neuron]
         outputs_size_2 = self.n_axis_neuron
         all_excluded = all(
             (type_input, type_i) in self.exclude_types for type_i in range(self.ntypes)
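
The repeated `[1] + self.filter_neuron` -> `[1, *self.filter_neuron]` rewrites here and in the other files touched by this commit are the collection-literal-concatenation fix (presumably Ruff's RUF005): unpacking into one literal skips the intermediate list that `+` builds, and accepts any iterable on the right-hand side. A sketch with hypothetical layer widths:

    filter_neuron = [25, 50, 100]  # hypothetical embedding-net widths

    # One literal, one pass; same result as the concatenation it replaces.
    outputs_size = [1, *filter_neuron]
    assert outputs_size == [1] + filter_neuron  # -> [1, 25, 50, 100]
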
2 changes: 1 addition & 1 deletion deepmd/descriptor/se_a_ebd.py
@@ -230,7 +230,7 @@ def _embedding_net(
         # natom x (nei x 4)
         inputs = tf.reshape(inputs, [-1, self.ndescrpt])
         shape = inputs.get_shape().as_list()
-        outputs_size = [1] + filter_neuron
+        outputs_size = [1, *filter_neuron]
         with tf.variable_scope(name, reuse=reuse):
             xyz_scatter_total = []
             # with natom x (nei x 4)
4 changes: 2 additions & 2 deletions deepmd/descriptor/se_atten.py
@@ -1057,7 +1057,7 @@ def _filter_lower(
         reuse=None,
     ):
         """Input env matrix, returns R.G."""
-        outputs_size = [1] + self.filter_neuron
+        outputs_size = [1, *self.filter_neuron]
         # cut-out inputs
         # with natom x (nei_type_i x 4)
         inputs_i = tf.slice(inputs, [0, start_index * 4], [-1, incrs_index * 4])
@@ -1260,7 +1260,7 @@ def _filter(
         nframes = tf.shape(tf.reshape(inputs, [-1, natoms[0], self.ndescrpt]))[0]
         # natom x (nei x 4)
         shape = inputs.get_shape().as_list()
-        outputs_size = [1] + self.filter_neuron
+        outputs_size = [1, *self.filter_neuron]
         outputs_size_2 = self.n_axis_neuron

         start_index = 0
2 changes: 1 addition & 1 deletion deepmd/descriptor/se_r.py
@@ -638,7 +638,7 @@ def _filter_r(
         trainable=True,
     ):
         # natom x nei
-        outputs_size = [1] + self.filter_neuron
+        outputs_size = [1, *self.filter_neuron]
         with tf.variable_scope(name, reuse=reuse):
             start_index = 0
             xyz_scatter_total = []
2 changes: 1 addition & 1 deletion deepmd/descriptor/se_t.py
@@ -633,7 +633,7 @@ def _filter(
     ):
         # natom x (nei x 4)
         shape = inputs.get_shape().as_list()
-        outputs_size = [1] + self.filter_neuron
+        outputs_size = [1, *self.filter_neuron]
         with tf.variable_scope(name, reuse=reuse):
             start_index_i = 0
             result = None
2 changes: 1 addition & 1 deletion deepmd/entrypoints/ipi.py
@@ -24,7 +24,7 @@ def _program(name: str, args: List[str]):
     args : list of str
         list of arguments
     """
-    return subprocess.call([os.path.join(ROOT_DIR, name)] + args, close_fds=False)
+    return subprocess.call([os.path.join(ROOT_DIR, name), *args], close_fds=False)


 def dp_ipi():
8 changes: 4 additions & 4 deletions deepmd/fit/dos.py
@@ -98,8 +98,8 @@ def __init__(
         numb_aparam: int = 0,
         numb_dos: int = 300,
         rcond: Optional[float] = None,
-        trainable: List[bool] = None,
-        seed: int = None,
+        trainable: Optional[List[bool]] = None,
+        seed: Optional[int] = None,
         activation_function: str = "tanh",
         precision: str = "default",
         uniform_seed: bool = False,
@@ -380,8 +380,8 @@ def build(
         self,
         inputs: tf.Tensor,
         natoms: tf.Tensor,
-        input_dict: dict = None,
-        reuse: bool = None,
+        input_dict: Optional[dict] = None,
+        reuse: Optional[bool] = None,
         suffix: str = "",
     ) -> tf.Tensor:
         """Build the computational graph for fitting net.
4 changes: 2 additions & 2 deletions deepmd/loss/ener.py
@@ -388,9 +388,9 @@ def __init__(
         limit_pref_ae: float = 0.0,
         start_pref_pf: float = 0.0,
         limit_pref_pf: float = 0.0,
-        relative_f: float = None,
+        relative_f: Optional[float] = None,
         enable_atom_ener_coeff: bool = False,
-        use_spin: list = None,
+        use_spin: Optional[list] = None,
     ) -> None:
         self.starter_learning_rate = starter_learning_rate
         self.start_pref_e = start_pref_e
2 changes: 1 addition & 1 deletion deepmd/model/model_stat.py
@@ -58,7 +58,7 @@ def make_stat_input(data, nbatches, merge_sys=True):


 def merge_sys_stat(all_stat):
-    first_key = list(all_stat.keys())[0]
+    first_key = next(iter(all_stat.keys()))
     nsys = len(all_stat[first_key])
     ret = defaultdict(list)
     for ii in range(nsys):
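
Here and in deepmd/train/trainer.py below, `list(d.keys())[0]` becomes `next(iter(...))` (presumably Ruff's RUF015): the old form materializes every key into a list just to read the first element, while `next(iter(d))` stops after one item. Since dicts preserve insertion order, both return the same "first" key. A sketch with a hypothetical stat dict:

    all_stat = {"sys0": [1, 2], "sys1": [3]}

    # O(1) instead of O(n): no throwaway list of all the keys.
    first_key = next(iter(all_stat))  # iter(d) yields keys, so .keys() is optional
    assert first_key == "sys0" == list(all_stat.keys())[0]
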
2 changes: 1 addition & 1 deletion deepmd/nvnmd/entrypoints/wrap.py
@@ -145,7 +145,7 @@ def wrap(self):
         nvnmd_cfg.save(nvnmd_cfg.config_file)
         head = self.wrap_head(nhs, nws)
         # output model
-        hs = [] + head
+        hs = [*head]
         for d in datas:
             hs.extend(d)

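
`[] + head` was already a shallow copy, just spelled oddly; `[*head]` is the idiomatic equivalent (RUF005 again), the same as `list(head)` or `head.copy()`. The copy matters because `hs` is extended in the loop that follows and must not alias `head`. A sketch with hypothetical header words:

    head = [0x1234, 0x5678]

    hs = [*head]         # shallow copy, same result as [] + head
    hs.extend([0x9ABC])  # mutates the copy only
    assert head == [0x1234, 0x5678]
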
4 changes: 2 additions & 2 deletions deepmd/train/run_options.py
@@ -45,7 +45,7 @@


 # http://patorjk.com/software/taag. Font:Big"
-WELCOME = (  # noqa
+WELCOME = (
     r" _____               _____   __  __  _____           _     _  _   ",
     r"|  __ \             |  __ \ |  \/  ||  __ \         | |   (_)| |  ",
     r"| |  | |  ___   ___ | |__) || \  / || |  | | ______ | | __ _ | |_ ",
@@ -71,7 +71,7 @@
     f"build float prec: {global_float_prec}",
     f"build variant: {GLOBAL_CONFIG['dp_variant']}",
     f"build with tf inc: {GLOBAL_CONFIG['tf_include_dir']}",
-    f"build with tf lib: {GLOBAL_CONFIG['tf_libs'].replace(';', _sep)}",  # noqa
+    f"build with tf lib: {GLOBAL_CONFIG['tf_libs'].replace(';', _sep)}",
 )
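
Both hunks in this file only delete `# noqa` comments that no longer suppress any diagnostic; presumably Ruff's RUF100 (unused-noqa) flags these and `--fix` removes them. A minimal sketch of the rule:

    x = 1  # noqa: E501
    # The line above is short, so E501 (line too long) can never fire here;
    # RUF100 reports the directive as unused and the autofix deletes the comment.
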
8 changes: 4 additions & 4 deletions deepmd/train/trainer.py
@@ -250,7 +250,7 @@ def build(self, data=None, stop_batch=0, origin_type_map=None, suffix=""):
         if not self.multi_task_mode:
             single_data = data
         else:
-            single_data = data[list(data.keys())[0]]
+            single_data = data[next(iter(data.keys()))]
         if self.ntypes < single_data.get_ntypes():
             raise ValueError(
                 "The number of types of the training data is %d, but that of the "
@@ -373,7 +373,7 @@ def _build_network(self, data, suffix=""):
         if not self.multi_task_mode:
             self._get_place_horders(data.get_data_dict())
         else:
-            self._get_place_horders(data[list(data.keys())[0]].get_data_dict())
+            self._get_place_horders(data[next(iter(data.keys()))].get_data_dict())

         self.place_holders["type"] = tf.placeholder(tf.int32, [None], name="t_type")
         self.place_holders["natoms_vec"] = tf.placeholder(
@@ -467,7 +467,7 @@ def _build_training(self):
                 var_list=trainable_variables,
                 name="train_step",
             )
-            train_ops = [apply_op] + self._extra_train_ops
+            train_ops = [apply_op, *self._extra_train_ops]
             self.train_op = tf.group(*train_ops)
         else:
             self.train_op = {}
@@ -479,7 +479,7 @@ def _build_training(self):
                     var_list=trainable_variables,
                     name=f"train_step_{fitting_key}",
                 )
-                train_ops = [apply_op] + self._extra_train_ops
+                train_ops = [apply_op, *self._extra_train_ops]
                 self.train_op[fitting_key] = tf.group(*train_ops)
         log.info("built training")
9 changes: 6 additions & 3 deletions deepmd/utils/argcheck.py
@@ -263,7 +263,8 @@ def descrpt_se_a_tpe_args():
     doc_type_nlayer = "number of hidden layers of type embedding net"
     doc_numb_aparam = "dimension of atomic parameter. if set to a value > 0, the atomic parameters are embedded."

-    return descrpt_se_a_args() + [
+    return [
+        *descrpt_se_a_args(),
         Argument("type_nchanl", int, optional=True, default=4, doc=doc_type_nchanl),
         Argument("type_nlayer", int, optional=True, default=2, doc=doc_type_nlayer),
         Argument("numb_aparam", int, optional=True, default=0, doc=doc_numb_aparam),
@@ -397,7 +398,8 @@ def descrpt_se_atten_args():
     doc_smooth_type_embdding = "When using stripped type embedding, whether to dot smooth factor on the network output of type embedding to keep the network smooth, instead of setting `set_davg_zero` to be True."
     doc_set_davg_zero = "Set the normalization average to zero. This option should be set when `se_atten` descriptor or `atom_ener` in the energy fitting is used"

-    return descrpt_se_atten_common_args() + [
+    return [
+        *descrpt_se_atten_common_args(),
         Argument(
             "stripped_type_embedding",
             bool,
@@ -422,7 +424,8 @@
 def descrpt_se_atten_v2_args():
     doc_set_davg_zero = "Set the normalization average to zero. This option should be set when `se_atten` descriptor or `atom_ener` in the energy fitting is used"

-    return descrpt_se_atten_common_args() + [
+    return [
+        *descrpt_se_atten_common_args(),
         Argument(
             "set_davg_zero", bool, optional=True, default=False, doc=doc_set_davg_zero
         ),
4 changes: 1 addition & 3 deletions deepmd/utils/data_system.py
@@ -618,9 +618,7 @@ def _check_type_map_consistency(self, type_map_list):
             min_len = min([len(ii), len(ret)])
             for idx in range(min_len):
                 if ii[idx] != ret[idx]:
-                    raise RuntimeError(
-                        f"inconsistent type map: {str(ret)} {str(ii)}"
-                    )
+                    raise RuntimeError(f"inconsistent type map: {ret!s} {ii!s}")
             if len(ii) > len(ret):
                 ret = ii
         return ret
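
The `str(x)` -> `{x!s}` rewrites in this file and in finetune.py, multi_init.py, path.py, build_tf.py, and source/tests/common.py below are the explicit f-string conversion-flag fix (presumably Ruff's RUF010): calling str() inside a replacement field is redundant when the `!s` conversion flag does the same thing. For objects like lists the result is identical:

    ret, ii = ["H", "O"], ["H", "C"]

    # !s applies str() as part of the format spec; no nested call needed.
    msg = f"inconsistent type map: {ret!s} {ii!s}"
    assert msg == f"inconsistent type map: {str(ret)} {str(ii)}"
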
6 changes: 2 additions & 4 deletions deepmd/utils/finetune.py
@@ -56,7 +56,7 @@ def replace_model_params_with_pretrained_model(
         if i not in pretrained_type_map:
             out_line_type.append(i)
     assert not out_line_type, (
-        f"{str(out_line_type)} type(s) not contained in the pretrained model! "
+        f"{out_line_type!s} type(s) not contained in the pretrained model! "
         "Please choose another suitable one."
     )
     if cur_type_map != pretrained_type_map:
@@ -103,9 +103,7 @@ def replace_model_params_with_pretrained_model(
             # keep some params that are irrelevant to model structures (need to discuss) TODO
             if "trainable" in cur_para.keys():
                 target_para["trainable"] = cur_para["trainable"]
-            log.info(
-                f"Change the '{config_key}' from {str(cur_para)} to {str(target_para)}."
-            )
+            log.info(f"Change the '{config_key}' from {cur_para!s} to {target_para!s}.")
             jdata["model"][config_key] = target_para

     return jdata, cur_type_map
4 changes: 2 additions & 2 deletions deepmd/utils/multi_init.py
@@ -54,7 +54,7 @@ def replace_model_params_with_frz_multi_model(
         if i not in pretrained_type_map:
             out_line_type.append(i)
     assert not out_line_type, (
-        f"{str(out_line_type)} type(s) not contained in the pretrained model! "
+        f"{out_line_type!s} type(s) not contained in the pretrained model! "
         "Please choose another suitable one."
     )
     if cur_type_map != pretrained_type_map:
@@ -169,5 +169,5 @@ def _change_sub_config(jdata: Dict[str, Any], src_jdata: Dict[str, Any], sub_key
     # keep some params that are irrelevant to model structures (need to discuss) TODO
     if "trainable" in cur_para.keys():
         target_para["trainable"] = cur_para["trainable"]
-    log.info(f"Change the '{sub_key}' from {str(cur_para)} to {str(target_para)}.")
+    log.info(f"Change the '{sub_key}' from {cur_para!s} to {target_para!s}.")
     jdata[sub_key] = target_para
2 changes: 1 addition & 1 deletion deepmd/utils/network.py
@@ -187,7 +187,7 @@ def embedding_net(
         International Publishing, 2016.
     """
     input_shape = xx.get_shape().as_list()
-    outputs_size = [input_shape[1]] + network_size
+    outputs_size = [input_shape[1], *network_size]

     for ii in range(1, len(outputs_size)):
         w_initializer = tf.random_normal_initializer(
2 changes: 1 addition & 1 deletion deepmd/utils/path.py
@@ -114,7 +114,7 @@ def __str__(self) -> str:
         """Represent string."""

     def __repr__(self) -> str:
-        return f"{type(self)} ({str(self)})"
+        return f"{type(self)} ({self!s})"

     def __eq__(self, other) -> bool:
         return str(self) == str(other)
7 changes: 4 additions & 3 deletions deepmd/utils/spin.py
@@ -1,6 +1,7 @@
 # SPDX-License-Identifier: LGPL-3.0-or-later
 from typing import (
     List,
+    Optional,
 )

 from deepmd.env import (
@@ -24,9 +25,9 @@ class Spin:

     def __init__(
         self,
-        use_spin: List[bool] = None,
-        spin_norm: List[float] = None,
-        virtual_len: List[float] = None,
+        use_spin: Optional[List[bool]] = None,
+        spin_norm: Optional[List[float]] = None,
+        virtual_len: Optional[List[float]] = None,
     ) -> None:
         """Constructor."""
         self.use_spin = use_spin
2 changes: 1 addition & 1 deletion source/install/build_tf.py
@@ -151,7 +151,7 @@ def __call__(self):
         if not self.exists:
             raise RuntimeError(
                 f"Download {self.filename} from {self.url} failed! "
-                f"You can manually download it to {str(self.path)} and "
+                f"You can manually download it to {self.path!s} and "
                 "retry the script."
             )
         self.post_process()
4 changes: 1 addition & 3 deletions source/tests/common.py
@@ -919,9 +919,7 @@ def check_type_map_consistency(self, type_map_list):
             min_len = min([len(ii), len(ret)])
             for idx in range(min_len):
                 if ii[idx] != ret[idx]:
-                    raise RuntimeError(
-                        f"inconsistent type map: {str(ret)} {str(ii)}"
-                    )
+                    raise RuntimeError(f"inconsistent type map: {ret!s} {ii!s}")
             if len(ii) > len(ret):
                 ret = ii
         return ret
2 changes: 1 addition & 1 deletion source/tests/test_argument_parser.py
@@ -184,7 +184,7 @@ def run_test(self, *, command: str, mapping: "TEST_DICT"):
         )

         # test default values
-        cmd_args = [command] + required
+        cmd_args = [command, *required]
         buffer = StringIO()
         try:
             with redirect_stderr(buffer):
