[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Jan 13, 2025
1 parent 3216375 commit 7df79b0
Showing 84 changed files with 426 additions and 423 deletions.
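Nearly all of the changes below apply the same mechanical fix: the formatter run by the repository's pre-commit hooks now keeps a multi-line assert's condition on the assert line and wraps only the message in parentheses, instead of parenthesizing the condition and trailing the message after the closing parenthesis. A minimal sketch of the before/after pattern (the variable name and message are illustrative, not taken from any particular file); a second sketch covering the f-string cleanups follows the deepmd/infer/model_devi.py diff below.

value = 1  # placeholder so the sketch runs as-is

# Old layout (removed in this commit): parenthesized condition, trailing message.
assert (
    value is not None
), "value must be provided"

# New layout (added in this commit): bare condition, parenthesized message.
assert value is not None, (
    "value must be provided"
)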
6 changes: 3 additions & 3 deletions deepmd/dpmodel/atomic_model/linear_atomic_model.py
@@ -457,9 +457,9 @@ def _compute_weight(
list[np.ndarray]
the atomic ZBL weight for interpolation. (nframes, nloc, 1)
"""
assert (
self.sw_rmax > self.sw_rmin
), "The upper boundary `sw_rmax` must be greater than the lower boundary `sw_rmin`."
assert self.sw_rmax > self.sw_rmin, (
"The upper boundary `sw_rmax` must be greater than the lower boundary `sw_rmin`."
)

xp = array_api_compat.array_namespace(extended_coord, extended_atype)
dp_nlist = nlists_[0]
6 changes: 3 additions & 3 deletions deepmd/dpmodel/descriptor/dpa1.py
@@ -432,9 +432,9 @@ def change_type_map(
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
"""
assert (
self.type_map is not None
), "'type_map' must be defined when performing type changing!"
assert self.type_map is not None, (
"'type_map' must be defined when performing type changing!"
)
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
obj = self.se_atten
obj.ntypes = len(type_map)
12 changes: 6 additions & 6 deletions deepmd/dpmodel/descriptor/dpa2.py
@@ -543,9 +543,9 @@ def init_subclass_params(sub_data, sub_class):
)
self.rcsl_list.sort()
for ii in range(1, len(self.rcsl_list)):
assert (
self.rcsl_list[ii - 1][1] <= self.rcsl_list[ii][1]
), "rcut and sel are not in the same order"
assert self.rcsl_list[ii - 1][1] <= self.rcsl_list[ii][1], (
"rcut and sel are not in the same order"
)
self.rcut_list = [ii[0] for ii in self.rcsl_list]
self.nsel_list = [ii[1] for ii in self.rcsl_list]
self.use_econf_tebd = use_econf_tebd
@@ -678,9 +678,9 @@ def change_type_map(
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
"""
assert (
self.type_map is not None
), "'type_map' must be defined when performing type changing!"
assert self.type_map is not None, (
"'type_map' must be defined when performing type changing!"
)
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
self.type_map = type_map
self.type_embedding.change_type_map(type_map=type_map)
4 changes: 3 additions & 1 deletion deepmd/dpmodel/descriptor/hybrid.py
@@ -71,7 +71,9 @@ def __init__(
for ii in range(1, self.numb_descrpt):
assert (
self.descrpt_list[ii].get_ntypes() == self.descrpt_list[0].get_ntypes()
), f"number of atom types in {ii}th descriptor {self.descrpt_list[0].__class__.__name__} does not match others"
), (
f"number of atom types in {ii}th descriptor {self.descrpt_list[0].__class__.__name__} does not match others"
)
# if hybrid sel is larger than sub sel, the nlist needs to be cut for each type
hybrid_sel = self.get_sel()
nlist_cut_idx: list[np.ndarray] = []
6 changes: 3 additions & 3 deletions deepmd/dpmodel/descriptor/se_t_tebd.py
@@ -270,9 +270,9 @@ def change_type_map(
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
"""
assert (
self.type_map is not None
), "'type_map' must be defined when performing type changing!"
assert self.type_map is not None, (
"'type_map' must be defined when performing type changing!"
)
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
obj = self.se_ttebd
obj.ntypes = len(type_map)
6 changes: 3 additions & 3 deletions deepmd/dpmodel/fitting/general_fitting.py
@@ -242,9 +242,9 @@ def change_type_map(
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
"""
assert (
self.type_map is not None
), "'type_map' must be defined when performing type changing!"
assert self.type_map is not None, (
"'type_map' must be defined when performing type changing!"
)
assert self.mixed_types, "Only models in mixed types can perform type changing!"
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
self.type_map = type_map
12 changes: 6 additions & 6 deletions deepmd/dpmodel/fitting/polarizability_fitting.py
@@ -224,9 +224,9 @@ def change_type_map(
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
"""
assert (
self.type_map is not None
), "'type_map' must be defined when performing type changing!"
assert self.type_map is not None, (
"'type_map' must be defined when performing type changing!"
)
assert self.mixed_types, "Only models in mixed types can perform type changing!"
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
super().change_type_map(type_map=type_map)
@@ -280,9 +280,9 @@ def call(
"""
xp = array_api_compat.array_namespace(descriptor, atype)
nframes, nloc, _ = descriptor.shape
assert (
gr is not None
), "Must provide the rotation matrix for polarizability fitting."
assert gr is not None, (
"Must provide the rotation matrix for polarizability fitting."
)
# (nframes, nloc, _net_out_dim)
out = self._call_common(descriptor, atype, gr, g2, h2, fparam, aparam)[
self.var_name
2 changes: 1 addition & 1 deletion deepmd/dpmodel/utils/network.py
@@ -551,7 +551,7 @@ def check_shape_consistency(self) -> None:
if self.layers[ii].dim_out() != self.layers[ii + 1].dim_in():
raise ValueError(
f"the dim of layer {ii} output {self.layers[ii].dim_out} ",
f"does not match the dim of layer {ii+1} ",
f"does not match the dim of layer {ii + 1} ",
f"output {self.layers[ii].dim_out}",
)

18 changes: 9 additions & 9 deletions deepmd/dpmodel/utils/type_embed.py
@@ -166,9 +166,9 @@ def change_type_map(
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
"""
assert (
self.type_map is not None
), "'type_map' must be defined when performing type changing!"
assert self.type_map is not None, (
"'type_map' must be defined when performing type changing!"
)
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
if not self.use_econf_tebd:
do_resnet = self.neuron[0] in [
@@ -177,9 +177,9 @@ def change_type_map(
len(type_map),
len(type_map) * 2,
]
assert (
not do_resnet or self.activation_function == "Linear"
), "'activation_function' must be 'Linear' when performing type changing on resnet structure!"
assert not do_resnet or self.activation_function == "Linear", (
"'activation_function' must be 'Linear' when performing type changing on resnet structure!"
)
first_layer_matrix = self.embedding_net.layers[0].w
eye_vector = np.eye(self.ntypes, dtype=PRECISION_DICT[self.precision])
# preprocess for resnet connection
@@ -227,9 +227,9 @@ def get_econf_tebd(type_map, precision: str = "default"):
)
from deepmd.utils.econf_embd import type_map as periodic_table

assert (
type_map is not None
), "When using electronic configuration type embedding, type_map must be provided!"
assert type_map is not None, (
"When using electronic configuration type embedding, type_map must be provided!"
)

missing_types = [t for t in type_map if t not in periodic_table]
assert not missing_types, (
2 changes: 1 addition & 1 deletion deepmd/infer/model_devi.py
@@ -211,7 +211,7 @@ def write_model_devi_out(
f"min_devi_{item}",
f"avg_devi_{item}",
)
header += f'{"devi_e":19s}'
header += f"{'devi_e':19s}"
if atomic:
header += f"{'atm_devi_f(N)':19s}"
with open(fname, "ab") as fp:
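The other recurring auto-fix in this commit flips the quote nesting inside f-strings so that the outer string uses double quotes, and normalizes spacing inside the interpolated expressions; the same pattern appears in deepmd/dpmodel/utils/network.py above and in deepmd/loggers/training.py and deepmd/main.py below (training.py additionally merges two adjacent f-string literals into one). A minimal before/after sketch with illustrative variable names:

# Old style (removed): single-quoted f-string around a double-quoted literal,
# and no spaces around the operator inside the braces.
header = f'{"devi_e":19s}'
label = f"layer {1+1}"

# New style (added): double quotes outside, single quotes inside, spaced expression.
header = f"{'devi_e':19s}"
label = f"layer {1 + 1}"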
2 changes: 1 addition & 1 deletion deepmd/loggers/training.py
@@ -9,7 +9,7 @@ def format_training_message(
wall_time: float,
) -> str:
"""Format a training message."""
return f"batch {batch:7d}: " f"total wall time = {wall_time:.2f} s"
return f"batch {batch:7d}: total wall time = {wall_time:.2f} s"


def format_training_message_per_task(
2 changes: 1 addition & 1 deletion deepmd/main.py
@@ -72,7 +72,7 @@ class DeprecateAction(argparse.Action):
def __init__(self, *args, **kwargs) -> None:
self.call_count = 0
if "help" in kwargs:
kwargs["help"] = f'[DEPRECATED] {kwargs["help"]}'
kwargs["help"] = f"[DEPRECATED] {kwargs['help']}"
super().__init__(*args, **kwargs)

def __call__(self, parser, namespace, values, option_string=None):
24 changes: 12 additions & 12 deletions deepmd/pd/entrypoints/main.py
@@ -247,9 +247,9 @@ def train(
if multi_task:
config["model"], shared_links = preprocess_shared_params(config["model"])
# handle the special key
assert (
"RANDOM" not in config["model"]["model_dict"]
), "Model name can not be 'RANDOM' in multi-task mode!"
assert "RANDOM" not in config["model"]["model_dict"], (

Check warning on line 250 in deepmd/pd/entrypoints/main.py

View check run for this annotation

Codecov / codecov/patch

deepmd/pd/entrypoints/main.py#L250

Added line #L250 was not covered by tests
"Model name can not be 'RANDOM' in multi-task mode!"
)

# update fine-tuning config
finetune_links = None
@@ -404,9 +404,9 @@ def change_bias(
multi_task = "model_dict" in model_params
bias_adjust_mode = "change-by-statistic" if mode == "change" else "set-by-statistic"
if multi_task:
assert (
model_branch is not None
), "For multitask model, the model branch must be set!"
assert model_branch is not None, (
"For multitask model, the model branch must be set!"
)
assert model_branch in model_params["model_dict"], (
f"For multitask model, the model branch must be in the 'model_dict'! "
f"Available options are : {list(model_params['model_dict'].keys())}."
@@ -427,12 +427,12 @@

if bias_value is not None:
# use user-defined bias
assert model_to_change.model_type in [
"ener"
], "User-defined bias is only available for energy model!"
assert (
len(bias_value) == len(type_map)
), f"The number of elements in the bias should be the same as that in the type_map: {type_map}."
assert model_to_change.model_type in ["ener"], (
"User-defined bias is only available for energy model!"
)
assert len(bias_value) == len(type_map), (
f"The number of elements in the bias should be the same as that in the type_map: {type_map}."
)
old_bias = model_to_change.get_out_bias()
bias_to_set = paddle.to_tensor(
bias_value, dtype=old_bias.dtype, place=old_bias.place
12 changes: 6 additions & 6 deletions deepmd/pd/infer/deep_eval.py
@@ -92,12 +92,12 @@ def __init__(
model_keys = list(self.input_param["model_dict"].keys())
if isinstance(head, int):
head = model_keys[0]
assert (
head is not None
), f"Head must be set for multitask model! Available heads are: {model_keys}"
assert (
head in model_keys
), f"No head named {head} in model! Available heads are: {model_keys}"
assert head is not None, (
f"Head must be set for multitask model! Available heads are: {model_keys}"
)
assert head in model_keys, (
f"No head named {head} in model! Available heads are: {model_keys}"
)
self.input_param = self.input_param["model_dict"][head]
state_dict_head = {"_extra_state": state_dict["_extra_state"]}
for item in state_dict:
6 changes: 3 additions & 3 deletions deepmd/pd/model/descriptor/descriptor.py
@@ -128,9 +128,9 @@ def share_params(self, base_class, shared_level, resume=False):
If not start from checkpoint (resume is False),
some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
"""
assert (
self.__class__ == base_class.__class__
), "Only descriptors of the same type can share params!"
assert self.__class__ == base_class.__class__, (
"Only descriptors of the same type can share params!"
)
if shared_level == 0:
# link buffers
if hasattr(self, "mean"):
12 changes: 6 additions & 6 deletions deepmd/pd/model/descriptor/dpa1.py
@@ -378,9 +378,9 @@ def share_params(self, base_class, shared_level, resume=False) -> None:
If not start from checkpoint (resume is False),
some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
"""
assert (
self.__class__ == base_class.__class__
), "Only descriptors of the same type can share params!"
assert self.__class__ == base_class.__class__, (
"Only descriptors of the same type can share params!"
)
# For DPA1 descriptors, the user-defined share-level
# shared_level: 0
# share all parameters in both type_embedding and se_atten
@@ -449,9 +449,9 @@ def change_type_map(
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
"""
assert (
self.type_map is not None
), "'type_map' must be defined when performing type changing!"
assert self.type_map is not None, (
"'type_map' must be defined when performing type changing!"
)
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
obj = self.se_atten
obj.ntypes = len(type_map)
18 changes: 9 additions & 9 deletions deepmd/pd/model/descriptor/dpa2.py
@@ -254,9 +254,9 @@ def init_subclass_params(sub_data, sub_class):
)
self.rcsl_list.sort()
for ii in range(1, len(self.rcsl_list)):
assert (
self.rcsl_list[ii - 1][1] <= self.rcsl_list[ii][1]
), "rcut and sel are not in the same order"
assert self.rcsl_list[ii - 1][1] <= self.rcsl_list[ii][1], (
"rcut and sel are not in the same order"
)
self.rcut_list = [ii[0] for ii in self.rcsl_list]
self.nsel_list = [ii[1] for ii in self.rcsl_list]
self.use_econf_tebd = use_econf_tebd
@@ -386,9 +386,9 @@ def share_params(self, base_class, shared_level, resume=False) -> None:
If not start from checkpoint (resume is False),
some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
"""
assert (
self.__class__ == base_class.__class__
), "Only descriptors of the same type can share params!"
assert self.__class__ == base_class.__class__, (
"Only descriptors of the same type can share params!"
)
# For DPA2 descriptors, the user-defined share-level
# shared_level: 0
# share all parameters in type_embedding, repinit and repformers
@@ -419,9 +419,9 @@ def change_type_map(
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
"""
assert (
self.type_map is not None
), "'type_map' must be defined when performing type changing!"
assert self.type_map is not None, (
"'type_map' must be defined when performing type changing!"
)
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
self.type_map = type_map
self.type_embedding.change_type_map(type_map=type_map)
6 changes: 3 additions & 3 deletions deepmd/pd/model/descriptor/se_a.py
@@ -169,9 +169,9 @@ def share_params(self, base_class, shared_level, resume=False) -> None:
If not start from checkpoint (resume is False),
some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
"""
assert (
self.__class__ == base_class.__class__
), "Only descriptors of the same type can share params!"
assert self.__class__ == base_class.__class__, (
"Only descriptors of the same type can share params!"
)
# For SeA descriptors, the user-defined share-level
# shared_level: 0
# share all parameters in sea
12 changes: 6 additions & 6 deletions deepmd/pd/model/descriptor/se_t_tebd.py
@@ -246,9 +246,9 @@ def share_params(self, base_class, shared_level, resume=False) -> None:
If not start from checkpoint (resume is False),
some separated parameters (e.g. mean and stddev) will be re-calculated across different classes.
"""
assert (
self.__class__ == base_class.__class__
), "Only descriptors of the same type can share params!"
assert self.__class__ == base_class.__class__, (
"Only descriptors of the same type can share params!"
)
# For DPA1 descriptors, the user-defined share-level
# shared_level: 0
# share all parameters in both type_embedding and se_ttebd
@@ -317,9 +317,9 @@ def change_type_map(
"""Change the type related params to new ones, according to `type_map` and the original one in the model.
If there are new types in `type_map`, statistics will be updated accordingly to `model_with_new_type_stat` for these new types.
"""
assert (
self.type_map is not None
), "'type_map' must be defined when performing type changing!"
assert self.type_map is not None, (
"'type_map' must be defined when performing type changing!"
)
remap_index, has_new_type = get_index_between_two_maps(self.type_map, type_map)
obj = self.se_ttebd
obj.ntypes = len(type_map)