Skip to content

Commit

Permalink
ruff updated on Python 3.9
Browse files Browse the repository at this point in the history
  • Loading branch information
Paul-Saves committed Apr 17, 2024
1 parent a344037 commit a60a6cf
Show file tree
Hide file tree
Showing 5 changed files with 22 additions and 22 deletions.
6 changes: 3 additions & 3 deletions smt/applications/mixed_integer.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,9 +224,9 @@ def __init__(
)
and self._surrogate.options["categorical_kernel"] is None
):
self._surrogate.options[
"categorical_kernel"
] = MixIntKernelType.HOMO_HSPHERE
self._surrogate.options["categorical_kernel"] = (
MixIntKernelType.HOMO_HSPHERE
)
warnings.warn(
"Using MixedIntegerSurrogateModel integer model with Continuous Relaxation is not supported. \
Switched to homoscedastic hypersphere kernel instead."
Expand Down
20 changes: 10 additions & 10 deletions smt/surrogate_models/krg_based.py
Original file line number Diff line number Diff line change
Expand Up @@ -720,16 +720,16 @@ def _matrix_data_corr(
d_cont = d[:, np.logical_not(cat_features)]
if self.options["corr"] == "squar_sin_exp":
if self.options["categorical_kernel"] != MixIntKernelType.GOWER:
theta_cont_features[
-len([self.design_space.is_cat_mask]) :
] = np.atleast_2d(
np.array([True] * len([self.design_space.is_cat_mask]))
).T
theta_cat_features[1][
-len([self.design_space.is_cat_mask]) :
] = np.atleast_2d(
np.array([False] * len([self.design_space.is_cat_mask]))
).T
theta_cont_features[-len([self.design_space.is_cat_mask]) :] = (
np.atleast_2d(
np.array([True] * len([self.design_space.is_cat_mask]))
).T
)
theta_cat_features[1][-len([self.design_space.is_cat_mask]) :] = (
np.atleast_2d(
np.array([False] * len([self.design_space.is_cat_mask]))
).T
)

theta_cont = theta[theta_cont_features[:, 0]]
r_cont = _correlation_types[corr](theta_cont, d_cont)
Expand Down
6 changes: 3 additions & 3 deletions smt/surrogate_models/tests/test_surrogate_model_examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -818,9 +818,9 @@ def df_dx(x):
genn.options["hidden_layer_sizes"] = [6, 6]
genn.options["alpha"] = 0.1
genn.options["lambd"] = 0.1
genn.options[
"gamma"
] = 1.0 # 1 = gradient-enhanced on, 0 = gradient-enhanced off
genn.options["gamma"] = (
1.0 # 1 = gradient-enhanced on, 0 = gradient-enhanced off
)
genn.options["num_iterations"] = 1000
genn.options["is_backtracking"] = True
genn.options["is_normalize"] = False
Expand Down
6 changes: 3 additions & 3 deletions smt/utils/design_space.py
Original file line number Diff line number Diff line change
Expand Up @@ -526,9 +526,9 @@ def unfold_x(

# The is_acting matrix is simply repeated column-wise
if is_acting is not None:
is_acting_unfolded[
:, i_x_unfold : i_x_unfold + n_dim_cat
] = np.tile(is_acting[:, [i]], (1, n_dim_cat))
is_acting_unfolded[:, i_x_unfold : i_x_unfold + n_dim_cat] = (
np.tile(is_acting[:, [i]], (1, n_dim_cat))
)

i_x_unfold += n_dim_cat

Expand Down
6 changes: 3 additions & 3 deletions smt/utils/neural_net/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,9 +221,9 @@ def train(

# Compute average cost and print output
avg_cost = np.mean(optimizer.cost_history).squeeze()
self._training_history["epoch_" + str(e)][
"batch_" + str(b)
] = optimizer.cost_history
self._training_history["epoch_" + str(e)]["batch_" + str(b)] = (
optimizer.cost_history
)

if not silent:
print(
Expand Down

0 comments on commit a60a6cf

Please sign in to comment.