Merge pull request #109 from jbboin/dtype_fix
Prevent int parameters from being cast to float in Bayesian optimization
Lars Hertel committed Oct 18, 2020
Parents: 86a7d25 + f13aad0 · Commit: ff6466e
Showing 2 changed files with 21 additions and 2 deletions.
sherpa/algorithms/bayesian_optimization.py (1 addition, 1 deletion)
@@ -261,7 +261,7 @@ def _reverse_to_sherpa_format(X_next, parameters):
         transform = ParameterTransform.from_parameter(p)
         col_dict[p.name] = transform.gpyopt_design_format_to_list_in_sherpa_format(X_next[:, i])
 
-    return list(pandas.DataFrame(col_dict).T.to_dict().values())
+    return list(pandas.DataFrame(col_dict).astype(numpy.object).T.to_dict().values())
 
 
 class ParameterTransform(object):
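Why the cast matters: pandas.DataFrame(col_dict) keeps a separate dtype per column, but the transpose in .T has to pick one common dtype for the whole frame, so a mix of int and float columns is upcast to float64 and integer parameter values come back as floats. Casting to object dtype first sidesteps the upcast. A minimal standalone sketch of the behavior (illustrative values, not Sherpa code; numpy.object is just an alias for the builtin object):

import pandas

col_dict = {'param_int': [0, 1], 'param_float': [0.1, 1.1]}

# Before the fix: .T upcasts everything to float64, so the int values
# survive the round trip only as floats.
rows = list(pandas.DataFrame(col_dict).T.to_dict().values())
print(rows)  # [{'param_int': 0.0, 'param_float': 0.1}, {'param_int': 1.0, 'param_float': 1.1}]

# With the fix: object dtype stores plain Python values, so the
# transpose has nothing to upcast and each type is preserved.
rows = list(pandas.DataFrame(col_dict).astype(object).T.to_dict().values())
print(rows)  # [{'param_int': 0, 'param_float': 0.1}, {'param_int': 1, 'param_float': 1.1}]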
tests/test_gpyopt.py (20 additions, 1 deletion)
@@ -403,4 +403,23 @@ def f(x, sd=1):
     # rval = study.get_best_result()
     # print(rval)
     print(study.results.query("Status=='COMPLETED'"))
-    # assert numpy.sqrt((rval['Objective'] - 3.)**2) < 0.2
+    # assert numpy.sqrt((rval['Objective'] - 3.)**2) < 0.2
+
+
+def test_mixed_dtype():
+    algorithm = GPyOpt(max_num_trials=4)
+    parameters = [
+        sherpa.Choice('param_int', [0, 1]),
+        sherpa.Choice('param_float', [0.1, 1.1]),
+    ]
+    study = sherpa.Study(
+        parameters=parameters,
+        algorithm=algorithm,
+        lower_is_better=True,
+        disable_dashboard=True,
+    )
+    for trial in study:
+        study.add_observation(trial, iteration=0, objective=0)
+        study.finalize(trial)
+        assert type(trial.parameters['param_int']) == int
+        assert type(trial.parameters['param_float']) == float
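The new test exercises the fix end to end: it runs a four-trial GPyOpt study over one integer-valued and one float-valued Choice parameter and asserts that each suggested value keeps its original Python type. Without the astype cast above, param_int values come back as floats and the first assertion fails.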
