Skip to content
This repository has been archived by the owner on Feb 28, 2024. It is now read-only.

Commit

Permalink
Make arguments consistent
Browse files Browse the repository at this point in the history
  • Loading branch information
MechCoder committed Sep 8, 2016
1 parent 52ce4fc commit e0e2769
Show file tree
Hide file tree
Showing 5 changed files with 22 additions and 17 deletions.
6 changes: 5 additions & 1 deletion skopt/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,4 +78,8 @@ def f(x):
"learning",
"optimizer",
"plots",
"space")
"space",
"gp_minimize",
"dummy_minimize",
"forest_minimize",
"gbrt_minimize")
File renamed without changes.
11 changes: 6 additions & 5 deletions skopt/optimizer/forest_opt.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,16 @@

from sklearn.utils import check_random_state

from .base_opt import base_minimize
from .base import base_minimize
from ..learning import ExtraTreesRegressor
from ..learning import RandomForestRegressor


def forest_minimize(func, dimensions, base_estimator='et', n_calls=100,
n_points=1000, n_random_starts=10, x0=None, y0=None,
n_jobs=1, random_state=None, acq_func="EI",
xi=0.01, kappa=1.96, verbose=False, callback=None):
def forest_minimize(func, dimensions, base_estimator=None,
n_calls=100, n_random_starts=10,
acq_func="EI", acq_optimizer="auto",
x0=None, y0=None, random_state=None, verbose=False,
callback=None, n_points=1000, xi=0.01, kappa=1.96, n_jobs=1):
"""Sequential optimisation using decision trees.
A tree-based regression model is used to model the expensive-to-evaluate
Expand Down
11 changes: 6 additions & 5 deletions skopt/optimizer/gbrt_opt.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,15 @@
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.utils import check_random_state

from .base_opt import base_minimize
from .base import base_minimize
from ..learning import GradientBoostingQuantileRegressor


def gbrt_minimize(func, dimensions, base_estimator=None, n_calls=100,
n_points=1000, n_random_starts=10, x0=None, y0=None,
n_jobs=1, random_state=None, acq_func="EI",
xi=0.01, kappa=1.96, verbose=False, callback=None):
def gbrt_minimize(func, dimensions, base_estimator=None,
n_calls=100, n_random_starts=10,
acq_func="EI", acq_optimizer="auto",
x0=None, y0=None, random_state=None, verbose=False,
callback=None, n_points=1000, xi=0.01, kappa=1.96, n_jobs=1):
"""Sequential optimization using gradient boosted trees.
Gradient boosted regression trees are used to model the (very)
Expand Down
11 changes: 5 additions & 6 deletions skopt/optimizer/gp_opt.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,16 +9,15 @@
from sklearn.gaussian_process.kernels import WhiteKernel
from sklearn.utils import check_random_state

from .base_opt import base_minimize
from .base import base_minimize
from ..space import Space


def gp_minimize(func, dimensions, base_estimator=None,
acq_func="EI", xi=0.01, kappa=1.96,
acq_optimizer="auto",
n_calls=100, n_points=500, n_random_starts=10,
n_restarts_optimizer=5, x0=None, y0=None,
random_state=None, verbose=False, callback=None):
n_calls=100, n_random_starts=10,
acq_func="EI", acq_optimizer="auto", x0=None, y0=None,
random_state=None, verbose=False, callback=None,
n_points=10000, n_restarts_optimizer=5, xi=0.01, kappa=1.96):
"""Bayesian optimization using Gaussian Processes.
If every function evaluation is expensive, for instance
Expand Down

0 comments on commit e0e2769

Please sign in to comment.