From ff56d58ee301778cca41817ca3e2bcf81bbe36be Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 19 May 2014 10:06:52 +0200 Subject: [PATCH 001/352] Initial commit, will be used for MetaSel --- .gitignore | 2 + AutoSklearn/__init__.py | 1 + AutoSklearn/autosklearn.py | 218 ++++++++++++++++++ AutoSklearn/components/__init__.py | 1 + .../components/classification/__init__.py | 25 ++ .../components/classification/liblinear.py | 56 +++++ .../components/classification/libsvm_svc.py | 55 +++++ .../classification/random_forest.py | 76 ++++++ AutoSklearn/components/classification_base.py | 30 +++ .../components/preprocessing/__init__.py | 25 ++ AutoSklearn/components/preprocessing/pca.py | 51 ++++ AutoSklearn/components/preprocessor_base.py | 30 +++ AutoSklearn/util.py | 106 +++++++++ setup.py | 0 tests/test_all_combinations.py | 122 ++++++++++ tests/test_autosklearn.py | 132 +++++++++++ 16 files changed, 930 insertions(+) create mode 100644 .gitignore create mode 100644 AutoSklearn/__init__.py create mode 100644 AutoSklearn/autosklearn.py create mode 100644 AutoSklearn/components/__init__.py create mode 100644 AutoSklearn/components/classification/__init__.py create mode 100644 AutoSklearn/components/classification/liblinear.py create mode 100644 AutoSklearn/components/classification/libsvm_svc.py create mode 100644 AutoSklearn/components/classification/random_forest.py create mode 100644 AutoSklearn/components/classification_base.py create mode 100644 AutoSklearn/components/preprocessing/__init__.py create mode 100644 AutoSklearn/components/preprocessing/pca.py create mode 100644 AutoSklearn/components/preprocessor_base.py create mode 100644 AutoSklearn/util.py create mode 100644 setup.py create mode 100644 tests/test_all_combinations.py create mode 100644 tests/test_autosklearn.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000..00cb3af0c8 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +~ +.idea diff --git a/AutoSklearn/__init__.py b/AutoSklearn/__init__.py new file mode 100644 index 0000000000..8f0ce6cb7c --- /dev/null +++ b/AutoSklearn/__init__.py @@ -0,0 +1 @@ +__author__ = 'feurerm' diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py new file mode 100644 index 0000000000..b78b9a8c9b --- /dev/null +++ b/AutoSklearn/autosklearn.py @@ -0,0 +1,218 @@ +import numpy as np +from numpy import float64 + +from sklearn.base import BaseEstimator, ClassifierMixin +from sklearn.utils import check_random_state +from sklearn.utils.validation import safe_asarray, assert_all_finite + +from .components import classification as classification_components +from .components import preprocessing as preprocessing_components +from .util import NoModelException, hp_choice + +task_types = set(["classification"]) + +class AutoSklearnClassifier(BaseEstimator, ClassifierMixin): + """AutoSklearn + + AutoSklearn provides a search space covering a (work in progress) huge + part of the scikit-learn models and the possibility to evaluate them. + Together with a hyperparameter optimization package, AutoSklearn solves + the Combined algorithm selection and Hyperparameter optimization problem + (CASH). + + This class implements the classification task. It can perform + preprocessing. It can render a search space for all known classification + and preprocessing problems. + + Contrary to the sklearn API it is not possible to enumerate the + possible parameters in the __init__ function because we only know the + available classifiers at runtime. For this reason the user must + specifiy the parameters via set_params. + + Parameters + ---------- + random_state : int, RandomState instance or None, optional (default=None) + If int, random_state is the seed used by the random number generator; + If RandomState instance, random_state is the random number generator; + If None, the random number generator is the RandomState instance + used by `np.random`. + + Attributes + ---------- + _estimator : An underlying scikit-learn target model specified by a call to + set_parames + + See also + -------- + + References + ---------- + + Examples + -------- + """ + def __init__(self, + classifier=None, + preprocessor=None, + random_state=None, + parameters=None): + + # Test that either the classifier or the parameters + if classifier is not None: + assert parameters is None + # TODO: Somehow assemble a parameters dictionary + + if preprocessor is not None: + assert classifier is not None + assert parameters is None + + if parameters is not None: + assert classifier is None + assert preprocessor is None + classifier = parameters.get("classifier") + preprocessor = parameters.get("preprocessor") + if preprocessor == "None": + preprocessor = None + + self.random_state = random_state + self._estimator = None + self._preprocessor = None + self.parameters = parameters if parameters is not None else {} + # TODO: add valid parameters to the parameters dictionary + + # TODO: make sure that there are no duplicate classifiers + self._available_classifiers = classification_components._classifiers + self._available_preprocessors = preprocessing_components._preprocessors + + if random_state is None: + random_state = check_random_state(1) + + self._estimator_class = self._available_classifiers.get(classifier) + if classifier is not None and self._estimator_class is None: + raise KeyError("The classifier %s is not in the list " + "of classifiers found on this system: %s" % + (classifier, self._available_classifiers)) + + self._preprocessor_class = self._available_preprocessors.get(preprocessor) + if preprocessor is not None and self._preprocessor_class is None: + raise KeyError("The preprocessor %s is not in the list " + "of preprocessors found on this system: %s" % + (preprocessor, self._available_preprocessors)) + + def fit(self, X, Y): + # TODO: perform input validation + # TODO: look if X.shape[0] == y.shape[0] + # TODO: check if the hyperparameters have been set... + if self._estimator_class is None: + raise NoModelException(self, "fit(X, Y)") + + # Extract Hyperparameters from the parameters dict... + space = self._estimator_class.get_hyperparameter_search_space() + name = space["name"] + + parameters = {} + for key in space: + if "%s:%s" % (name, key) in self.parameters: + parameters[key] = self.parameters["%s:%s" % (name, key)] + + random_state = check_random_state(self.random_state) + self._estimator = self._estimator_class(random_state=random_state, + **parameters) + + self._validate_input_X(X) + self._validate_input_Y(Y) + + if self._preprocessor_class is not None: + # TODO: copy everything or not? + parameters = {} + preproc_space = self._preprocessor_class\ + .get_hyperparameter_search_space() + preproc_name = preproc_space["name"] + + for key in preproc_space: + if "%s:%s" % (preproc_name, key) in self.parameters: + parameters[key] = self.parameters["%s:%s" % (preproc_name, key)] + + self._preprocessor = self._preprocessor_class( + random_state=random_state, **parameters) + self._preprocessor.fit(X, Y) + X = self._preprocessor.transform(X) + + self._estimator.fit(X, Y) + return self + + def predict(self, X): + if self._preprocessor is not None: + X = self._preprocessor.transform(X) + self._validate_input_X(X) + return self._estimator.predict(X) + + def _validate_input_X(self, X): + # TODO: think of all possible states which can occur and how to + # handle them + if not self._estimator.handles_missing_values() or \ + (self._preprocessor is not None and not\ + self._preprocessor.handles_missing_value()): + assert_all_finite(X) + X = safe_asarray(X) + else: + raise NotImplementedError() + + if not self._estimator.handles_nominal_features() or \ + (self._preprocessor is not None and not \ + self._preprocessor.handles_nominal_features()): + if X.dtype not in (np.float64, float64, np.float32, float): + raise ValueError("Data type of X matrix is not float but %s!" + % X.dtype) + else: + raise NotImplementedError() + + if not self._estimator.handles_numeric_features() or \ + (self._preprocessor is not None and not \ + self._preprocessor.handles_numeric_features()): + raise NotImplementedError() + else: + if X.dtype not in (np.float64, float64, np.float32, float): + raise ValueError("Data type of X matrix is not float but %s!" + % X.dtype) + + def _validate_input_Y(self, Y): + Y = np.atleast_1d(Y) + if not self._estimator.handles_non_binary_classes() or \ + (self._preprocessor is not None and not \ + self._preprocessor.handles_non_binary_classes()): + unique = np.unique(Y) + if unique > 2: + raise ValueError("Estimator %s which only handles binary " + "classes cannot handle %d unique values" % + (self._estimator, unique)) + else: + pass + + if len(Y.shape) > 1: + raise NotImplementedError() + + def add_model_class(self, model): + raise NotImplementedError() + + def get_hyperparameter_search_space(self): + classifiers = {} + for name in self._available_classifiers: + classifier_parameters = self._available_classifiers[name]\ + .get_hyperparameter_search_space() + print classifier_parameters + classifier_parameters["name"] = name + classifiers["classifier:" + name] = classifier_parameters + + preprocessors = {} + preprocessors[None] = {} + for name in self._available_preprocessors: + preprocessor_parameters = self._available_preprocessors[name]\ + .get_hyperparameter_search_space() + preprocessor_parameters["name"] = name + preprocessors["preprocessing:" + name] = preprocessor_parameters + return {"classifier": hp_choice("classifier", classifiers.values()), + "preprocessing": hp_choice("preprocessing", preprocessors.values())} + + # TODO: maybe provide an interface to the underlying predictor like + # decision_function or predict_proba \ No newline at end of file diff --git a/AutoSklearn/components/__init__.py b/AutoSklearn/components/__init__.py new file mode 100644 index 0000000000..8f0ce6cb7c --- /dev/null +++ b/AutoSklearn/components/__init__.py @@ -0,0 +1 @@ +__author__ = 'feurerm' diff --git a/AutoSklearn/components/classification/__init__.py b/AutoSklearn/components/classification/__init__.py new file mode 100644 index 0000000000..99207957a9 --- /dev/null +++ b/AutoSklearn/components/classification/__init__.py @@ -0,0 +1,25 @@ +__author__ = 'feurerm' + +import inspect +import os +import pkgutil +import sys + +from ..classification_base import AutoSklearnClassificationAlgorithm + +classifier_directory = os.path.split(__file__)[0] +_classifiers = {} + + +for module_loader, module_name, ispkg in pkgutil.iter_modules([classifier_directory]): + full_module_name = "%s.%s" % (__package__, module_name) + if full_module_name not in sys.modules and not ispkg: + module = module_loader.find_module(module_name).load_module(full_module_name) + + for member_name, obj in inspect.getmembers(module): + if inspect.isclass(obj) and AutoSklearnClassificationAlgorithm in obj.__bases__: + # TODO test if the obj implements the interface + # Keep in mind that this only instantiates the wrapper, + # but not the real target classifier + classifier = obj + _classifiers[module_name] = classifier diff --git a/AutoSklearn/components/classification/liblinear.py b/AutoSklearn/components/classification/liblinear.py new file mode 100644 index 0000000000..70ac1fecc1 --- /dev/null +++ b/AutoSklearn/components/classification/liblinear.py @@ -0,0 +1,56 @@ +import sklearn.svm + +from ...util import hp_uniform, hp_choice +from ..classification_base import AutoSklearnClassificationAlgorithm + +class LibLinear_SVC(AutoSklearnClassificationAlgorithm): + # TODO: maybe add dual and crammer-singer? + def __init__(self, penalty="l2", loss="l2", C=1.0, LOG2_C=None, random_state=None): + self.penalty = penalty + self.loss = loss + self.C = C + self.LOG2_C = LOG2_C + self.random_state = random_state + self.estimator = None + + def fit(self, X, Y): + if self.LOG2_C is not None: + self.C = 2 ** self.LOG2_C + self.estimator = sklearn.svm.LinearSVC(penalty=self.penalty, + loss=self.loss, C=self.C, + random_state=self.random_state) + self.estimator.fit(X, Y) + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict(X) + + def handles_missing_values(self): + # TODO: should be able to handle sparse data itself... + return False + + def handles_nominal_features(self): + return False + + def handles_numeric_features(self): + return True + + def handles_non_binary_classes(self): + # TODO: describe whether by OneVsOne or OneVsTheRest + return True + + @staticmethod + def get_hyperparameter_search_space(): + # penalty l1 and loss l1 together are forbidden + penalty_and_loss = hp_choice("penalty_and_loss", + [{"penalty": "l1", "loss": "l2"}, + {"penalty": "l2", "loss": "l1"}, + {"penalty": "l2", "loss": "l2"}]) + loss = hp_choice("loss", ["l1", "l2"]) + LOG2_C = hp_uniform("LOG2_C", -5, 15) + return {"name": "liblinear", "penalty_and_loss": penalty_and_loss, + "LOG2_C": LOG2_C} + + def __str__(self): + return "AutoSklearn Liblinear Classifier" diff --git a/AutoSklearn/components/classification/libsvm_svc.py b/AutoSklearn/components/classification/libsvm_svc.py new file mode 100644 index 0000000000..6b57c0ae61 --- /dev/null +++ b/AutoSklearn/components/classification/libsvm_svc.py @@ -0,0 +1,55 @@ +import sklearn.svm + +from ...util import hp_uniform +from ..classification_base import AutoSklearnClassificationAlgorithm + +class LibSVM_SVC(AutoSklearnClassificationAlgorithm): + # TODO: maybe ad shrinking to the parameters? + def __init__(self, C=1.0, gamma=0.0, + LOG2_C=None, LOG2_gamma=None, + random_state=None): + self.C = C + self.gamma = gamma + self.LOG2_C = LOG2_C + self.LOG2_gamma = LOG2_gamma + self.random_state = random_state + self.estimator = None + + def fit(self, X, Y): + if self.LOG2_C is not None: + self.C = 2 ** self.LOG2_C + if self.LOG2_gamma is not None: + self.gamma = 2 ** self.LOG2_gamma + + self.estimator = sklearn.svm.SVC(C=self.C, gamma=self.gamma, + random_state=self.random_state, + cache_size=2000) + self.estimator.fit(X, Y) + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def handles_missing_values(self): + # TODO: should be able to handle sparse data itself... + return False + + def handles_nominal_features(self): + return False + + def handles_numeric_features(self): + return True + + def handles_non_binary_classes(self): + # TODO: describe whether by OneVsOne or OneVsTheRest + return True + + @staticmethod + def get_hyperparameter_search_space(): + LOG2_C = hp_uniform("LOG2_C", -5, 15) + LOG2_gamma = hp_uniform("LOG2_gamma", -15, 5) + return {"name": "libsmv_scv", "LOG2_C": LOG2_C, "LOG2_gamma": LOG2_gamma} + + def __str__(self): + return "AutoSklearn LibSVM Classifier" diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py new file mode 100644 index 0000000000..83859e541f --- /dev/null +++ b/AutoSklearn/components/classification/random_forest.py @@ -0,0 +1,76 @@ +import sklearn.ensemble + +from hyperopt.pyll import scope + +from ...util import hp_uniform, hp_choice, hp_quniform +from ..classification_base import AutoSklearnClassificationAlgorithm + +class RandomForest(AutoSklearnClassificationAlgorithm): + def __init__(self, n_estimators=10, criterion='gini', max_features='auto', + max_depth=None, min_samples_split=2, min_samples_leaf=1, + bootstrap=True, random_state=None, n_jobs=1): + self.n_estimators = n_estimators + self.criterion = criterion + self.max_features = max_features + self.max_depth = max_depth + self.min_samples_split = min_samples_split + self.min_samples_leaf = min_samples_leaf + self.n_jobs = n_jobs + self.bootstrap = bootstrap + self.random_state = random_state + self.estimator = None + + def fit(self, X, Y): + self.n_estimators = int(self.n_estimators) + if self.max_depth is not None: + self.max_depth = int(self.max_depth) + self.min_samples_split = int(self.min_samples_split) + self.min_samples_leaf = int(self.min_samples_leaf) + if self.max_features not in ("sqrt", ("log2")): + self.max_features = float(self.max_features) + + self.estimator = sklearn.ensemble.RandomForestClassifier( + n_estimators=self.n_estimators, criterion=self.criterion, + max_depth=self.max_depth, min_samples_split=self + .min_samples_split, min_samples_leaf=self.min_samples_leaf, + max_features=self.max_features, random_state=self.random_state, + n_jobs=self.n_jobs) + self.estimator.fit(X, Y) + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def handles_missing_values(self): + return False + + def handles_nominal_features(self): + return False + + def handles_numeric_features(self): + return True + + def handles_non_binary_classes(self): + # TODO: describe whether by OneVsOne or OneVsTheRest + return True + + @staticmethod + def get_hyperparameter_search_space(): + n_estimators = scope.int(hp_quniform("n_estimators", 10, 100, 1)) + criterion = hp_choice("criterion", ["gini", "entropy"]) + max_features = hp_uniform("max_features", 0.01, 1.0) + # Don't know how to parametrize this...RF should rather be + # regularized by the other parameters + # max_depth = hp_uniform("max_depth", lower, upper) + min_samples_split = scope.int(hp_quniform("min_samples_split", 1, 20, 1)) + min_samples_leaf = scope.int(hp_quniform("min_samples_leaf", 1, 20, 1)) + bootstrap = hp_choice("bootstrap", [True, False]) + return {"name": "random_forest", + "n_estimators": n_estimators, "criterion": criterion, + "max_features": max_features, "min_samples_split": + min_samples_split, "min_samples_leaf": min_samples_leaf, + "bootstrap": bootstrap} + + def __str__(self): + return "AutoSklearn LibSVM Classifier" diff --git a/AutoSklearn/components/classification_base.py b/AutoSklearn/components/classification_base.py new file mode 100644 index 0000000000..57880a818a --- /dev/null +++ b/AutoSklearn/components/classification_base.py @@ -0,0 +1,30 @@ +class AutoSklearnClassificationAlgorithm(object): + def __init__(self): + self.estimator = None + + def handles_missing_values(self): + raise NotImplementedError() + + def handles_nominal_features(self): + raise NotImplementedError() + + def handles_numeric_features(self): + raise NotImplementedError() + + def handles_non_binary_classes(self): + raise NotImplementedError() + + def get_hyperparameter_search_space(self): + raise NotImplementedError() + + def fit(self, X, Y): + raise NotImplementedError() + + def predict(self, X): + raise NotImplementedError() + + def get_estimator(self): + return self.estimator + + def __str__(self): + raise NotImplementedError() diff --git a/AutoSklearn/components/preprocessing/__init__.py b/AutoSklearn/components/preprocessing/__init__.py new file mode 100644 index 0000000000..ce970e6709 --- /dev/null +++ b/AutoSklearn/components/preprocessing/__init__.py @@ -0,0 +1,25 @@ +__author__ = 'feurerm' + +import inspect +import os +import pkgutil +import sys + +from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm + +preprocessors_directory = os.path.split(__file__)[0] +_preprocessors = {} + + +for module_loader, module_name, ispkg in pkgutil.iter_modules([preprocessors_directory]): + full_module_name = "%s.%s" % (__package__, module_name) + if full_module_name not in sys.modules and not ispkg: + module = module_loader.find_module(module_name).load_module(full_module_name) + + for member_name, obj in inspect.getmembers(module): + if inspect.isclass(obj) and AutoSklearnPreprocessingAlgorithm in obj.__bases__: + # TODO test if the obj implements the interface + # Keep in mind that this only instantiates the wrapper, + # but not the real target classifier + preprocessor = obj + _preprocessors[module_name] = preprocessor diff --git a/AutoSklearn/components/preprocessing/pca.py b/AutoSklearn/components/preprocessing/pca.py new file mode 100644 index 0000000000..095df2040c --- /dev/null +++ b/AutoSklearn/components/preprocessing/pca.py @@ -0,0 +1,51 @@ +import sklearn.decomposition + +from ...util import hp_uniform, hp_choice +from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm + +class PCA(AutoSklearnPreprocessingAlgorithm): + def __init__(self, keep_variance=1.0, whiten=False, random_state=None): + self.keep_variance = keep_variance + self.whiten = whiten + + def fit(self, X, Y): + self.preprocessor = sklearn.decomposition.PCA(whiten=self.whiten, + copy=True) + self.preprocessor.fit(X, Y) + + sum_ = 0. + idx = 0 + while idx < len(self.preprocessor.explained_variance_ratio_) and \ + sum_ < self.keep_variance: + sum_ += self.preprocessor.explained_variance_ratio_[idx] + idx += 1 + + components = self.preprocessor.components_ + self.preprocessor.components_ = components[:idx] + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + def handles_missing_values(self): + return False + + def handles_nominal_features(self): + return False + + def handles_numeric_features(self): + return True + + def handles_non_binary_classes(self): + return True + + @staticmethod + def get_hyperparameter_search_space(): + keep_variance = hp_uniform("n_components", 0.5, 1.0) + whiten = hp_choice("whiten", ["False", "True"]) + return {"name": "pca", "keep_variance": keep_variance, + "whiten": whiten} + + def __str__(self): + return "AutoSklearn Principle Component Analysis preprocessor." \ No newline at end of file diff --git a/AutoSklearn/components/preprocessor_base.py b/AutoSklearn/components/preprocessor_base.py new file mode 100644 index 0000000000..4537ff2d78 --- /dev/null +++ b/AutoSklearn/components/preprocessor_base.py @@ -0,0 +1,30 @@ +class AutoSklearnPreprocessingAlgorithm(object): + def __init__(self): + self.estimator = None + + def handles_missing_values(self): + raise NotImplementedError() + + def handles_nominal_features(self): + raise NotImplementedError() + + def handles_numeric_features(self): + raise NotImplementedError() + + def handles_non_binary_classes(self): + raise NotImplementedError() + + def get_hyperparameter_search_space(self): + raise NotImplementedError() + + def fit(self, X, Y): + raise NotImplementedError() + + def transform(self, X): + raise NotImplementedError() + + def get_preprocessor(self): + return self.estimator + + def __str__(self): + raise NotImplementedError() diff --git a/AutoSklearn/util.py b/AutoSklearn/util.py new file mode 100644 index 0000000000..1a36dbd338 --- /dev/null +++ b/AutoSklearn/util.py @@ -0,0 +1,106 @@ +import hyperopt.pyll as pyll + + +class NoModelException(Exception): + def __init__(self, cls, method): + self.cls = cls + self.method = method + + def __str__(self): + return repr("You called %s.%s without specifying a model first." + % (type(self.cls), self.method)) + + +def hp_pchoice(label, p_options): + """ + label: string + p_options: list of (probability, option) pairs + """ + if not isinstance(label, basestring): + raise TypeError('require string label') + p, options = zip(*p_options) + n_options = len(options) + ch = pyll.scope.hyperopt_param(label, + pyll.scope.categorical( + p, + upper=n_options)) + return pyll.scope.switch(ch, *options) + + +def hp_choice(label, options): + if not isinstance(label, basestring): + raise TypeError('require string label') + ch = pyll.scope.hyperopt_param(label, + pyll.scope.randint(len(options))) + return pyll.scope.switch(ch, *options) + + +def hp_randint(label, *args, **kwargs): + if not isinstance(label, basestring): + raise TypeError('require string label') + return pyll.scope.hyperopt_param(label, + pyll.scope.randint(*args, **kwargs)) + + +def hp_uniform(label, *args, **kwargs): + if not isinstance(label, basestring): + raise TypeError('require string label') + return pyll.scope.float( + pyll.scope.hyperopt_param(label, + pyll.scope.uniform(*args, **kwargs))) + + +def hp_quniform(label, *args, **kwargs): + if not isinstance(label, basestring): + raise TypeError('require string label') + return pyll.scope.float( + pyll.scope.hyperopt_param(label, + pyll.scope.quniform(*args, **kwargs))) + + +def hp_loguniform(label, *args, **kwargs): + if not isinstance(label, basestring): + raise TypeError('require string label') + return pyll.scope.float( + pyll.scope.hyperopt_param(label, + pyll.scope.loguniform(*args, **kwargs))) + + +def hp_qloguniform(label, *args, **kwargs): + if not isinstance(label, basestring): + raise TypeError('require string label') + return pyll.scope.float( + pyll.scope.hyperopt_param(label, + pyll.scope.qloguniform(*args, **kwargs))) + + +def hp_normal(label, *args, **kwargs): + if not isinstance(label, basestring): + raise TypeError('require string label') + return pyll.scope.float( + pyll.scope.hyperopt_param(label, + pyll.scope.normal(*args, **kwargs))) + + +def hp_qnormal(label, *args, **kwargs): + if not isinstance(label, basestring): + raise TypeError('require string label') + return pyll.scope.float( + pyll.scope.hyperopt_param(label, + pyll.scope.qnormal(*args, **kwargs))) + + +def hp_lognormal(label, *args, **kwargs): + if not isinstance(label, basestring): + raise TypeError('require string label') + return pyll.scope.float( + pyll.scope.hyperopt_param(label, + pyll.scope.lognormal(*args, **kwargs))) + + +def hp_qlognormal(label, *args, **kwargs): + if not isinstance(label, basestring): + raise TypeError('require string label') + return pyll.scope.float( + pyll.scope.hyperopt_param(label, + pyll.scope.qlognormal(*args, **kwargs))) \ No newline at end of file diff --git a/setup.py b/setup.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/test_all_combinations.py b/tests/test_all_combinations.py new file mode 100644 index 0000000000..d15927bef9 --- /dev/null +++ b/tests/test_all_combinations.py @@ -0,0 +1,122 @@ +__author__ = 'feurerm' + +import numpy as np +import time +import unittest + +import itertools + +import sklearn.datasets +import sklearn.decomposition + +from AutoSklearn.autosklearn import AutoSklearnClassifier + +class TestAllCombinations(unittest.TestCase): + def get_iris(self): + iris = sklearn.datasets.load_iris() + X = iris.data + Y = iris.target + rs = np.random.RandomState(42) + indices = np.arange(X.shape[0]) + rs.shuffle(indices) + X = X[indices] + Y = Y[indices] + X_train = X[:100] + Y_train = Y[:100] + X_test = X[100:] + Y_test = Y[100:] + return X_train, Y_train, X_test, Y_test + + def test_all_combinations(self): + # TODO: do the combination testing on the basis of one component + # TODO: automate the testing, so far it is enumerated by hand + parameter_combinations = list() + + libsvm_svc = [] + libsvm_svc_C_values = range(-5, 15 + 1) + libsvm_svc_gamma_values = range(-15, 3 + 1) + for C, gamma in itertools.product(libsvm_svc_C_values, libsvm_svc_gamma_values): + libsvm_svc.append({"libsvm_svc:LOG2_C": C, + "libsvm_svc:LOG2_gamma": gamma, + "classifier": "libsvm_svc"}) + print "Parameter configurations LibSVM-SVC", len(libsvm_svc) + + liblinear = [] + liblinear_C_values = range(-5, 15 + 1) + for C in liblinear_C_values: + for penalty_and_loss in [{"penalty": "l1", "loss": "l2"}, + {"penalty": "l2", "loss": "l1"}, + {"penalty": "l2", "loss": "l2"}]: + liblinear.append({"liblinear:LOG2_C": C, + "liblinear:penalty": penalty_and_loss["penalty"], + "liblinear:loss": penalty_and_loss["loss"], + "classifier": "liblinear"}) + print "Parameter configurations LibLinear", len(liblinear) + + random_forest = [] + random_forest_n_estimators = range(10, 100 + 1, 10) + # This makes things too expensive + # random_forst_min_samples_leaf = [1, 2, 4, 7, 10, 15, 20] + random_forst_min_splits = [1, 2, 4, 7, 10] + random_forest_max_features = np.linspace(0.01, 1.0, 8) + random_forest_max_features = itertools.chain( + random_forest_max_features, ["sqrt", "log2"]) + random_forest_criterion = ["gini", "entropy"] + # random_forest_bootstrap = [True, False] + + #for n_est, min_leaf, min_splits, max_features, criterion, bootstrap in \ + for n_est, min_splits, max_features, criterion in \ + itertools.product(random_forest_n_estimators, + #random_forst_min_samples_leaf, + random_forst_min_splits, + random_forest_max_features, + random_forest_criterion): + #random_forest_bootstrap) + random_forest.append(({"random_forest:n_estimators": n_est, + "random_forest:criterion": criterion, + "random_forest:max_features": max_features, + "random_forest:min_samples_split": min_splits, + #"random_forest:min_samples_leaf": min_leaf, + #"random_forest:bootstrap": bootstrap, + "classifier": "random_forest"})) + print "Parameter configurations RF", len(random_forest) + + pca = [] + pca_n_components = np.linspace(0.60, 1.0, 10) + # pca_whiten = [True, False] + #for n_components, whiten in itertools.product(pca_n_components): + #pca_whiten): + for n_components in pca_n_components: + pca.append({"pca:n_components": n_components, + #"pca:whiten": whiten, + "preprocessor": "pca"}) + print "Parameter configurations PCA", len(pca) + + classifiers = [liblinear, libsvm_svc, random_forest] + preprocessors = [pca, [{"preprocessor": None}]] + + for classifier, preprocessor in itertools.product(classifiers, + preprocessors): + print classifier[0]["classifier"], preprocessor[0]["preprocessor"] + for classifier_params, preprocessor_params in itertools.product( + classifier, preprocessor): + params = {} + params.update(classifier_params) + params.update(preprocessor_params) + parameter_combinations.append(params) + + starttime = time.time() + print len(parameter_combinations) + for i, parameter_combination in enumerate(parameter_combinations): + auto = AutoSklearnClassifier(parameters=parameter_combination) + X_train, Y_train, X_test, Y_test = self.get_iris() + auto = auto.fit(X_train, Y_train) + predictions = auto.predict(X_test) + accuracy = sklearn.metrics.accuracy_score(Y_test, predictions) + + if i % 1000 == 0 and i != 0: + print "Iteration", i + print (time.time() - starttime) * 1000 / i + + print "Finished, took", time.time() - starttime + diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py new file mode 100644 index 0000000000..6f69a8764c --- /dev/null +++ b/tests/test_autosklearn.py @@ -0,0 +1,132 @@ +__author__ = 'feurerm' + +import numpy as np +import StringIO +import unittest + +import hyperopt + +import sklearn.datasets +import sklearn.decomposition + +from AutoSklearn.autosklearn import AutoSklearnClassifier +from AutoSklearn.components.classification_base import AutoSklearnClassificationAlgorithm +from AutoSklearn.components.preprocessor_base import AutoSklearnPreprocessingAlgorithm +import AutoSklearn.components.classification as classification_components +import AutoSklearn.components.preprocessing as preprocessing_components +from AutoSklearn.util import NoModelException + +class TestAutoSKlearnClassifier(unittest.TestCase): + def get_iris(self): + iris = sklearn.datasets.load_iris() + X = iris.data + Y = iris.target + rs = np.random.RandomState(42) + indices = np.arange(X.shape[0]) + rs.shuffle(indices) + X = X[indices] + Y = Y[indices] + X_train = X[:100] + Y_train = Y[:100] + X_test = X[100:] + Y_test = Y[100:] + return X_train, Y_train, X_test, Y_test + + def test_find_classifiers(self): + classifiers = classification_components._classifiers + self.assertGreaterEqual(len(classifiers), 1) + for key in classifiers: + self.assertIn(AutoSklearnClassificationAlgorithm, + classifiers[key].__bases__) + + def test_find_preprocessors(self): + preprocessors = preprocessing_components._preprocessors + self.assertGreaterEqual(len(preprocessors), 1) + for key in preprocessors: + self.assertIn(AutoSklearnPreprocessingAlgorithm, + preprocessors[key].__bases__) + + def test_init_no_classifier(self): + try: + AutoSklearnClassifier(None, None) + except NoModelException as e: + self.assertEqual(e.__str__(), + '"You called .__init__() without ' + 'specifying a model first."') + + def test_init_unknown_classifier(self): + self.assertRaises(KeyError, AutoSklearnClassifier, + "qufrpdvltromeaiudtroembdtaiubo", None) + + @unittest.skip("test_init_parameters_as_dict_or_as_keywords Not yet Implemented") + def test_init_parameters_as_dict_or_as_keywords(self): + pass + + def test_fit_iris(self): + auto = AutoSklearnClassifier("liblinear", None) + X_train, Y_train, X_test, Y_test = self.get_iris() + auto = auto.fit(X_train, Y_train) + self.assertIsInstance(auto, AutoSklearnClassifier) + self.assertIsInstance(auto._estimator, AutoSklearnClassificationAlgorithm) + + def test_predict_iris(self): + auto = AutoSklearnClassifier("liblinear", None) + X_train, Y_train, X_test, Y_test = self.get_iris() + auto = auto.fit(X_train, Y_train) + predictions = auto.predict(X_test) + accuracy = sklearn.metrics.accuracy_score(Y_test, predictions) + self.assertAlmostEqual(accuracy, 1.0) + + def test_fit_with_preproc(self): + auto = AutoSklearnClassifier("liblinear", "pca") + X_train, Y_train, X_test, Y_test = self.get_iris() + auto = auto.fit(X_train, Y_train) + self.assertIsInstance(auto, AutoSklearnClassifier) + self.assertIsInstance(auto._preprocessor, AutoSklearnPreprocessingAlgorithm) + self.assertIsInstance(auto._preprocessor.preprocessor, sklearn + .decomposition.PCA) + + def test_predict_with_preproc(self): + auto = AutoSklearnClassifier("liblinear", "pca") + X_train, Y_train, X_test, Y_test = self.get_iris() + auto = auto.fit(X_train, Y_train) + prediction = auto.predict(X_test) + self.assertIsInstance(auto, AutoSklearnClassifier) + self.assertIsInstance(auto._preprocessor, AutoSklearnPreprocessingAlgorithm) + + def test_specify_hyperparameters(self): + auto = AutoSklearnClassifier(random_state=1, + parameters={"classifier": "random_forest", "preprocessing": + "pca", "random_forest:n_estimators": 1, + "random_forest:max_features": 1.0}) + X_train, Y_train, X_test, Y_test = self.get_iris() + auto = auto.fit(X_train, Y_train) + predictions = auto.predict(X_test) + accuracy = sklearn.metrics.accuracy_score(Y_test, predictions) + self.assertAlmostEqual(accuracy, 0.939999999) + self.assertEqual(auto._estimator.estimator.n_estimators, 1) + + def test_get_hyperparameter_search_space(self): + auto = AutoSklearnClassifier(None, None) + space = auto.get_hyperparameter_search_space() + space = hyperopt.pyll.base.as_apply(space) + print space + + @unittest.skip("test_check_random_state Not yet Implemented") + def test_check_random_state(self): + raise NotImplementedError() + + @unittest.skip("test_validate_input_X Not yet Implemented") + def test_validate_input_X(self): + raise NotImplementedError() + + @unittest.skip("test_validate_input_Y Not yet Implemented") + def test_validate_input_Y(self): + raise NotImplementedError() + + def test_set_params(self): + pass + + def test_get_params(self): + pass \ No newline at end of file From d4334e33fffdf85ceaa0b3da7ce522bbe0c79691 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 1 Jul 2014 13:15:54 +0200 Subject: [PATCH 002/352] After paper submission, added setup.py --- .gitignore | 6 +- AutoSklearn/autosklearn.py | 31 +++++++++- .../components/classification/liblinear.py | 7 ++- .../components/classification/libsvm_svc.py | 9 ++- .../classification/random_forest.py | 7 ++- AutoSklearn/components/classification_base.py | 3 + AutoSklearn/components/preprocessing/pca.py | 12 +++- AutoSklearn/components/preprocessor_base.py | 3 + CHANGES.md | 0 LICENSE.txt | 0 README.md | 0 setup.py | 15 +++++ tests/test_all_combinations.py | 15 +++-- tests/test_autosklearn.py | 56 +++++++++++++++++-- 14 files changed, 144 insertions(+), 20 deletions(-) create mode 100644 CHANGES.md create mode 100644 LICENSE.txt create mode 100644 README.md diff --git a/.gitignore b/.gitignore index 00cb3af0c8..ec789f7a40 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,6 @@ -~ +*.pyc +*~ .idea +dist/ +AutoSklearn.egg-info + diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index b78b9a8c9b..2a338c07eb 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -70,7 +70,7 @@ def __init__(self, assert classifier is None assert preprocessor is None classifier = parameters.get("classifier") - preprocessor = parameters.get("preprocessor") + preprocessor = parameters.get("preprocessing") if preprocessor == "None": preprocessor = None @@ -82,10 +82,35 @@ def __init__(self, # TODO: make sure that there are no duplicate classifiers self._available_classifiers = classification_components._classifiers + classifier_parameters = set() + for _classifier in self._available_classifiers: + accepted_hyperparameter_names = self._available_classifiers[_classifier] \ + .get_all_accepted_hyperparameter_names() + name = self._available_classifiers[_classifier].get_hyperparameter_search_space()['name'] + for key in accepted_hyperparameter_names: + classifier_parameters.add("%s:%s" % (name, key)) + self._available_preprocessors = preprocessing_components._preprocessors + preprocessor_parameters = set() + for _preprocessor in self._available_preprocessors: + accepted_hyperparameter_names = self._available_preprocessors[_preprocessor] \ + .get_all_accepted_hyperparameter_names() + name = self._available_preprocessors[_preprocessor].get_hyperparameter_search_space()['name'] + for key in accepted_hyperparameter_names: + preprocessor_parameters.add("%s:%s" % (name, key)) + + for parameter in self.parameters: + if parameter not in classifier_parameters and \ + parameter not in preprocessor_parameters and \ + parameter not in ("preprocessing", "classifier", "name"): + print "Classifier parameters %s" % str(classifier_parameters) + print "Preprocessing parameters %s" % str(preprocessor_parameters) + raise ValueError("Parameter %s is unknown." % parameter) if random_state is None: - random_state = check_random_state(1) + self.random_state = check_random_state(1) + else: + self.random_state = check_random_state(random_state) self._estimator_class = self._available_classifiers.get(classifier) if classifier is not None and self._estimator_class is None: @@ -99,6 +124,8 @@ def __init__(self, "of preprocessors found on this system: %s" % (preprocessor, self._available_preprocessors)) + + def fit(self, X, Y): # TODO: perform input validation # TODO: look if X.shape[0] == y.shape[0] diff --git a/AutoSklearn/components/classification/liblinear.py b/AutoSklearn/components/classification/liblinear.py index 70ac1fecc1..4936cf9a7f 100644 --- a/AutoSklearn/components/classification/liblinear.py +++ b/AutoSklearn/components/classification/liblinear.py @@ -4,7 +4,8 @@ from ..classification_base import AutoSklearnClassificationAlgorithm class LibLinear_SVC(AutoSklearnClassificationAlgorithm): - # TODO: maybe add dual and crammer-singer? + # Liblinear is not deterministic as it uses a RNG inside + # TODO: maybe add dual and crammer-singer? def __init__(self, penalty="l2", loss="l2", C=1.0, LOG2_C=None, random_state=None): self.penalty = penalty self.loss = loss @@ -52,5 +53,9 @@ def get_hyperparameter_search_space(): return {"name": "liblinear", "penalty_and_loss": penalty_and_loss, "LOG2_C": LOG2_C} + @staticmethod + def get_all_accepted_hyperparameter_names(): + return (["LOG2_C", "C", "penalty", "loss"]) + def __str__(self): return "AutoSklearn Liblinear Classifier" diff --git a/AutoSklearn/components/classification/libsvm_svc.py b/AutoSklearn/components/classification/libsvm_svc.py index 6b57c0ae61..868706159b 100644 --- a/AutoSklearn/components/classification/libsvm_svc.py +++ b/AutoSklearn/components/classification/libsvm_svc.py @@ -48,8 +48,13 @@ def handles_non_binary_classes(self): @staticmethod def get_hyperparameter_search_space(): LOG2_C = hp_uniform("LOG2_C", -5, 15) - LOG2_gamma = hp_uniform("LOG2_gamma", -15, 5) - return {"name": "libsmv_scv", "LOG2_C": LOG2_C, "LOG2_gamma": LOG2_gamma} + LOG2_gamma = hp_uniform("LOG2_gamma", -15, 3) + return {"name": "libsvm_svc", "LOG2_C": LOG2_C, "LOG2_gamma": + LOG2_gamma} + + @staticmethod + def get_all_accepted_hyperparameter_names(): + return (["LOG2_C", "C", "LOG2_gamma", "gamma"]) def __str__(self): return "AutoSklearn LibSVM Classifier" diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index 83859e541f..8d22efa196 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -26,7 +26,7 @@ def fit(self, X, Y): self.max_depth = int(self.max_depth) self.min_samples_split = int(self.min_samples_split) self.min_samples_leaf = int(self.min_samples_leaf) - if self.max_features not in ("sqrt", ("log2")): + if self.max_features not in ("sqrt", "log2", "auto"): self.max_features = float(self.max_features) self.estimator = sklearn.ensemble.RandomForestClassifier( @@ -72,5 +72,10 @@ def get_hyperparameter_search_space(): min_samples_split, "min_samples_leaf": min_samples_leaf, "bootstrap": bootstrap} + @staticmethod + def get_all_accepted_hyperparameter_names(): + return (["n_estimators", "criterion", "max_features", + "min_samples_split", "min_samples_leaf", "bootstrap"]) + def __str__(self): return "AutoSklearn LibSVM Classifier" diff --git a/AutoSklearn/components/classification_base.py b/AutoSklearn/components/classification_base.py index 57880a818a..13749dec2a 100644 --- a/AutoSklearn/components/classification_base.py +++ b/AutoSklearn/components/classification_base.py @@ -17,6 +17,9 @@ def handles_non_binary_classes(self): def get_hyperparameter_search_space(self): raise NotImplementedError() + def get_all_accepted_hyperparameter_names(self): + raise NotImplementedError() + def fit(self, X, Y): raise NotImplementedError() diff --git a/AutoSklearn/components/preprocessing/pca.py b/AutoSklearn/components/preprocessing/pca.py index 095df2040c..7ec6857782 100644 --- a/AutoSklearn/components/preprocessing/pca.py +++ b/AutoSklearn/components/preprocessing/pca.py @@ -9,8 +9,12 @@ def __init__(self, keep_variance=1.0, whiten=False, random_state=None): self.whiten = whiten def fit(self, X, Y): + # TODO: implement that keep_variance can be a percentage (in int) self.preprocessor = sklearn.decomposition.PCA(whiten=self.whiten, copy=True) + # num components is + # selected further down + # the code self.preprocessor.fit(X, Y) sum_ = 0. @@ -42,10 +46,14 @@ def handles_non_binary_classes(self): @staticmethod def get_hyperparameter_search_space(): - keep_variance = hp_uniform("n_components", 0.5, 1.0) + keep_variance = hp_uniform("keep_variance", 0.5, 1.0) whiten = hp_choice("whiten", ["False", "True"]) return {"name": "pca", "keep_variance": keep_variance, "whiten": whiten} + @staticmethod + def get_all_accepted_hyperparameter_names(): + return (["keep_variance", "whiten"]) + def __str__(self): - return "AutoSklearn Principle Component Analysis preprocessor." \ No newline at end of file + return "AutoSklearn Principle Component Analysis preprocessor." diff --git a/AutoSklearn/components/preprocessor_base.py b/AutoSklearn/components/preprocessor_base.py index 4537ff2d78..8c7c8a9ba5 100644 --- a/AutoSklearn/components/preprocessor_base.py +++ b/AutoSklearn/components/preprocessor_base.py @@ -17,6 +17,9 @@ def handles_non_binary_classes(self): def get_hyperparameter_search_space(self): raise NotImplementedError() + def get_all_accepted_hyperparameter_names(): + raise NotImplementedError() + def fit(self, X, Y): raise NotImplementedError() diff --git a/CHANGES.md b/CHANGES.md new file mode 100644 index 0000000000..e69de29bb2 diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/README.md b/README.md new file mode 100644 index 0000000000..e69de29bb2 diff --git a/setup.py b/setup.py index e69de29bb2..7440c2146e 100644 --- a/setup.py +++ b/setup.py @@ -0,0 +1,15 @@ +import setuptools + +setuptools.setup(name="AutoSklearn", + description="Scikit-Learn wrapper for automatic " + "hyperparameter configuration.", + version="0.1dev", + packages=setuptools.find_packages(), + install_requires=["scikit_learn==0.14.1"], + package_data={'': ['*.txt', '*.md']}, + author="Matthias Feurer", + author_email="feurerm@informatik.uni-freiburg.de", + license="BSD", + platforms=['Linux'], + classifiers=[] + url="github.com/mfeurer/autosklearn") diff --git a/tests/test_all_combinations.py b/tests/test_all_combinations.py index d15927bef9..201974e2de 100644 --- a/tests/test_all_combinations.py +++ b/tests/test_all_combinations.py @@ -87,17 +87,17 @@ def test_all_combinations(self): #for n_components, whiten in itertools.product(pca_n_components): #pca_whiten): for n_components in pca_n_components: - pca.append({"pca:n_components": n_components, + pca.append({"pca:keep_variance": n_components, #"pca:whiten": whiten, - "preprocessor": "pca"}) + "preprocessing": "pca"}) print "Parameter configurations PCA", len(pca) classifiers = [liblinear, libsvm_svc, random_forest] - preprocessors = [pca, [{"preprocessor": None}]] + preprocessors = [pca, [{"preprocessing": None}]] for classifier, preprocessor in itertools.product(classifiers, preprocessors): - print classifier[0]["classifier"], preprocessor[0]["preprocessor"] + print classifier[0]["classifier"], preprocessor[0]["preprocessing"] for classifier_params, preprocessor_params in itertools.product( classifier, preprocessor): params = {} @@ -110,7 +110,12 @@ def test_all_combinations(self): for i, parameter_combination in enumerate(parameter_combinations): auto = AutoSklearnClassifier(parameters=parameter_combination) X_train, Y_train, X_test, Y_test = self.get_iris() - auto = auto.fit(X_train, Y_train) + try: + auto = auto.fit(X_train, Y_train) + except Exception as e: + print parameter_combination + print (parameter_combination['random_forest:max_features'] * X_train.shape[1]) + raise e predictions = auto.predict(X_test) accuracy = sklearn.metrics.accuracy_score(Y_test, predictions) diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index 6f69a8764c..8576b5c5be 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -8,6 +8,8 @@ import sklearn.datasets import sklearn.decomposition +import sklearn.ensemble +import sklearn.svm from AutoSklearn.autosklearn import AutoSklearnClassifier from AutoSklearn.components.classification_base import AutoSklearnClassificationAlgorithm @@ -17,6 +19,9 @@ from AutoSklearn.util import NoModelException class TestAutoSKlearnClassifier(unittest.TestCase): + # TODO: test for both possible ways to initialize AutoSklearn + # parameters and other... + def get_iris(self): iris = sklearn.datasets.load_iris() X = iris.data @@ -63,20 +68,41 @@ def test_init_unknown_classifier(self): def test_init_parameters_as_dict_or_as_keywords(self): pass - def test_fit_iris(self): - auto = AutoSklearnClassifier("liblinear", None) + def test_predict_iris(self): + auto = AutoSklearnClassifier(parameters={"classifier": "liblinear", + "preprocessing": None}) X_train, Y_train, X_test, Y_test = self.get_iris() auto = auto.fit(X_train, Y_train) + predictions = auto.predict(X_test) + accuracy = sklearn.metrics.accuracy_score(Y_test, predictions) self.assertIsInstance(auto, AutoSklearnClassifier) self.assertIsInstance(auto._estimator, AutoSklearnClassificationAlgorithm) + self.assertIsInstance(auto._estimator.estimator, sklearn.svm.LinearSVC) + self.assertAlmostEqual(accuracy, 1.0) - def test_predict_iris(self): - auto = AutoSklearnClassifier("liblinear", None) + def test_predict_svm(self): + auto = AutoSklearnClassifier(parameters={"classifier": "libsvm_svc", + "preprocessing": None}) X_train, Y_train, X_test, Y_test = self.get_iris() auto = auto.fit(X_train, Y_train) predictions = auto.predict(X_test) accuracy = sklearn.metrics.accuracy_score(Y_test, predictions) - self.assertAlmostEqual(accuracy, 1.0) + self.assertIsInstance(auto, AutoSklearnClassifier) + self.assertIsInstance(auto._estimator, AutoSklearnClassificationAlgorithm) + self.assertIsInstance(auto._estimator.estimator, sklearn.svm.SVC) + self.assertAlmostEqual(accuracy, 0.959999999999) + + def test_predict_iris_rf(self): + auto = AutoSklearnClassifier(parameters={"classifier": "random_forest", + "preprocessing": None}) + X_train, Y_train, X_test, Y_test = self.get_iris() + auto = auto.fit(X_train, Y_train) + predictions = auto.predict(X_test) + accuracy = sklearn.metrics.accuracy_score(Y_test, predictions) + self.assertIsInstance(auto, AutoSklearnClassifier) + self.assertIsInstance(auto._estimator, AutoSklearnClassificationAlgorithm) + self.assertIsInstance(auto._estimator.estimator, sklearn.ensemble.RandomForestClassifier) + self.assertAlmostEqual(accuracy, 0.959999999999) def test_fit_with_preproc(self): auto = AutoSklearnClassifier("liblinear", "pca") @@ -102,11 +128,29 @@ def test_specify_hyperparameters(self): "random_forest:max_features": 1.0}) X_train, Y_train, X_test, Y_test = self.get_iris() auto = auto.fit(X_train, Y_train) + self.assertIsNotNone(auto._preprocessor) + self.assertIsNotNone(auto._preprocessor.preprocessor) + self.assertIsNotNone(auto._estimator) + self.assertIsNotNone(auto._estimator.estimator) predictions = auto.predict(X_test) accuracy = sklearn.metrics.accuracy_score(Y_test, predictions) - self.assertAlmostEqual(accuracy, 0.939999999) + self.assertAlmostEqual(accuracy, 0.92) self.assertEqual(auto._estimator.estimator.n_estimators, 1) + def test_specify_unknown_hyperparameters(self): + self.assertRaisesRegexp(ValueError, + "Parameter random_forest:blablabla is unknown.", + AutoSklearnClassifier, random_state=1, + parameters={"classifier": "random_forest", + "preprocessing": "pca", + "random_forest:blablabla": 1}) + self.assertRaisesRegexp(ValueError, + "Parameter pca:blablabla is unknown.", + AutoSklearnClassifier, random_state=1, + parameters={"classifier": "random_forest", + "preprocessing": "pca", + "pca:blablabla": 1}) + def test_get_hyperparameter_search_space(self): auto = AutoSklearnClassifier(None, None) space = auto.get_hyperparameter_search_space() From 7d074e24d0934d84b1d450b73333ad488f9f0a99 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 1 Jul 2014 11:28:52 +0000 Subject: [PATCH 003/352] setup.py edited online with Bitbucket --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 7440c2146e..612359ecbc 100644 --- a/setup.py +++ b/setup.py @@ -11,5 +11,5 @@ author_email="feurerm@informatik.uni-freiburg.de", license="BSD", platforms=['Linux'], - classifiers=[] + classifiers=[], url="github.com/mfeurer/autosklearn") From 743d3edd535dcd9a87ec41cffbc0feb524a63f76 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 6 Oct 2014 16:51:10 +0200 Subject: [PATCH 004/352] Add a basic documentation --- .gitignore | 1 + AutoSklearn/__init__.py | 6 +- AutoSklearn/autosklearn.py | 43 +-- AutoSklearn/components/__init__.py | 5 +- .../components/classification/liblinear.py | 6 +- .../components/classification/libsvm_svc.py | 5 + AutoSklearn/components/preprocessor_base.py | 2 +- Makefile | 177 ++++++++++++ README.md | 4 + make.bat | 242 ++++++++++++++++ setup.py | 3 +- source/api.rst | 22 ++ source/components.rst | 28 ++ source/conf.py | 265 ++++++++++++++++++ source/extending_AutoSklearn.rst | 4 + source/first_steps.rst | 26 ++ source/index.rst | 26 ++ source/installation.rst | 3 + source/introduction.rst | 10 + tests/test_autosklearn.py | 6 + 20 files changed, 858 insertions(+), 26 deletions(-) create mode 100644 Makefile create mode 100644 make.bat create mode 100644 source/api.rst create mode 100644 source/components.rst create mode 100644 source/conf.py create mode 100644 source/extending_AutoSklearn.rst create mode 100644 source/first_steps.rst create mode 100644 source/index.rst create mode 100644 source/installation.rst create mode 100644 source/introduction.rst diff --git a/.gitignore b/.gitignore index ec789f7a40..6427e91061 100644 --- a/.gitignore +++ b/.gitignore @@ -2,5 +2,6 @@ *~ .idea dist/ +build/ AutoSklearn.egg-info diff --git a/AutoSklearn/__init__.py b/AutoSklearn/__init__.py index 8f0ce6cb7c..e87ea67e5e 100644 --- a/AutoSklearn/__init__.py +++ b/AutoSklearn/__init__.py @@ -1 +1,5 @@ -__author__ = 'feurerm' +"""AutoSklearn provides a search space covering a (work in progress) huge +part of the scikit-learn models and the possibility to evaluate them. +Together with a hyperparameter optimization package, AutoSklearn solves +the Combined algorithm selection and Hyperparameter optimization problem +(CASH).""" \ No newline at end of file diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index 2a338c07eb..2110cbf25f 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -12,15 +12,7 @@ task_types = set(["classification"]) class AutoSklearnClassifier(BaseEstimator, ClassifierMixin): - """AutoSklearn - - AutoSklearn provides a search space covering a (work in progress) huge - part of the scikit-learn models and the possibility to evaluate them. - Together with a hyperparameter optimization package, AutoSklearn solves - the Combined algorithm selection and Hyperparameter optimization problem - (CASH). - - This class implements the classification task. It can perform + """This class implements the classification task. It can perform preprocessing. It can render a search space for all known classification and preprocessing problems. @@ -50,6 +42,7 @@ class AutoSklearnClassifier(BaseEstimator, ClassifierMixin): Examples -------- + """ def __init__(self, classifier=None, @@ -112,17 +105,24 @@ def __init__(self, else: self.random_state = check_random_state(random_state) - self._estimator_class = self._available_classifiers.get(classifier) - if classifier is not None and self._estimator_class is None: - raise KeyError("The classifier %s is not in the list " - "of classifiers found on this system: %s" % - (classifier, self._available_classifiers)) + if classifier is not None and 'name' in classifier: + self._estimator_class = self._available_classifiers.get(classifier['name']) + if self._estimator_class is None: + raise KeyError("The classifier %s is not in the list " + "of classifiers found on this system: %s" % + (classifier, self._available_classifiers)) + else: + self._estimator_class = None + + if preprocessor is not None and 'name' in preprocessor: + self._preprocessor_class = self._available_preprocessors.get(preprocessor['name']) + if self._preprocessor_class is None: + raise KeyError("The preprocessor %s is not in the list " + "of preprocessors found on this system: %s" % + (preprocessor, self._available_preprocessors)) + else: + self._preprocessor_class = None - self._preprocessor_class = self._available_preprocessors.get(preprocessor) - if preprocessor is not None and self._preprocessor_class is None: - raise KeyError("The preprocessor %s is not in the list " - "of preprocessors found on this system: %s" % - (preprocessor, self._available_preprocessors)) @@ -134,8 +134,9 @@ def fit(self, X, Y): raise NoModelException(self, "fit(X, Y)") # Extract Hyperparameters from the parameters dict... - space = self._estimator_class.get_hyperparameter_search_space() - name = space["name"] + #space = self._estimator_class.get_hyperparameter_search_space() + space = self._estimator_class.get_all_accepted_hyperparameter_names() + name = self._estimator_class.get_hyperparameter_search_space()['name'] parameters = {} for key in space: diff --git a/AutoSklearn/components/__init__.py b/AutoSklearn/components/__init__.py index 8f0ce6cb7c..7f952608f0 100644 --- a/AutoSklearn/components/__init__.py +++ b/AutoSklearn/components/__init__.py @@ -1 +1,4 @@ -__author__ = 'feurerm' +"""AutoSklearn can be easily extended with new classification and +preprocessing methods. At import time, AutoSklearn checks the directory +``AutoSklearn/components/classification`` for classification algorithms and +``AutoSklearn/components/preprocessing`` for preprocessing algorithms.""" \ No newline at end of file diff --git a/AutoSklearn/components/classification/liblinear.py b/AutoSklearn/components/classification/liblinear.py index 4936cf9a7f..fee6204d7e 100644 --- a/AutoSklearn/components/classification/liblinear.py +++ b/AutoSklearn/components/classification/liblinear.py @@ -16,10 +16,14 @@ def __init__(self, penalty="l2", loss="l2", C=1.0, LOG2_C=None, random_state=Non def fit(self, X, Y): if self.LOG2_C is not None: + self.LOG2_C = float(self.LOG2_C) self.C = 2 ** self.LOG2_C + + self.C = float(self.C) self.estimator = sklearn.svm.LinearSVC(penalty=self.penalty, loss=self.loss, C=self.C, - random_state=self.random_state) + random_state=self.random_state, + dual=False) self.estimator.fit(X, Y) def predict(self, X): diff --git a/AutoSklearn/components/classification/libsvm_svc.py b/AutoSklearn/components/classification/libsvm_svc.py index 868706159b..51df72321e 100644 --- a/AutoSklearn/components/classification/libsvm_svc.py +++ b/AutoSklearn/components/classification/libsvm_svc.py @@ -17,10 +17,15 @@ def __init__(self, C=1.0, gamma=0.0, def fit(self, X, Y): if self.LOG2_C is not None: + self.LOG2_C = float(self.LOG2_C) self.C = 2 ** self.LOG2_C if self.LOG2_gamma is not None: + self.LOG2_gamma = float(self.LOG2_gamma) self.gamma = 2 ** self.LOG2_gamma + self.C = float(self.C) + self.gamma = float(self.gamma) + print self.C, self.gamma self.estimator = sklearn.svm.SVC(C=self.C, gamma=self.gamma, random_state=self.random_state, cache_size=2000) diff --git a/AutoSklearn/components/preprocessor_base.py b/AutoSklearn/components/preprocessor_base.py index 8c7c8a9ba5..f2fe77a1e1 100644 --- a/AutoSklearn/components/preprocessor_base.py +++ b/AutoSklearn/components/preprocessor_base.py @@ -17,7 +17,7 @@ def handles_non_binary_classes(self): def get_hyperparameter_search_space(self): raise NotImplementedError() - def get_all_accepted_hyperparameter_names(): + def get_all_accepted_hyperparameter_names(self): raise NotImplementedError() def fit(self, X, Y): diff --git a/Makefile b/Makefile new file mode 100644 index 0000000000..4eaa687bce --- /dev/null +++ b/Makefile @@ -0,0 +1,177 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source + +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/AutoSklearn.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/AutoSklearn.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/AutoSklearn" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/AutoSklearn" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/README.md b/README.md index e69de29bb2..67655a16cc 100644 --- a/README.md +++ b/README.md @@ -0,0 +1,4 @@ +### Building the documentation ### + + make html + firefox build/index.html \ No newline at end of file diff --git a/make.bat b/make.bat new file mode 100644 index 0000000000..cceba6a6dd --- /dev/null +++ b/make.bat @@ -0,0 +1,242 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source +set I18NSPHINXOPTS=%SPHINXOPTS% source +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. xml to make Docutils-native XML files + echo. pseudoxml to make pseudoxml-XML files for display purposes + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + + +%SPHINXBUILD% 2> nul +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\AutoSklearn.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\AutoSklearn.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdf" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf + cd %BUILDDIR%/.. + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdfja" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf-ja + cd %BUILDDIR%/.. + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +if "%1" == "xml" ( + %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The XML files are in %BUILDDIR%/xml. + goto end +) + +if "%1" == "pseudoxml" ( + %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. + goto end +) + +:end diff --git a/setup.py b/setup.py index 7440c2146e..d86f981208 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,8 @@ "hyperparameter configuration.", version="0.1dev", packages=setuptools.find_packages(), - install_requires=["scikit_learn==0.14.1"], + install_requires=["scikit_learn==0.14.1", + "numpydoc"], package_data={'': ['*.txt', '*.md']}, author="Matthias Feurer", author_email="feurerm@informatik.uni-freiburg.de", diff --git a/source/api.rst b/source/api.rst new file mode 100644 index 0000000000..91a9593f16 --- /dev/null +++ b/source/api.rst @@ -0,0 +1,22 @@ +.. _api: + +API +*** + +Main modules +============ + +.. autoclass:: AutoSklearn.autosklearn.AutoSklearnClassifier + :members: + +.. autoclass:: AutoSklearn.autosklearn.AutoSklearnRegressor + :members: + +Extension Interfaces +==================== + +.. autoclass:: AutoSklearn.components.classification_base.AutoSklearnClassificationAlgorithm + :members: + +.. autoclass:: AutoSklearn.components.preprocessor_base.AutoSklearnPreprocessingAlgorithm + :members: diff --git a/source/components.rst b/source/components.rst new file mode 100644 index 0000000000..3bca511491 --- /dev/null +++ b/source/components.rst @@ -0,0 +1,28 @@ +.. _components: + +Components +********** + +Classification +============== + +A list of all classification algorithms considered in the AutoSklearn search space. + +.. autoclass:: AutoSklearn.components.classification.liblinear.LibLinear_SVC + :members: + +.. autoclass:: AutoSklearn.components.classification.libsvm_svc.LibSVM_SVC + :members: + +.. autoclass:: AutoSklearn.components.classification.random_forest.RandomForest + :members: + +Regression +========== + +Currently there is no AutoSklearnRegressor. + +Preprocessing +============= + +.. autoclass:: AutoSklearn.components.preprocessing.pca.PCA diff --git a/source/conf.py b/source/conf.py new file mode 100644 index 0000000000..6868597490 --- /dev/null +++ b/source/conf.py @@ -0,0 +1,265 @@ +# -*- coding: utf-8 -*- +# +# AutoSklearn documentation build configuration file, created by +# sphinx-quickstart on Mon Oct 6 11:12:25 2014. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath('..')) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.doctest', + 'sphinx.ext.coverage', + 'sphinx.ext.mathjax', + 'numpydoc', # Important for get headings like Parameters... +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'AutoSklearn' +copyright = u'2014, Matthias Feurer' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '0.0.1dev' +# The full version, including alpha/beta/rc tags. +release = '0.0.1dev' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'default' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'AutoSklearndoc' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ('index', 'AutoSklearn.tex', u'AutoSklearn Documentation', + u'Matthias Feurer', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('index', 'autosklearn', u'AutoSklearn Documentation', + [u'Matthias Feurer'], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', 'AutoSklearn', u'AutoSklearn Documentation', + u'Matthias Feurer', 'AutoSklearn', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False diff --git a/source/extending_AutoSklearn.rst b/source/extending_AutoSklearn.rst new file mode 100644 index 0000000000..5469691413 --- /dev/null +++ b/source/extending_AutoSklearn.rst @@ -0,0 +1,4 @@ +Extending AutoSklearn +********************* + +.. automodule:: AutoSklearn.components diff --git a/source/first_steps.rst b/source/first_steps.rst new file mode 100644 index 0000000000..5be649844c --- /dev/null +++ b/source/first_steps.rst @@ -0,0 +1,26 @@ +First Steps with AutoSklearn +**************************** + +This example demonstrates how to get the whole search space covered by +AutoSklearn, feed it to the random search algorithm implemented by the hyperopt +package and then train a classifier with a random configuration on the iris +dataset. + + >>> from AutoSklearn.autosklearn import AutoSklearnClassifier + >>> import sklearn.datasets + >>> import sklearn.metrics + >>> import numpy as np + >>> import hyperopt + >>> iris = sklearn.datasets.load_iris() + >>> X = iris.data + >>> Y = iris.target + >>> indices = np.arange(X.shape[0]) + >>> np.random.shuffle(indices) + >>> auto = AutoSklearnClassifier() + >>> search_space = auto.get_hyperparameter_search_space() + >>> configuration = hyperopt.pyll.stochastic.sample(search_space) + >>> auto = AutoSklearnClassifier(classifier=configuration['classifier'], preprocessor=configuration['preprocessing']) + >>> auto = auto.fit(X[indices[:100]], Y[indices[:100]]) + >>> predictions = auto.predict(X[indices[100:]]) + >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) + diff --git a/source/index.rst b/source/index.rst new file mode 100644 index 0000000000..fbef0eb019 --- /dev/null +++ b/source/index.rst @@ -0,0 +1,26 @@ +.. AutoSklearn documentation master file, created by + sphinx-quickstart on Mon Oct 6 11:12:25 2014. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to AutoSklearn's documentation! +======================================= + +Contents: + +.. toctree:: + :maxdepth: 2 + + introduction + installation + first_steps + extending_AutoSklearn + + +Indices and Tables +================== + +* :ref:`API ` +* :ref:`Components ` +* :ref:`search` + diff --git a/source/installation.rst b/source/installation.rst new file mode 100644 index 0000000000..0d405ab144 --- /dev/null +++ b/source/installation.rst @@ -0,0 +1,3 @@ +Install AutoSklearn +******************* + diff --git a/source/introduction.rst b/source/introduction.rst new file mode 100644 index 0000000000..67cbdd072b --- /dev/null +++ b/source/introduction.rst @@ -0,0 +1,10 @@ +Introduction to AutoSklearn +*************************** + +What is AutoSklearn? +==================== + +.. automodule:: AutoSklearn + +Get involved +============ diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index 8576b5c5be..b423cf816c 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -64,6 +64,12 @@ def test_init_unknown_classifier(self): self.assertRaises(KeyError, AutoSklearnClassifier, "qufrpdvltromeaiudtroembdtaiubo", None) + def test_init_unknown_parameter(self): + self.assertRaises(KeyError, AutoSklearnClassifier, + None, None,parameters={"classifier": "liblinear", + "preprocessing": None, + "libsvm_svc:gamma": 0.025}) + @unittest.skip("test_init_parameters_as_dict_or_as_keywords Not yet Implemented") def test_init_parameters_as_dict_or_as_keywords(self): pass From 899f12b4a9bfa87c30aa59d65a8728709eabd0b3 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 6 Oct 2014 14:52:46 +0000 Subject: [PATCH 005/352] README.md edited online with Bitbucket --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 67655a16cc..6a1ab0aee6 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -### Building the documentation ### - - make html +### Build the documentation ### + + make html firefox build/index.html \ No newline at end of file From f04db962a82d984a0bed4427d7b0c5df38264ae3 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 6 Oct 2014 14:54:04 +0000 Subject: [PATCH 006/352] README.md edited online with Bitbucket --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6a1ab0aee6..f146b14da4 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ ### Build the documentation ### make html - firefox build/index.html \ No newline at end of file + firefox `pwd`/build/html/index.html \ No newline at end of file From d7572b18adab160f23c4d9056e4b65ed67efb67e Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 6 Oct 2014 15:29:50 +0000 Subject: [PATCH 007/352] README.md edited online with Bitbucket --- README.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index f146b14da4..be2616d62f 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,7 @@ -### Build the documentation ### +### Download and build the documentation ### + git clone https://bitbucket.org/mfeurer/autosklearn.git + cd autosklearn + python setup.py make html firefox `pwd`/build/html/index.html \ No newline at end of file From cdc094d5b74bf36d2e4302219bfe032b4e39aa23 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 7 Oct 2014 13:20:44 +0200 Subject: [PATCH 008/352] Update (mostly) API documentation --- AutoSklearn/__init__.py | 11 +- AutoSklearn/autosklearn.py | 75 +++++++++- AutoSklearn/components/__init__.py | 11 +- AutoSklearn/components/classification_base.py | 128 +++++++++++++++++- AutoSklearn/components/preprocessor_base.py | 108 ++++++++++++++- setup.py | 3 +- source/api.rst | 5 +- source/conf.py | 4 + source/introduction.rst | 29 ++++ 9 files changed, 355 insertions(+), 19 deletions(-) diff --git a/AutoSklearn/__init__.py b/AutoSklearn/__init__.py index e87ea67e5e..bf27f9dd6d 100644 --- a/AutoSklearn/__init__.py +++ b/AutoSklearn/__init__.py @@ -1,5 +1,6 @@ -"""AutoSklearn provides a search space covering a (work in progress) huge -part of the scikit-learn models and the possibility to evaluate them. -Together with a hyperparameter optimization package, AutoSklearn solves -the Combined algorithm selection and Hyperparameter optimization problem -(CASH).""" \ No newline at end of file +"""AutoSklearn is a python package to solve the Combined Algorithm Selection and +Hyperparameter Optimization problem (CASH) for the scikit-learn package. + +AutoSklearn provides a configuration space spanning a huge part of the +scikit-learn models. This configuration space can be searched by one of the +hyperparameter optimization algorithms in the HPOlib.""" \ No newline at end of file diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index 2110cbf25f..61ed3121e6 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -13,21 +13,40 @@ class AutoSklearnClassifier(BaseEstimator, ClassifierMixin): """This class implements the classification task. It can perform - preprocessing. It can render a search space for all known classification - and preprocessing problems. + preprocessing. It can render a search space including all known + classification and preprocessing algorithms. Contrary to the sklearn API it is not possible to enumerate the possible parameters in the __init__ function because we only know the available classifiers at runtime. For this reason the user must specifiy the parameters via set_params. + The user can specify the hyperparameters of the AutoSklearnClassifier + either by giving the classifier and the preprocessor argument or the + parameters argument. + Parameters ---------- + classifier: dict + A dictionary which contains at least the name of the classification + algorithm. It can also contain {parameter : value} pairs. + + preprocessor: dict + A dictionary which contains at least the name of the preprocessing + algorithm. It can also contain {parameter : value} pairs. + random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance - used by `np.random`. + used by `np.random`. + + parameters: dict + A dictionary which contains at least {'classifier' : name}. It can + also contain the classifiers hyperparameters in the form of {name + + ':hyperparametername' : value}. To also use a preprocessing algorithm + you must specify {'preprocessing': name}, then you can also add its + hyperparameters in the form {name + ':hyperparametername' : value}. Attributes ---------- @@ -123,13 +142,32 @@ def __init__(self, else: self._preprocessor_class = None - - - def fit(self, X, Y): + """Fit the selected algorithm to the training data. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + Training data + + y : array-like, shape = [n_samples] + Targets + + Returns + ------- + self : returns an instance of self. + + Raises + ------ + NoModelException + NoModelException is raised if fit() is called without specifying + a classification algorithm first. + """ # TODO: perform input validation # TODO: look if X.shape[0] == y.shape[0] # TODO: check if the hyperparameters have been set... + # TODO: this is an example of the antipattern of not properly + # initializing a class in the init function! if self._estimator_class is None: raise NoModelException(self, "fit(X, Y)") @@ -170,6 +208,17 @@ def fit(self, X, Y): return self def predict(self, X): + """Predict the classes using the selected model.. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + C : array, shape = (n_samples,) + Returns the predicted values""" + # TODO check if fit() was called before... if self._preprocessor is not None: X = self._preprocessor.transform(X) self._validate_input_X(X) @@ -221,14 +270,26 @@ def _validate_input_Y(self, Y): raise NotImplementedError() def add_model_class(self, model): + """ + Raises + ------ + NotImplementedError + """ raise NotImplementedError() def get_hyperparameter_search_space(self): + """Return the configuration space for the CASH problem. + + Returns + ------- + cs : dict + A dictionary with all hyperparameters as hyperopt.pyll objects. + + """ classifiers = {} for name in self._available_classifiers: classifier_parameters = self._available_classifiers[name]\ .get_hyperparameter_search_space() - print classifier_parameters classifier_parameters["name"] = name classifiers["classifier:" + name] = classifier_parameters diff --git a/AutoSklearn/components/__init__.py b/AutoSklearn/components/__init__.py index 7f952608f0..dfe14086c0 100644 --- a/AutoSklearn/components/__init__.py +++ b/AutoSklearn/components/__init__.py @@ -1,4 +1,13 @@ """AutoSklearn can be easily extended with new classification and preprocessing methods. At import time, AutoSklearn checks the directory ``AutoSklearn/components/classification`` for classification algorithms and -``AutoSklearn/components/preprocessing`` for preprocessing algorithms.""" \ No newline at end of file +``AutoSklearn/components/preprocessing`` for preprocessing algorithms. To be +found, the algorithm must be provide a class implementing one of the given +interfaces. + +Classification +============== + + +Preprocessing +=============""" \ No newline at end of file diff --git a/AutoSklearn/components/classification_base.py b/AutoSklearn/components/classification_base.py index 13749dec2a..83a0c06ae8 100644 --- a/AutoSklearn/components/classification_base.py +++ b/AutoSklearn/components/classification_base.py @@ -1,32 +1,158 @@ class AutoSklearnClassificationAlgorithm(object): + """Provide an abstract interface for classification algorithms in + AutoSklearn. + + Make a subclass of this and put it into the directory + `AutoSklearn/components/classification` to make it available.""" def __init__(self): self.estimator = None def handles_missing_values(self): + """Can the underlying algorithm handle missing values itself? + + Returns + ------- + flag : Boolean + True if the underlying algorithm handles missing values itself, + otherwise False. + + Note + ---- + + This feature is not implemented yet. Missing values are not supported. + """ raise NotImplementedError() def handles_nominal_features(self): + """Can the underlying algorithm handle nominal features? + + Returns + ------- + flag : Boolean + True if the underlying algorithm handles nominal values itself, + otherwise False. + + Note + ---- + + This feature is not implemented yet. Nominal values are not + supported. It is suggested to perform a OneHotEncoding on them. + """ raise NotImplementedError() def handles_numeric_features(self): + """Can the underlying algorithm handle numeric features itself? + + Returns + ------- + flag : Boolean + True if the underlying algorithm handles numeric features itself, + otherwise False. + + Note + ---- + + This feature is not implemented yet. Every algorithm support numeric + features. + """ raise NotImplementedError() def handles_non_binary_classes(self): + """Can the underlying algorithm handle multiclass problems itself? + + Returns + ------- + flag : Boolean + True if the underlying algorithm handles multiclass problems itself, + otherwise False. + + Note + ---- + + This feature is not implemented yet. Multiclass problems are + supported by every algorithm. + """ raise NotImplementedError() def get_hyperparameter_search_space(self): + """Return the configuration space of this classifier. + + Returns + ------- + cs : dict + A dictionary with all hyperparameters as hyperopt.pyll objects. + + """ raise NotImplementedError() def get_all_accepted_hyperparameter_names(self): + """Return the name of all hyperparameters accepted by this classifier. + + This must not be the same as the list returned by + :meth:`get_hyperparameter_search_space`. An example can be found in + the components for the linear svm and the libsvm, where it is also + possible to specifiy the parameters as the exponent to the base two. + + This list is used by the + :class:`AutoSklearn.autosklearn.AutoSklearnClassifier` to check if it + is called with illegal hyperparameters. + + Returns + ------- + names : A list of accepted hyperparameter names. + """ raise NotImplementedError() - def fit(self, X, Y): + def fit(self, X, y): + """The fit function calls the fit function of the underlying + scikit-learn model and returns `self`. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + Training data + + y : array-like, shape = [n_samples] + + Returns + ------- + self : returns an instance of self. + Targets + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" raise NotImplementedError() def predict(self, X): + """The predict function calls the predict function of the + underlying scikit-learn model and returns an array with the predictions. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + C : array, shape = (n_samples,) + Returns the predicted values + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" raise NotImplementedError() def get_estimator(self): + """Return the underlying estimator object. + + Returns + ------- + estimator : the underlying estimator object + """ return self.estimator def __str__(self): diff --git a/AutoSklearn/components/preprocessor_base.py b/AutoSklearn/components/preprocessor_base.py index f2fe77a1e1..a21221a401 100644 --- a/AutoSklearn/components/preprocessor_base.py +++ b/AutoSklearn/components/preprocessor_base.py @@ -3,30 +3,134 @@ def __init__(self): self.estimator = None def handles_missing_values(self): + """Can the underlying algorithm handle missing values itself? + + Returns + ------- + flag : Boolean + True if the underlying algorithm handles missing values itself, + otherwise False. + + Note + ---- + + This feature is not implemented yet. Missing values are not supported. + """ raise NotImplementedError() def handles_nominal_features(self): + """Can the underlying algorithm handle nominal features? + + Returns + ------- + flag : Boolean + True if the underlying algorithm handles nominal values itself, + otherwise False. + + Note + ---- + + This feature is not implemented yet. Nominal values are not + supported. It is suggested to perform a OneHotEncoding on them. + """ raise NotImplementedError() def handles_numeric_features(self): - raise NotImplementedError() + """Can the underlying algorithm handle numeric features itself? + + Returns + ------- + flag : Boolean + True if the underlying algorithm handles numeric features itself, + otherwise False. + + Note + ---- - def handles_non_binary_classes(self): + This feature is not implemented yet. Every algorithm support numeric + features. + """ raise NotImplementedError() def get_hyperparameter_search_space(self): + """Return the configuration space of this preprocessing algorithm. + + Returns + ------- + cs : dict + A dictionary with all hyperparameters as hyperopt.pyll objects. + + """ raise NotImplementedError() def get_all_accepted_hyperparameter_names(self): + """Return the name of all hyperparameters accepted by this preprocessing + algorithm. + + This must not be the same as the list returned by + :meth:`get_hyperparameter_search_space`. An example can be found in + the components for the linear svm and the libsvm, where it is also + possible to specifiy the parameters as the exponent to the base two. + + This list is used by the + :class:`AutoSklearn.autosklearn.AutoSklearnClassifier` to check if it + is called with illegal hyperparameters. + + Returns + ------- + names : A list of accepted hyperparameter names. + """ raise NotImplementedError() def fit(self, X, Y): + """The fit function calls the fit function of the underlying + scikit-learn preprocessing algorithm and returns `self`. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + Training data + + y : array-like, shape = [n_samples] + + Returns + ------- + self : returns an instance of self. + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" raise NotImplementedError() def transform(self, X): + """The predict function calls the transform function of the + underlying scikit-learn model and returns the transformed array. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + X : array + Return the transformed training data + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" raise NotImplementedError() def get_preprocessor(self): + """Return the underlying preprocessor object. + + Returns + ------- + preprocessor : the underlying preprocessor object + """ return self.estimator def __str__(self): diff --git a/setup.py b/setup.py index de84a7c7ac..a150a75bc1 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,8 @@ version="0.1dev", packages=setuptools.find_packages(), install_requires=["scikit_learn==0.14.1", - "numpydoc"], + "numpydoc", + "Sphinx"], package_data={'': ['*.txt', '*.md']}, author="Matthias Feurer", author_email="feurerm@informatik.uni-freiburg.de", diff --git a/source/api.rst b/source/api.rst index 91a9593f16..6436324149 100644 --- a/source/api.rst +++ b/source/api.rst @@ -16,7 +16,8 @@ Extension Interfaces ==================== .. autoclass:: AutoSklearn.components.classification_base.AutoSklearnClassificationAlgorithm - :members: + + .. automethod:: AutoSklearn.components.classification_base.AutoSklearnClassificationAlgorithm.__init__ .. autoclass:: AutoSklearn.components.preprocessor_base.AutoSklearnPreprocessingAlgorithm - :members: + diff --git a/source/conf.py b/source/conf.py index 6868597490..589bcbf6c2 100644 --- a/source/conf.py +++ b/source/conf.py @@ -37,6 +37,10 @@ 'numpydoc', # Important for get headings like Parameters... ] +autosummary_generate = True +autodoc_default_flags = ['members', 'inherited-members', 'undoc-members', + 'show-inheritance'] + # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] diff --git a/source/introduction.rst b/source/introduction.rst index 67cbdd072b..50458fa7a7 100644 --- a/source/introduction.rst +++ b/source/introduction.rst @@ -8,3 +8,32 @@ What is AutoSklearn? Get involved ============ + +License +======= +We chose to license AutoSklearn the same way as scikit-learn. It is available under the open source and commercially usable 3-clause BSD license. + +Copyright (c) 2014, Matthias Feurer +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of the nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. From 0b5c2642818b0c8a26067ae60f14578f380588c7 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 10 Oct 2014 16:16:51 +0200 Subject: [PATCH 009/352] More documentation --- AutoSklearn/autosklearn.py | 141 +++++++++++++----- .../components/classification/libsvm_svc.py | 1 - AutoSklearn/components/preprocessor_base.py | 4 +- LICENSE.txt | 24 +++ README.md | 7 +- setup.py | 4 +- 6 files changed, 135 insertions(+), 46 deletions(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index 61ed3121e6..c7536a78d6 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -69,31 +69,87 @@ def __init__(self, random_state=None, parameters=None): - # Test that either the classifier or the parameters - if classifier is not None: - assert parameters is None - # TODO: Somehow assemble a parameters dictionary - - if preprocessor is not None: - assert classifier is not None - assert parameters is None - - if parameters is not None: - assert classifier is None - assert preprocessor is None - classifier = parameters.get("classifier") - preprocessor = parameters.get("preprocessing") - if preprocessor == "None": - preprocessor = None - self.random_state = random_state self._estimator = None self._preprocessor = None - self.parameters = parameters if parameters is not None else {} - # TODO: add valid parameters to the parameters dictionary + self._available_classifiers = classification_components._classifiers + self._available_preprocessors = preprocessing_components._preprocessors + self.parameters = parameters + + # One can only use the parameters dict if the classifier dict and the + # preprocessor dict are not used. + if parameters is not None: + if classifier is not None: + raise ValueError("Illegal Arguments: You are not allowed to " + "use both parameters and classifier.") + if preprocessor is not None: + raise ValueError("Illegal Arguments: You are not allowed to " + "use both parameters and preprocessor.") + + if not isinstance(parameters, dict): + raise ValueError("Illegal Arguments: The argument preprocessor " + "must be a dictionary.") + + if 'classifier' not in self.parameters: + raise ValueError("Illegal Arguments: You must specify a " + "classification algorithm.") + + if 'preprocessing' in self.parameters and \ + self.parameters['preprocessing'] in [None, "None"]: + del self.parameters['preprocessing'] + + else: + self.parameters = {} + + # Test that either the classifier or the parameters are specified + if classifier is not None: + if not isinstance(classifier, dict): + raise ValueError("Illegal Arguments: The argument classifier " + "must be a dictionary.") + if 'name' not in classifier: + raise ValueError("Illegal Arguments: The dictionary holding " + "the parameters for the classification " + "algorithm must have a key 'name'.") + + # Add all hyperparameters to the parameters dict + classifier_name = classifier['name'] + self.parameters['classifier'] = classifier_name + for key in classifier: + if key == 'name': + continue + + self.parameters[classifier_name + ":" + key] = classifier[key] + + # If there is a preprocessor, there must also be a classifier, + # but no parameters dictionary + if preprocessor is not None: + if classifier is None: + raise ValueError("Illegal Arguments: You must specifiy a " + "classification algorithm if you specifiy a " + "preprocessing algorithm.") + if not isinstance(preprocessor, dict): + raise ValueError("Illegal Arguments: The argument preprocessor " + "must be a dictionary.") + + # Only continue if the dictionary is populated + if len(preprocessor) != 0: + if 'name' not in preprocessor: + raise ValueError("Illegal Arguments: The dictionary holding " + "the parameters for the preprocessing " + "algorithm must have a key 'name'.") + + # Add all hyperparameters to the parameters dict + preprocessor_name = preprocessor['name'] + self.parameters['preprocessing'] = preprocessor_name + for key in preprocessor: + if key == 'name': + continue + + self.parameters[preprocessor_name + ":" + key] = preprocessor[key] + # TODO: make sure that there are no duplicate classifiers - self._available_classifiers = classification_components._classifiers + # Get all available classifiers and their hyperparameters classifier_parameters = set() for _classifier in self._available_classifiers: accepted_hyperparameter_names = self._available_classifiers[_classifier] \ @@ -102,7 +158,7 @@ def __init__(self, for key in accepted_hyperparameter_names: classifier_parameters.add("%s:%s" % (name, key)) - self._available_preprocessors = preprocessing_components._preprocessors + # Get all available preprocessors and their hyperparameters preprocessor_parameters = set() for _preprocessor in self._available_preprocessors: accepted_hyperparameter_names = self._available_preprocessors[_preprocessor] \ @@ -111,6 +167,31 @@ def __init__(self, for key in accepted_hyperparameter_names: preprocessor_parameters.add("%s:%s" % (name, key)) + # Check if the specified classifier is a legal classifier + if 'classifier' in self.parameters: + self._estimator_class = self._available_classifiers.get( + self.parameters['classifier']) + if self._estimator_class is None: + raise KeyError("The classifier %s is not in the list " + "of classifiers found on this system: %s" % + (self.parameters['classifier'], + self._available_classifiers)) + else: + self._estimator_class = None + + # Check if the specified preprocessor is a legal one + if 'preprocessing' in self.parameters: + self._preprocessor_class = self._available_preprocessors.get( + self.parameters['preprocessing']) + if self._preprocessor_class is None: + raise KeyError("The preprocessor %s is not in the list " + "of preprocessors found on this system: %s" % + (self.parameters['preprocessing'], + self._available_preprocessors)) + else: + self._preprocessor_class = None + + # Check if all hyperparameters specified are valid hyperparameters for parameter in self.parameters: if parameter not in classifier_parameters and \ parameter not in preprocessor_parameters and \ @@ -124,24 +205,6 @@ def __init__(self, else: self.random_state = check_random_state(random_state) - if classifier is not None and 'name' in classifier: - self._estimator_class = self._available_classifiers.get(classifier['name']) - if self._estimator_class is None: - raise KeyError("The classifier %s is not in the list " - "of classifiers found on this system: %s" % - (classifier, self._available_classifiers)) - else: - self._estimator_class = None - - if preprocessor is not None and 'name' in preprocessor: - self._preprocessor_class = self._available_preprocessors.get(preprocessor['name']) - if self._preprocessor_class is None: - raise KeyError("The preprocessor %s is not in the list " - "of preprocessors found on this system: %s" % - (preprocessor, self._available_preprocessors)) - else: - self._preprocessor_class = None - def fit(self, X, Y): """Fit the selected algorithm to the training data. diff --git a/AutoSklearn/components/classification/libsvm_svc.py b/AutoSklearn/components/classification/libsvm_svc.py index 51df72321e..942134713d 100644 --- a/AutoSklearn/components/classification/libsvm_svc.py +++ b/AutoSklearn/components/classification/libsvm_svc.py @@ -25,7 +25,6 @@ def fit(self, X, Y): self.C = float(self.C) self.gamma = float(self.gamma) - print self.C, self.gamma self.estimator = sklearn.svm.SVC(C=self.C, gamma=self.gamma, random_state=self.random_state, cache_size=2000) diff --git a/AutoSklearn/components/preprocessor_base.py b/AutoSklearn/components/preprocessor_base.py index a21221a401..782a87e55f 100644 --- a/AutoSklearn/components/preprocessor_base.py +++ b/AutoSklearn/components/preprocessor_base.py @@ -1,6 +1,6 @@ class AutoSklearnPreprocessingAlgorithm(object): def __init__(self): - self.estimator = None + self.preprocessor = None def handles_missing_values(self): """Can the underlying algorithm handle missing values itself? @@ -131,7 +131,7 @@ def get_preprocessor(self): ------- preprocessor : the underlying preprocessor object """ - return self.estimator + return self.preprocessor def __str__(self): raise NotImplementedError() diff --git a/LICENSE.txt b/LICENSE.txt index e69de29bb2..e1f17e32cb 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -0,0 +1,24 @@ +Copyright (c) 2014, Matthias Feurer +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of the nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/README.md b/README.md index be2616d62f..1b6629760e 100644 --- a/README.md +++ b/README.md @@ -3,5 +3,10 @@ git clone https://bitbucket.org/mfeurer/autosklearn.git cd autosklearn python setup.py + +To build the documentation you also need the packages `sphinx` and `numpydoc`. + + pip install sphinx + pip install numpydoc make html - firefox `pwd`/build/html/index.html \ No newline at end of file + firefox `pwd`/build/html/index.html diff --git a/setup.py b/setup.py index a150a75bc1..612359ecbc 100644 --- a/setup.py +++ b/setup.py @@ -5,9 +5,7 @@ "hyperparameter configuration.", version="0.1dev", packages=setuptools.find_packages(), - install_requires=["scikit_learn==0.14.1", - "numpydoc", - "Sphinx"], + install_requires=["scikit_learn==0.14.1"], package_data={'': ['*.txt', '*.md']}, author="Matthias Feurer", author_email="feurerm@informatik.uni-freiburg.de", From 9a4cff790de6462bb5f0e14f11148e4e2d341a67 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 7 Nov 2014 15:43:23 +0100 Subject: [PATCH 010/352] Update setup and readme --- README.md | 9 +++++++-- setup.py | 8 +++++++- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 1b6629760e..c91c04ffa2 100644 --- a/README.md +++ b/README.md @@ -2,8 +2,13 @@ git clone https://bitbucket.org/mfeurer/autosklearn.git cd autosklearn - python setup.py - + python setup.py install + +Installation with `pip` + + pip install numpy scipy scikit-learn==0.15.1 numpydoc sphinx + pip install --editable git+https://bitbucket.org/mfeurer/autosklearn#egg=AutoSklearn + To build the documentation you also need the packages `sphinx` and `numpydoc`. pip install sphinx diff --git a/setup.py b/setup.py index 612359ecbc..800240ad72 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,13 @@ "hyperparameter configuration.", version="0.1dev", packages=setuptools.find_packages(), - install_requires=["scikit_learn==0.14.1"], + install_requires=["numpy", + "scipy", + "scikit_learn==0.15.1", + "HPOlibConfigSpace==0.1dev"], + dependency_links=[ + 'git+https://github.com/mfeurer/HPOlibConfigSpace/'], + #'/tarball'/master#egg=HPOlibConfigSpace-0.1dev'], package_data={'': ['*.txt', '*.md']}, author="Matthias Feurer", author_email="feurerm@informatik.uni-freiburg.de", From 0578f052f2d447343eba1a03fd8d14ce11e98d17 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 7 Nov 2014 14:59:08 +0000 Subject: [PATCH 011/352] setup.py edited online with Bitbucket --- setup.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/setup.py b/setup.py index 800240ad72..e0bcda535d 100644 --- a/setup.py +++ b/setup.py @@ -9,9 +9,6 @@ "scipy", "scikit_learn==0.15.1", "HPOlibConfigSpace==0.1dev"], - dependency_links=[ - 'git+https://github.com/mfeurer/HPOlibConfigSpace/'], - #'/tarball'/master#egg=HPOlibConfigSpace-0.1dev'], package_data={'': ['*.txt', '*.md']}, author="Matthias Feurer", author_email="feurerm@informatik.uni-freiburg.de", From 3d534acb3896b8c100d43711fbe244c7a56092a5 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 7 Nov 2014 15:20:04 +0000 Subject: [PATCH 012/352] setup.py edited online with Bitbucket --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index e0bcda535d..5aafddfd31 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,8 @@ install_requires=["numpy", "scipy", "scikit_learn==0.15.1", - "HPOlibConfigSpace==0.1dev"], + #"HPOlibConfigSpace==0.1dev"], + "git+https://github.com/mfeurer/HPOlibConfigSpace#egg=HPOlibConfigSpace"], package_data={'': ['*.txt', '*.md']}, author="Matthias Feurer", author_email="feurerm@informatik.uni-freiburg.de", From 8ba5c0ded9b4f0996dd2cd1fc06b58ab0c07cd65 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 7 Nov 2014 15:26:51 +0000 Subject: [PATCH 013/352] setup.py edited online with Bitbucket --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 5aafddfd31..b35c8b9b50 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ "scipy", "scikit_learn==0.15.1", #"HPOlibConfigSpace==0.1dev"], - "git+https://github.com/mfeurer/HPOlibConfigSpace#egg=HPOlibConfigSpace"], + "--editable git+https://github.com/mfeurer/HPOlibConfigSpace#egg=HPOlibConfigSpace0.1dev"], package_data={'': ['*.txt', '*.md']}, author="Matthias Feurer", author_email="feurerm@informatik.uni-freiburg.de", From 0afe0d9b9cfcdac0a51c62b17f4dc0752819065c Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 7 Nov 2014 15:40:36 +0000 Subject: [PATCH 014/352] README.md edited online with Bitbucket --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index c91c04ffa2..f264808c2f 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,6 @@ ### Download and build the documentation ### + pip install git+https://github.com/mfeurer/HPOlibConfigSpace#egg=HPOlibConfigSpace0.1dev git clone https://bitbucket.org/mfeurer/autosklearn.git cd autosklearn python setup.py install @@ -7,6 +8,7 @@ Installation with `pip` pip install numpy scipy scikit-learn==0.15.1 numpydoc sphinx + pip install git+https://github.com/mfeurer/HPOlibConfigSpace#egg=HPOlibConfigSpace0.1dev pip install --editable git+https://bitbucket.org/mfeurer/autosklearn#egg=AutoSklearn To build the documentation you also need the packages `sphinx` and `numpydoc`. @@ -14,4 +16,4 @@ To build the documentation you also need the packages `sphinx` and `numpydoc`. pip install sphinx pip install numpydoc make html - firefox `pwd`/build/html/index.html + firefox `pwd`/build/html/index.html \ No newline at end of file From 35a9d4d6148f5933db0e3dfda23d8f0acf0403ba Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 14 Nov 2014 14:15:34 +0100 Subject: [PATCH 015/352] Replace hyperopt by HPOlibConfigSpace --- AutoSklearn/autosklearn.py | 347 ++++++++---------- AutoSklearn/components/__init__.py | 5 +- .../components/classification/liblinear.py | 57 ++- .../components/classification/libsvm_svc.py | 38 +- .../classification/random_forest.py | 50 ++- AutoSklearn/components/preprocessing/pca.py | 21 +- AutoSklearn/util.py | 118 ++---- source/first_steps.rst | 19 +- source/introduction.rst | 2 +- 9 files changed, 305 insertions(+), 352 deletions(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index c7536a78d6..0213899967 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -5,35 +5,33 @@ from sklearn.utils import check_random_state from sklearn.utils.validation import safe_asarray, assert_all_finite -from .components import classification as classification_components -from .components import preprocessing as preprocessing_components -from .util import NoModelException, hp_choice +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ + InactiveHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition + +from . import components as components +from .util import NoModelException task_types = set(["classification"]) class AutoSklearnClassifier(BaseEstimator, ClassifierMixin): - """This class implements the classification task. It can perform - preprocessing. It can render a search space including all known + """This class implements the classification task. + + It implements a pipeline, which includes one preprocessing step and one + classification algorithm. It can render a search space including all known classification and preprocessing algorithms. Contrary to the sklearn API it is not possible to enumerate the possible parameters in the __init__ function because we only know the available classifiers at runtime. For this reason the user must - specifiy the parameters via set_params. - - The user can specify the hyperparameters of the AutoSklearnClassifier - either by giving the classifier and the preprocessor argument or the - parameters argument. + specifiy the parameters by passing an instance of + HPOlibConfigSpace.configuration_space.Configuration. Parameters ---------- - classifier: dict - A dictionary which contains at least the name of the classification - algorithm. It can also contain {parameter : value} pairs. - - preprocessor: dict - A dictionary which contains at least the name of the preprocessing - algorithm. It can also contain {parameter : value} pairs. + configuration : HPOlibConfigSpace.configuration_space.Configuration + The configuration to evaluate. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; @@ -41,17 +39,16 @@ class AutoSklearnClassifier(BaseEstimator, ClassifierMixin): If None, the random number generator is the RandomState instance used by `np.random`. - parameters: dict - A dictionary which contains at least {'classifier' : name}. It can - also contain the classifiers hyperparameters in the form of {name + - ':hyperparametername' : value}. To also use a preprocessing algorithm - you must specify {'preprocessing': name}, then you can also add its - hyperparameters in the form {name + ':hyperparametername' : value}. - Attributes ---------- - _estimator : An underlying scikit-learn target model specified by a call to - set_parames + _estimator : The underlying scikit-learn classification model. This + variable is assigned after a call to the + :ref:`AutoSklearn.autosklearn.AutoSklearnClassifier.fit` method. + + _preprocessor : The underlying scikit-learn preprocessing algorithm. This + variable is only assigned if a preprocessor is specified and + after a call to the + :ref:`AutoSklearn.autosklearn.AutoSklearnClassifier.fit` method. See also -------- @@ -63,142 +60,18 @@ class AutoSklearnClassifier(BaseEstimator, ClassifierMixin): -------- """ - def __init__(self, - classifier=None, - preprocessor=None, - random_state=None, - parameters=None): + def __init__(self, configuration, random_state=None): - self.random_state = random_state - self._estimator = None - self._preprocessor = None - self._available_classifiers = classification_components._classifiers - self._available_preprocessors = preprocessing_components._preprocessors - self.parameters = parameters - - # One can only use the parameters dict if the classifier dict and the - # preprocessor dict are not used. - if parameters is not None: - if classifier is not None: - raise ValueError("Illegal Arguments: You are not allowed to " - "use both parameters and classifier.") - if preprocessor is not None: - raise ValueError("Illegal Arguments: You are not allowed to " - "use both parameters and preprocessor.") - - if not isinstance(parameters, dict): - raise ValueError("Illegal Arguments: The argument preprocessor " - "must be a dictionary.") - - if 'classifier' not in self.parameters: - raise ValueError("Illegal Arguments: You must specify a " - "classification algorithm.") - - if 'preprocessing' in self.parameters and \ - self.parameters['preprocessing'] in [None, "None"]: - del self.parameters['preprocessing'] + # TODO check sklearn version! - else: - self.parameters = {} - - # Test that either the classifier or the parameters are specified - if classifier is not None: - if not isinstance(classifier, dict): - raise ValueError("Illegal Arguments: The argument classifier " - "must be a dictionary.") - if 'name' not in classifier: - raise ValueError("Illegal Arguments: The dictionary holding " - "the parameters for the classification " - "algorithm must have a key 'name'.") - - # Add all hyperparameters to the parameters dict - classifier_name = classifier['name'] - self.parameters['classifier'] = classifier_name - for key in classifier: - if key == 'name': - continue + self.random_state = random_state + self.configuration = configuration - self.parameters[classifier_name + ":" + key] = classifier[key] - - # If there is a preprocessor, there must also be a classifier, - # but no parameters dictionary - if preprocessor is not None: - if classifier is None: - raise ValueError("Illegal Arguments: You must specifiy a " - "classification algorithm if you specifiy a " - "preprocessing algorithm.") - if not isinstance(preprocessor, dict): - raise ValueError("Illegal Arguments: The argument preprocessor " - "must be a dictionary.") - - # Only continue if the dictionary is populated - if len(preprocessor) != 0: - if 'name' not in preprocessor: - raise ValueError("Illegal Arguments: The dictionary holding " - "the parameters for the preprocessing " - "algorithm must have a key 'name'.") - - # Add all hyperparameters to the parameters dict - preprocessor_name = preprocessor['name'] - self.parameters['preprocessing'] = preprocessor_name - for key in preprocessor: - if key == 'name': - continue - - self.parameters[preprocessor_name + ":" + key] = preprocessor[key] - - - # TODO: make sure that there are no duplicate classifiers - # Get all available classifiers and their hyperparameters - classifier_parameters = set() - for _classifier in self._available_classifiers: - accepted_hyperparameter_names = self._available_classifiers[_classifier] \ - .get_all_accepted_hyperparameter_names() - name = self._available_classifiers[_classifier].get_hyperparameter_search_space()['name'] - for key in accepted_hyperparameter_names: - classifier_parameters.add("%s:%s" % (name, key)) - - # Get all available preprocessors and their hyperparameters - preprocessor_parameters = set() - for _preprocessor in self._available_preprocessors: - accepted_hyperparameter_names = self._available_preprocessors[_preprocessor] \ - .get_all_accepted_hyperparameter_names() - name = self._available_preprocessors[_preprocessor].get_hyperparameter_search_space()['name'] - for key in accepted_hyperparameter_names: - preprocessor_parameters.add("%s:%s" % (name, key)) - - # Check if the specified classifier is a legal classifier - if 'classifier' in self.parameters: - self._estimator_class = self._available_classifiers.get( - self.parameters['classifier']) - if self._estimator_class is None: - raise KeyError("The classifier %s is not in the list " - "of classifiers found on this system: %s" % - (self.parameters['classifier'], - self._available_classifiers)) - else: - self._estimator_class = None - - # Check if the specified preprocessor is a legal one - if 'preprocessing' in self.parameters: - self._preprocessor_class = self._available_preprocessors.get( - self.parameters['preprocessing']) - if self._preprocessor_class is None: - raise KeyError("The preprocessor %s is not in the list " - "of preprocessors found on this system: %s" % - (self.parameters['preprocessing'], - self._available_preprocessors)) - else: - self._preprocessor_class = None + cs = self.get_hyperparameter_search_space() + cs.check_configuration(configuration) - # Check if all hyperparameters specified are valid hyperparameters - for parameter in self.parameters: - if parameter not in classifier_parameters and \ - parameter not in preprocessor_parameters and \ - parameter not in ("preprocessing", "classifier", "name"): - print "Classifier parameters %s" % str(classifier_parameters) - print "Preprocessing parameters %s" % str(preprocessor_parameters) - raise ValueError("Parameter %s is unknown." % parameter) + self._estimator = None + self._preprocessor = None if random_state is None: self.random_state = check_random_state(1) @@ -211,7 +84,7 @@ def fit(self, X, Y): Parameters ---------- X : array-like, shape = (n_samples, n_features) - Training data + Training data. All values must be in the range [0,1]. y : array-like, shape = [n_samples] Targets @@ -231,39 +104,54 @@ def fit(self, X, Y): # TODO: check if the hyperparameters have been set... # TODO: this is an example of the antipattern of not properly # initializing a class in the init function! - if self._estimator_class is None: + # TODO: can this happen now that a configuration is specified at + # instantiation time + if "classifier" not in self.configuration: raise NoModelException(self, "fit(X, Y)") - # Extract Hyperparameters from the parameters dict... - #space = self._estimator_class.get_hyperparameter_search_space() - space = self._estimator_class.get_all_accepted_hyperparameter_names() - name = self._estimator_class.get_hyperparameter_search_space()['name'] + # Extract Hyperparameters from the configuration object + name = self.configuration["classifier"].value parameters = {} - for key in space: - if "%s:%s" % (name, key) in self.parameters: - parameters[key] = self.parameters["%s:%s" % (name, key)] - + for instantiated_hyperparameter in self.configuration: + if not instantiated_hyperparameter.hyperparameter.name.startswith( + name): + continue + if isinstance(instantiated_hyperparameter, InactiveHyperparameter): + continue + + print instantiated_hyperparameter.hyperparameter.name + name_ = instantiated_hyperparameter.hyperparameter.name.\ + split(":")[1] + parameters[name_] = instantiated_hyperparameter.value + + print parameters random_state = check_random_state(self.random_state) - self._estimator = self._estimator_class(random_state=random_state, - **parameters) + self._estimator = components.classification_components._classifiers\ + [name](random_state=random_state, **parameters) self._validate_input_X(X) self._validate_input_Y(Y) - if self._preprocessor_class is not None: - # TODO: copy everything or not? - parameters = {} - preproc_space = self._preprocessor_class\ - .get_hyperparameter_search_space() - preproc_name = preproc_space["name"] + preprocessor = self.configuration['preprocessor'] + if preprocessor.value != "__None__": + preproc_name = preprocessor.value + preproc_params = {} + + for instantiated_hyperparameter in self.configuration: + if not instantiated_hyperparameter.hyperparameter.name\ + .startswith(preproc_name): + continue + if isinstance(instantiated_hyperparameter, InactiveHyperparameter): + continue - for key in preproc_space: - if "%s:%s" % (preproc_name, key) in self.parameters: - parameters[key] = self.parameters["%s:%s" % (preproc_name, key)] + name_ = instantiated_hyperparameter.hyperparameter.name. \ + split(":")[1] + preproc_params[name_] = instantiated_hyperparameter.value - self._preprocessor = self._preprocessor_class( - random_state=random_state, **parameters) + print preproc_params + self._preprocessor = components.preprocessing_components.\ + _preprocessors[preproc_name](random_state=random_state, **preproc_params) self._preprocessor.fit(X, Y) X = self._preprocessor.transform(X) @@ -340,31 +228,90 @@ def add_model_class(self, model): """ raise NotImplementedError() - def get_hyperparameter_search_space(self): + @staticmethod + def get_hyperparameter_search_space(): """Return the configuration space for the CASH problem. Returns ------- - cs : dict - A dictionary with all hyperparameters as hyperopt.pyll objects. + cs : HPOlibConfigSpace.configuration_space.Configuration + The configuration space describing the AutoSklearnClassifier. """ - classifiers = {} - for name in self._available_classifiers: - classifier_parameters = self._available_classifiers[name]\ - .get_hyperparameter_search_space() - classifier_parameters["name"] = name - classifiers["classifier:" + name] = classifier_parameters - - preprocessors = {} - preprocessors[None] = {} - for name in self._available_preprocessors: - preprocessor_parameters = self._available_preprocessors[name]\ - .get_hyperparameter_search_space() - preprocessor_parameters["name"] = name - preprocessors["preprocessing:" + name] = preprocessor_parameters - return {"classifier": hp_choice("classifier", classifiers.values()), - "preprocessing": hp_choice("preprocessing", preprocessors.values())} + cs = ConfigurationSpace() + + available_classifiers = \ + components.classification_components._classifiers + available_preprocessors = \ + components.preprocessing_components._preprocessors + + classifier = CategoricalHyperparameter( + "classifier", [name for name in available_classifiers]) + cs.add_hyperparameter(classifier) + + for name in available_classifiers: + # We have to retrieve the configuration space every time because + # we change the objects it returns. If we reused it, we could not + # retrieve the conditions further down + # TODO implement copy for hyperparameters and forbidden and + # conditions! + for parameter in available_classifiers[name].\ + get_hyperparameter_search_space().get_hyperparameters(): + parameter.name = "%s:%s" % (name, parameter.name) + cs.add_hyperparameter(parameter) + condition = EqualsCondition(parameter, classifier, name) + cs.add_condition(condition) + + for condition in available_classifiers[name]. \ + get_hyperparameter_search_space().get_conditions(): + dlcs = condition.get_descendent_literal_clauses() + for dlc in dlcs: + if not dlc.hyperparameter.name.startswith(name): + dlc.hyperparameter.name = "%s:%s" % (name, + dlc.hyperparameter.name) + cs.add_condition(condition) + + for forbidden_clause in available_classifiers[name]. \ + get_hyperparameter_search_space().forbidden_clauses: + dlcs = forbidden_clause.get_descendant_literal_clauses() + for dlc in dlcs: + if not dlc.hyperparameter.name.startswith(name): + dlc.hyperparameter.name = "%s:%s" % (name, + dlc.hyperparameter.name) + cs.add_forbidden_clause(forbidden_clause) + + preprocessor = CategoricalHyperparameter( + "preprocessor", [name for name in available_preprocessors] + [ + "__None__"]) + cs.add_hyperparameter(preprocessor) + for name in available_preprocessors: + for parameter in available_preprocessors[name].\ + get_hyperparameter_search_space().get_hyperparameters(): + parameter.name = "%s:%s" % (name, parameter.name) + cs.add_hyperparameter(parameter) + condition = EqualsCondition(parameter, preprocessor, name) + cs.add_condition(condition) + + for condition in available_preprocessors[name]. \ + get_hyperparameter_search_space().get_conditions(): + dlcs = condition.get_descendent_literal_clauses() + for dlc in dlcs: + if not dlc.hyperparameter.startwith(name): + dlc.hyperparameter.name = "%s:%s" % (name, + dlc.hyperparameter.name) + cs.add_condition(condition) + + for forbidden_clause in available_preprocessors[name]. \ + get_hyperparameter_search_space().forbidden_clauses: + dlcs = forbidden_clause.get_descendant_literal_clauses() + for dlc in dlcs: + if not dlc.hyperparameter.startwith(name): + dlc.hyperparameter.name = "%s:%s" % (name, + dlc.hyperparameter.name) + print forbidden_clause + cs.add_forbidden_clause(forbidden_clause) + + return cs # TODO: maybe provide an interface to the underlying predictor like # decision_function or predict_proba \ No newline at end of file diff --git a/AutoSklearn/components/__init__.py b/AutoSklearn/components/__init__.py index dfe14086c0..352eb36127 100644 --- a/AutoSklearn/components/__init__.py +++ b/AutoSklearn/components/__init__.py @@ -10,4 +10,7 @@ Preprocessing -=============""" \ No newline at end of file +=============""" + +from . import classification as classification_components +from . import preprocessing as preprocessing_components \ No newline at end of file diff --git a/AutoSklearn/components/classification/liblinear.py b/AutoSklearn/components/classification/liblinear.py index fee6204d7e..2718e938a3 100644 --- a/AutoSklearn/components/classification/liblinear.py +++ b/AutoSklearn/components/classification/liblinear.py @@ -1,23 +1,33 @@ import sklearn.svm -from ...util import hp_uniform, hp_choice +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter, Constant +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ + ForbiddenAndConjunction + from ..classification_base import AutoSklearnClassificationAlgorithm class LibLinear_SVC(AutoSklearnClassificationAlgorithm): # Liblinear is not deterministic as it uses a RNG inside # TODO: maybe add dual and crammer-singer? - def __init__(self, penalty="l2", loss="l2", C=1.0, LOG2_C=None, random_state=None): + def __init__(self, penalty, loss, C, dual, random_state=None,): self.penalty = penalty self.loss = loss self.C = C - self.LOG2_C = LOG2_C + self.dual = dual self.random_state = random_state self.estimator = None def fit(self, X, Y): - if self.LOG2_C is not None: - self.LOG2_C = float(self.LOG2_C) - self.C = 2 ** self.LOG2_C + #if self.LOG2_C is not None: + # self.LOG2_C = float(self.LOG2_C) + # self.C = 2 ** self.LOG2_C + + if self.dual == "__False__": + self.dual = False + elif self.dual == "__True__": + self.dual = True self.C = float(self.C) self.estimator = sklearn.svm.LinearSVC(penalty=self.penalty, @@ -45,17 +55,34 @@ def handles_non_binary_classes(self): # TODO: describe whether by OneVsOne or OneVsTheRest return True + @staticmethod + def get_meta_information(): + return {'shortname': 'Liblinear-SVC', + 'name': 'Liblinear Support Vector Classification'} + @staticmethod def get_hyperparameter_search_space(): - # penalty l1 and loss l1 together are forbidden - penalty_and_loss = hp_choice("penalty_and_loss", - [{"penalty": "l1", "loss": "l2"}, - {"penalty": "l2", "loss": "l1"}, - {"penalty": "l2", "loss": "l2"}]) - loss = hp_choice("loss", ["l1", "l2"]) - LOG2_C = hp_uniform("LOG2_C", -5, 15) - return {"name": "liblinear", "penalty_and_loss": penalty_and_loss, - "LOG2_C": LOG2_C} + penalty = CategoricalHyperparameter("penalty", ["l1", "l2"]) + loss = CategoricalHyperparameter("loss", ["l1", "l2"]) + C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True) + dual = Constant("dual", "__False__") + cs = ConfigurationSpace() + cs.add_hyperparameter(penalty) + cs.add_hyperparameter(loss) + cs.add_hyperparameter(C) + cs.add_hyperparameter(dual) + penalty_and_loss = ForbiddenAndConjunction( + ForbiddenEqualsClause(penalty, "l1"), + ForbiddenEqualsClause(loss, "l1") + ) + constant_penalty_and_loss = ForbiddenAndConjunction( + ForbiddenEqualsClause(dual, "__False__"), + ForbiddenEqualsClause(penalty, "l2"), + ForbiddenEqualsClause(loss, "l1") + ) + cs.add_forbidden_clause(penalty_and_loss) + cs.add_forbidden_clause(constant_penalty_and_loss) + return cs @staticmethod def get_all_accepted_hyperparameter_names(): diff --git a/AutoSklearn/components/classification/libsvm_svc.py b/AutoSklearn/components/classification/libsvm_svc.py index 942134713d..b74570caf1 100644 --- a/AutoSklearn/components/classification/libsvm_svc.py +++ b/AutoSklearn/components/classification/libsvm_svc.py @@ -1,27 +1,25 @@ import sklearn.svm -from ...util import hp_uniform +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter + from ..classification_base import AutoSklearnClassificationAlgorithm class LibSVM_SVC(AutoSklearnClassificationAlgorithm): # TODO: maybe ad shrinking to the parameters? - def __init__(self, C=1.0, gamma=0.0, - LOG2_C=None, LOG2_gamma=None, - random_state=None): + def __init__(self, C, gamma, random_state=None): self.C = C self.gamma = gamma - self.LOG2_C = LOG2_C - self.LOG2_gamma = LOG2_gamma self.random_state = random_state self.estimator = None def fit(self, X, Y): - if self.LOG2_C is not None: - self.LOG2_C = float(self.LOG2_C) - self.C = 2 ** self.LOG2_C - if self.LOG2_gamma is not None: - self.LOG2_gamma = float(self.LOG2_gamma) - self.gamma = 2 ** self.LOG2_gamma + # if self.LOG2_C is not None: + # self.LOG2_C = float(self.LOG2_C) + # self.C = 2 ** self.LOG2_C + # if self.LOG2_gamma is not None: + # self.LOG2_gamma = float(self.LOG2_gamma) + # self.gamma = 2 ** self.LOG2_gamma self.C = float(self.C) self.gamma = float(self.gamma) @@ -49,12 +47,20 @@ def handles_non_binary_classes(self): # TODO: describe whether by OneVsOne or OneVsTheRest return True + @staticmethod + def get_meta_information(): + return {'shortname': 'LibSVM-SVC', + 'name': 'LibSVM Support Vector Classification'} + @staticmethod def get_hyperparameter_search_space(): - LOG2_C = hp_uniform("LOG2_C", -5, 15) - LOG2_gamma = hp_uniform("LOG2_gamma", -15, 3) - return {"name": "libsvm_svc", "LOG2_C": LOG2_C, "LOG2_gamma": - LOG2_gamma} + C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True) + gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8, + log=True) + cs = ConfigurationSpace() + cs.add_hyperparameter(C) + cs.add_hyperparameter(gamma) + return cs @staticmethod def get_all_accepted_hyperparameter_names(): diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index 8d22efa196..ba812bac76 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -1,14 +1,16 @@ import sklearn.ensemble -from hyperopt.pyll import scope +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter -from ...util import hp_uniform, hp_choice, hp_quniform from ..classification_base import AutoSklearnClassificationAlgorithm class RandomForest(AutoSklearnClassificationAlgorithm): - def __init__(self, n_estimators=10, criterion='gini', max_features='auto', - max_depth=None, min_samples_split=2, min_samples_leaf=1, - bootstrap=True, random_state=None, n_jobs=1): + def __init__(self, n_estimators, criterion, max_features, + max_depth, min_samples_split, min_samples_leaf, + bootstrap, random_state=None, n_jobs=1): self.n_estimators = n_estimators self.criterion = criterion self.max_features = max_features @@ -22,7 +24,9 @@ def __init__(self, n_estimators=10, criterion='gini', max_features='auto', def fit(self, X, Y): self.n_estimators = int(self.n_estimators) - if self.max_depth is not None: + if self.max_depth == "__None__": + self.max_depth = None + elif self.max_depth is not None: self.max_depth = int(self.max_depth) self.min_samples_split = int(self.min_samples_split) self.min_samples_leaf = int(self.min_samples_leaf) @@ -55,22 +59,34 @@ def handles_non_binary_classes(self): # TODO: describe whether by OneVsOne or OneVsTheRest return True + @staticmethod + def get_meta_information(): + return {'shortname': 'RF', + 'name': 'Random Forest'} + @staticmethod def get_hyperparameter_search_space(): - n_estimators = scope.int(hp_quniform("n_estimators", 10, 100, 1)) - criterion = hp_choice("criterion", ["gini", "entropy"]) - max_features = hp_uniform("max_features", 0.01, 1.0) + n_estimators = UniformIntegerHyperparameter("n_estimators", 10, 100) + criterion = CategoricalHyperparameter("criterion", ["gini", "entropy"]) + max_features = UniformFloatHyperparameter("max_features", 0.01, 1.0) # Don't know how to parametrize this...RF should rather be # regularized by the other parameters # max_depth = hp_uniform("max_depth", lower, upper) - min_samples_split = scope.int(hp_quniform("min_samples_split", 1, 20, 1)) - min_samples_leaf = scope.int(hp_quniform("min_samples_leaf", 1, 20, 1)) - bootstrap = hp_choice("bootstrap", [True, False]) - return {"name": "random_forest", - "n_estimators": n_estimators, "criterion": criterion, - "max_features": max_features, "min_samples_split": - min_samples_split, "min_samples_leaf": min_samples_leaf, - "bootstrap": bootstrap} + max_depth = UnParametrizedHyperparameter("max_depth", "__None__") + min_samples_split = UniformIntegerHyperparameter("min_samples_split", + 1, 20) + min_samples_leaf = UniformIntegerHyperparameter("min_samples_leaf", + 1, 20) + bootstrap = CategoricalHyperparameter("bootstrap", ["True", "False"]) + cs = ConfigurationSpace() + cs.add_hyperparameter(n_estimators) + cs.add_hyperparameter(criterion) + cs.add_hyperparameter(max_features) + cs.add_hyperparameter(max_depth) + cs.add_hyperparameter(min_samples_split) + cs.add_hyperparameter(min_samples_leaf) + cs.add_hyperparameter(bootstrap) + return cs @staticmethod def get_all_accepted_hyperparameter_names(): diff --git a/AutoSklearn/components/preprocessing/pca.py b/AutoSklearn/components/preprocessing/pca.py index 7ec6857782..3719bd7621 100644 --- a/AutoSklearn/components/preprocessing/pca.py +++ b/AutoSklearn/components/preprocessing/pca.py @@ -1,10 +1,15 @@ import sklearn.decomposition -from ...util import hp_uniform, hp_choice +from HPOlibConfigSpace.configuration_space import ConfigurationSpace, \ + Configuration +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter + from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm + class PCA(AutoSklearnPreprocessingAlgorithm): - def __init__(self, keep_variance=1.0, whiten=False, random_state=None): + def __init__(self, keep_variance, whiten, random_state=None): self.keep_variance = keep_variance self.whiten = whiten @@ -25,11 +30,13 @@ def fit(self, X, Y): idx += 1 components = self.preprocessor.components_ + print components self.preprocessor.components_ = components[:idx] def transform(self, X): if self.preprocessor is None: raise NotImplementedError() + print "Transform" return self.preprocessor.transform(X) def handles_missing_values(self): @@ -46,10 +53,12 @@ def handles_non_binary_classes(self): @staticmethod def get_hyperparameter_search_space(): - keep_variance = hp_uniform("keep_variance", 0.5, 1.0) - whiten = hp_choice("whiten", ["False", "True"]) - return {"name": "pca", "keep_variance": keep_variance, - "whiten": whiten} + keep_variance = UniformFloatHyperparameter("keep_variance", 0.5, 1.0) + whiten = CategoricalHyperparameter("whiten", ["False", "True"]) + cs = ConfigurationSpace() + cs.add_hyperparameter(keep_variance) + cs.add_hyperparameter(whiten) + return cs @staticmethod def get_all_accepted_hyperparameter_names(): diff --git a/AutoSklearn/util.py b/AutoSklearn/util.py index 1a36dbd338..5af9c44a4f 100644 --- a/AutoSklearn/util.py +++ b/AutoSklearn/util.py @@ -1,4 +1,10 @@ -import hyperopt.pyll as pyll +import importlib +import inspect +import os +import pkgutil +import sklearn +import sklearn.base +import sys class NoModelException(Exception): @@ -11,96 +17,34 @@ def __str__(self): % (type(self.cls), self.method)) -def hp_pchoice(label, p_options): - """ - label: string - p_options: list of (probability, option) pairs - """ - if not isinstance(label, basestring): - raise TypeError('require string label') - p, options = zip(*p_options) - n_options = len(options) - ch = pyll.scope.hyperopt_param(label, - pyll.scope.categorical( - p, - upper=n_options)) - return pyll.scope.switch(ch, *options) +def find_sklearn_classifiers(): + classifiers = [] + all_subdirectories = [] + sklearn_path = sklearn.__path__[0] + for root, dirs, files in os.walk(sklearn_path): + all_subdirectories.append(root) + for module_loader, module_name, ispkg in \ + pkgutil.iter_modules(all_subdirectories): -def hp_choice(label, options): - if not isinstance(label, basestring): - raise TypeError('require string label') - ch = pyll.scope.hyperopt_param(label, - pyll.scope.randint(len(options))) - return pyll.scope.switch(ch, *options) + # Work around some issues... + if module_name in ["hmm", "mixture"]: + print "Skipping %s" % module_name + continue + module_file = module_loader.__dict__["path"] + sklearn_module = module_file.replace(sklearn_path, "").replace("/", ".") + full_module_name = "sklearn" + sklearn_module + "." + module_name -def hp_randint(label, *args, **kwargs): - if not isinstance(label, basestring): - raise TypeError('require string label') - return pyll.scope.hyperopt_param(label, - pyll.scope.randint(*args, **kwargs)) + pkg = importlib.import_module(full_module_name) + for member_name, obj in inspect.getmembers(pkg): + if inspect.isclass(obj) and \ + issubclass(obj, sklearn.base.ClassifierMixin): + classifier = obj + print member_name, obj + classifiers.append(classifier) -def hp_uniform(label, *args, **kwargs): - if not isinstance(label, basestring): - raise TypeError('require string label') - return pyll.scope.float( - pyll.scope.hyperopt_param(label, - pyll.scope.uniform(*args, **kwargs))) + print classifiers - -def hp_quniform(label, *args, **kwargs): - if not isinstance(label, basestring): - raise TypeError('require string label') - return pyll.scope.float( - pyll.scope.hyperopt_param(label, - pyll.scope.quniform(*args, **kwargs))) - - -def hp_loguniform(label, *args, **kwargs): - if not isinstance(label, basestring): - raise TypeError('require string label') - return pyll.scope.float( - pyll.scope.hyperopt_param(label, - pyll.scope.loguniform(*args, **kwargs))) - - -def hp_qloguniform(label, *args, **kwargs): - if not isinstance(label, basestring): - raise TypeError('require string label') - return pyll.scope.float( - pyll.scope.hyperopt_param(label, - pyll.scope.qloguniform(*args, **kwargs))) - - -def hp_normal(label, *args, **kwargs): - if not isinstance(label, basestring): - raise TypeError('require string label') - return pyll.scope.float( - pyll.scope.hyperopt_param(label, - pyll.scope.normal(*args, **kwargs))) - - -def hp_qnormal(label, *args, **kwargs): - if not isinstance(label, basestring): - raise TypeError('require string label') - return pyll.scope.float( - pyll.scope.hyperopt_param(label, - pyll.scope.qnormal(*args, **kwargs))) - - -def hp_lognormal(label, *args, **kwargs): - if not isinstance(label, basestring): - raise TypeError('require string label') - return pyll.scope.float( - pyll.scope.hyperopt_param(label, - pyll.scope.lognormal(*args, **kwargs))) - - -def hp_qlognormal(label, *args, **kwargs): - if not isinstance(label, basestring): - raise TypeError('require string label') - return pyll.scope.float( - pyll.scope.hyperopt_param(label, - pyll.scope.qlognormal(*args, **kwargs))) \ No newline at end of file +find_sklearn_classifiers() \ No newline at end of file diff --git a/source/first_steps.rst b/source/first_steps.rst index 5be649844c..6ca37b256d 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -1,25 +1,26 @@ First Steps with AutoSklearn **************************** -This example demonstrates how to get the whole search space covered by -AutoSklearn, feed it to the random search algorithm implemented by the hyperopt -package and then train a classifier with a random configuration on the iris -dataset. +This example demonstrates how to get the whole configuration space covered by +AutoSklearn, feed it to the random search algorithm implemented by the +HPOlibConfigSpace package and then train a classifier with a random +configuration on the iris dataset. >>> from AutoSklearn.autosklearn import AutoSklearnClassifier + >>> from HPOlibConfigSpace.random_sampler import RandomSampler >>> import sklearn.datasets >>> import sklearn.metrics >>> import numpy as np - >>> import hyperopt >>> iris = sklearn.datasets.load_iris() >>> X = iris.data >>> Y = iris.target >>> indices = np.arange(X.shape[0]) >>> np.random.shuffle(indices) - >>> auto = AutoSklearnClassifier() - >>> search_space = auto.get_hyperparameter_search_space() - >>> configuration = hyperopt.pyll.stochastic.sample(search_space) - >>> auto = AutoSklearnClassifier(classifier=configuration['classifier'], preprocessor=configuration['preprocessing']) + >>> configuration_space = AutoSklearnClassifier.get_hyperparameter_search_space() + >>> sampler = RandomSampler(configuration_space, 1) + >>> configuration = sampler.sample_configuration() + >>> print configuration + >>> auto = AutoSklearnClassifier(configuration) >>> auto = auto.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = auto.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) diff --git a/source/introduction.rst b/source/introduction.rst index 50458fa7a7..5f6a86b34e 100644 --- a/source/introduction.rst +++ b/source/introduction.rst @@ -23,7 +23,7 @@ modification, are permitted provided that the following conditions are met: * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - * Neither the name of the nor the + * Neither the name of the University of Freiburg, nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. From 62090456da58cebd9bacceb88f95a935ca2ef707 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 24 Nov 2014 16:08:52 +0100 Subject: [PATCH 016/352] Add default configuration --- AutoSklearn/__init__.py | 4 +- AutoSklearn/autosklearn.py | 16 +-- .../components/classification/liblinear.py | 12 +- .../components/classification/libsvm_svc.py | 5 +- .../classification/random_forest.py | 28 +++-- AutoSklearn/components/preprocessing/pca.py | 8 +- AutoSklearn/util.py | 4 +- setup.py | 3 +- tests/test_autosklearn.py | 111 +++++++----------- 9 files changed, 90 insertions(+), 101 deletions(-) diff --git a/AutoSklearn/__init__.py b/AutoSklearn/__init__.py index bf27f9dd6d..f47aeea3f9 100644 --- a/AutoSklearn/__init__.py +++ b/AutoSklearn/__init__.py @@ -3,4 +3,6 @@ AutoSklearn provides a configuration space spanning a huge part of the scikit-learn models. This configuration space can be searched by one of the -hyperparameter optimization algorithms in the HPOlib.""" \ No newline at end of file +hyperparameter optimization algorithms in the HPOlib.""" + +__version__ = "0.15.2dev" \ No newline at end of file diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index 0213899967..64d4e0f2a0 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -1,6 +1,13 @@ +from . import __version__ + import numpy as np from numpy import float64 +import sklearn +if sklearn.__version__ != "0.15.2": + raise ValueError("AutoSklearn supports only sklearn version 0.15.2, " + "you installed %s." % sklearn.__version__) + from sklearn.base import BaseEstimator, ClassifierMixin from sklearn.utils import check_random_state from sklearn.utils.validation import safe_asarray, assert_all_finite @@ -120,12 +127,10 @@ def fit(self, X, Y): if isinstance(instantiated_hyperparameter, InactiveHyperparameter): continue - print instantiated_hyperparameter.hyperparameter.name name_ = instantiated_hyperparameter.hyperparameter.name.\ split(":")[1] parameters[name_] = instantiated_hyperparameter.value - print parameters random_state = check_random_state(self.random_state) self._estimator = components.classification_components._classifiers\ [name](random_state=random_state, **parameters) @@ -134,7 +139,7 @@ def fit(self, X, Y): self._validate_input_Y(Y) preprocessor = self.configuration['preprocessor'] - if preprocessor.value != "__None__": + if preprocessor.value != "None": preproc_name = preprocessor.value preproc_params = {} @@ -149,7 +154,6 @@ def fit(self, X, Y): split(":")[1] preproc_params[name_] = instantiated_hyperparameter.value - print preproc_params self._preprocessor = components.preprocessing_components.\ _preprocessors[preproc_name](random_state=random_state, **preproc_params) self._preprocessor.fit(X, Y) @@ -248,7 +252,6 @@ def get_hyperparameter_search_space(): classifier = CategoricalHyperparameter( "classifier", [name for name in available_classifiers]) cs.add_hyperparameter(classifier) - for name in available_classifiers: # We have to retrieve the configuration space every time because # we change the objects it returns. If we reused it, we could not @@ -282,7 +285,7 @@ def get_hyperparameter_search_space(): preprocessor = CategoricalHyperparameter( "preprocessor", [name for name in available_preprocessors] + [ - "__None__"]) + "None"]) cs.add_hyperparameter(preprocessor) for name in available_preprocessors: for parameter in available_preprocessors[name].\ @@ -308,7 +311,6 @@ def get_hyperparameter_search_space(): if not dlc.hyperparameter.startwith(name): dlc.hyperparameter.name = "%s:%s" % (name, dlc.hyperparameter.name) - print forbidden_clause cs.add_forbidden_clause(forbidden_clause) return cs diff --git a/AutoSklearn/components/classification/liblinear.py b/AutoSklearn/components/classification/liblinear.py index 2718e938a3..480e7211d6 100644 --- a/AutoSklearn/components/classification/liblinear.py +++ b/AutoSklearn/components/classification/liblinear.py @@ -62,10 +62,12 @@ def get_meta_information(): @staticmethod def get_hyperparameter_search_space(): - penalty = CategoricalHyperparameter("penalty", ["l1", "l2"]) - loss = CategoricalHyperparameter("loss", ["l1", "l2"]) - C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True) - dual = Constant("dual", "__False__") + penalty = CategoricalHyperparameter("penalty", ["l1", "l2"], + default="l2") + loss = CategoricalHyperparameter("loss", ["l1", "l2"], default="l2") + C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True, + default=1.0) + dual = Constant("dual", "False") cs = ConfigurationSpace() cs.add_hyperparameter(penalty) cs.add_hyperparameter(loss) @@ -76,7 +78,7 @@ def get_hyperparameter_search_space(): ForbiddenEqualsClause(loss, "l1") ) constant_penalty_and_loss = ForbiddenAndConjunction( - ForbiddenEqualsClause(dual, "__False__"), + ForbiddenEqualsClause(dual, "False"), ForbiddenEqualsClause(penalty, "l2"), ForbiddenEqualsClause(loss, "l1") ) diff --git a/AutoSklearn/components/classification/libsvm_svc.py b/AutoSklearn/components/classification/libsvm_svc.py index b74570caf1..0ebe5fedca 100644 --- a/AutoSklearn/components/classification/libsvm_svc.py +++ b/AutoSklearn/components/classification/libsvm_svc.py @@ -54,9 +54,10 @@ def get_meta_information(): @staticmethod def get_hyperparameter_search_space(): - C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True) + C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True, + default=1.0) gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8, - log=True) + log=True, default=0.1) cs = ConfigurationSpace() cs.add_hyperparameter(C) cs.add_hyperparameter(gamma) diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index ba812bac76..94668acfe7 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -24,7 +24,7 @@ def __init__(self, n_estimators, criterion, max_features, def fit(self, X, Y): self.n_estimators = int(self.n_estimators) - if self.max_depth == "__None__": + if self.max_depth == "Non_": self.max_depth = None elif self.max_depth is not None: self.max_depth = int(self.max_depth) @@ -32,6 +32,10 @@ def fit(self, X, Y): self.min_samples_leaf = int(self.min_samples_leaf) if self.max_features not in ("sqrt", "log2", "auto"): self.max_features = float(self.max_features) + if self.bootstrap == "True": + self.bootstrap = True + else: + self.bootstrap = False self.estimator = sklearn.ensemble.RandomForestClassifier( n_estimators=self.n_estimators, criterion=self.criterion, @@ -66,18 +70,22 @@ def get_meta_information(): @staticmethod def get_hyperparameter_search_space(): - n_estimators = UniformIntegerHyperparameter("n_estimators", 10, 100) - criterion = CategoricalHyperparameter("criterion", ["gini", "entropy"]) - max_features = UniformFloatHyperparameter("max_features", 0.01, 1.0) + n_estimators = UniformIntegerHyperparameter( + "n_estimators", 10, 100, default=10) + criterion = CategoricalHyperparameter( + "criterion", ["gini", "entropy"], default="gini") + max_features = UniformFloatHyperparameter( + "max_features", 0.01, 1.0, default=1.0) # Don't know how to parametrize this...RF should rather be # regularized by the other parameters # max_depth = hp_uniform("max_depth", lower, upper) - max_depth = UnParametrizedHyperparameter("max_depth", "__None__") - min_samples_split = UniformIntegerHyperparameter("min_samples_split", - 1, 20) - min_samples_leaf = UniformIntegerHyperparameter("min_samples_leaf", - 1, 20) - bootstrap = CategoricalHyperparameter("bootstrap", ["True", "False"]) + max_depth = UnParametrizedHyperparameter("max_depth", "None") + min_samples_split = UniformIntegerHyperparameter( + "min_samples_split", 1, 20, default=2) + min_samples_leaf = UniformIntegerHyperparameter( + "min_samples_leaf", 1, 20, default=1) + bootstrap = CategoricalHyperparameter( + "bootstrap", ["True", "False"], default="True") cs = ConfigurationSpace() cs.add_hyperparameter(n_estimators) cs.add_hyperparameter(criterion) diff --git a/AutoSklearn/components/preprocessing/pca.py b/AutoSklearn/components/preprocessing/pca.py index 3719bd7621..ef27f8ac3b 100644 --- a/AutoSklearn/components/preprocessing/pca.py +++ b/AutoSklearn/components/preprocessing/pca.py @@ -30,13 +30,11 @@ def fit(self, X, Y): idx += 1 components = self.preprocessor.components_ - print components self.preprocessor.components_ = components[:idx] def transform(self, X): if self.preprocessor is None: raise NotImplementedError() - print "Transform" return self.preprocessor.transform(X) def handles_missing_values(self): @@ -53,8 +51,10 @@ def handles_non_binary_classes(self): @staticmethod def get_hyperparameter_search_space(): - keep_variance = UniformFloatHyperparameter("keep_variance", 0.5, 1.0) - whiten = CategoricalHyperparameter("whiten", ["False", "True"]) + keep_variance = UniformFloatHyperparameter( + "keep_variance", 0.5, 1.0, default=1.0) + whiten = CategoricalHyperparameter( + "whiten", ["False", "True"], default="False") cs = ConfigurationSpace() cs.add_hyperparameter(keep_variance) cs.add_hyperparameter(whiten) diff --git a/AutoSklearn/util.py b/AutoSklearn/util.py index 5af9c44a4f..3755f09db9 100644 --- a/AutoSklearn/util.py +++ b/AutoSklearn/util.py @@ -47,4 +47,6 @@ def find_sklearn_classifiers(): print classifiers -find_sklearn_classifiers() \ No newline at end of file + +if __name__ == "__main__": + find_sklearn_classifiers() \ No newline at end of file diff --git a/setup.py b/setup.py index b35c8b9b50..ba20e9ab31 100644 --- a/setup.py +++ b/setup.py @@ -7,8 +7,7 @@ packages=setuptools.find_packages(), install_requires=["numpy", "scipy", - "scikit_learn==0.15.1", - #"HPOlibConfigSpace==0.1dev"], + "scikit-learn==0.15.2", "--editable git+https://github.com/mfeurer/HPOlibConfigSpace#egg=HPOlibConfigSpace0.1dev"], package_data={'': ['*.txt', '*.md']}, author="Matthias Feurer", diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index b423cf816c..50f4a2fdb7 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -4,13 +4,13 @@ import StringIO import unittest -import hyperopt - import sklearn.datasets import sklearn.decomposition import sklearn.ensemble import sklearn.svm +from HPOlibConfigSpace.configuration_space import Configuration, ConfigurationSpace + from AutoSklearn.autosklearn import AutoSklearnClassifier from AutoSklearn.components.classification_base import AutoSklearnClassificationAlgorithm from AutoSklearn.components.preprocessor_base import AutoSklearnPreprocessingAlgorithm @@ -75,8 +75,23 @@ def test_init_parameters_as_dict_or_as_keywords(self): pass def test_predict_iris(self): - auto = AutoSklearnClassifier(parameters={"classifier": "liblinear", - "preprocessing": None}) + cs = AutoSklearnClassifier.get_hyperparameter_search_space() + hyperparameters = {} + hyperparameters['classifier'] = cs.get_hyperparameter( + "classifier").instantiate("liblinear") + hyperparameters['liblinear:C'] = cs.get_hyperparameter("liblinear:C").\ + instantiate(1.0) + hyperparameters['liblinear:dual'] = cs.get_hyperparameter( + 'liblinear:dual').instantiate('False') + hyperparameters['liblinear:loss'] = cs.get_hyperparameter( + 'liblinear:loss').instantiate('l2') + hyperparameters['liblinear:penalty'] = cs.get_hyperparameter( + 'liblinear:penalty').instantiate('l2') + hyperparameters['preprocessor'] = cs.get_hyperparameter( + 'preprocessor').instantiate('None') + config = Configuration(cs, hyperparameters=hyperparameters) + + auto = AutoSklearnClassifier(config) X_train, Y_train, X_test, Y_test = self.get_iris() auto = auto.fit(X_train, Y_train) predictions = auto.predict(X_test) @@ -86,32 +101,28 @@ def test_predict_iris(self): self.assertIsInstance(auto._estimator.estimator, sklearn.svm.LinearSVC) self.assertAlmostEqual(accuracy, 1.0) - def test_predict_svm(self): - auto = AutoSklearnClassifier(parameters={"classifier": "libsvm_svc", - "preprocessing": None}) - X_train, Y_train, X_test, Y_test = self.get_iris() - auto = auto.fit(X_train, Y_train) - predictions = auto.predict(X_test) - accuracy = sklearn.metrics.accuracy_score(Y_test, predictions) - self.assertIsInstance(auto, AutoSklearnClassifier) - self.assertIsInstance(auto._estimator, AutoSklearnClassificationAlgorithm) - self.assertIsInstance(auto._estimator.estimator, sklearn.svm.SVC) - self.assertAlmostEqual(accuracy, 0.959999999999) - - def test_predict_iris_rf(self): - auto = AutoSklearnClassifier(parameters={"classifier": "random_forest", - "preprocessing": None}) - X_train, Y_train, X_test, Y_test = self.get_iris() - auto = auto.fit(X_train, Y_train) - predictions = auto.predict(X_test) - accuracy = sklearn.metrics.accuracy_score(Y_test, predictions) - self.assertIsInstance(auto, AutoSklearnClassifier) - self.assertIsInstance(auto._estimator, AutoSklearnClassificationAlgorithm) - self.assertIsInstance(auto._estimator.estimator, sklearn.ensemble.RandomForestClassifier) - self.assertAlmostEqual(accuracy, 0.959999999999) - def test_fit_with_preproc(self): - auto = AutoSklearnClassifier("liblinear", "pca") + cs = AutoSklearnClassifier.get_hyperparameter_search_space() + hyperparameters = {} + hyperparameters['classifier'] = cs.get_hyperparameter( + "classifier").instantiate("liblinear") + hyperparameters['liblinear:C'] = cs.get_hyperparameter("liblinear:C"). \ + instantiate(1.0) + hyperparameters['liblinear:dual'] = cs.get_hyperparameter( + 'liblinear:dual').instantiate('False') + hyperparameters['liblinear:loss'] = cs.get_hyperparameter( + 'liblinear:loss').instantiate('l2') + hyperparameters['liblinear:penalty'] = cs.get_hyperparameter( + 'liblinear:penalty').instantiate('l2') + hyperparameters['preprocessor'] = cs.get_hyperparameter( + 'preprocessor').instantiate('pca') + hyperparameters['pca:keep_variance'] = cs.get_hyperparameter( + 'pca:keep_variance').instantiate(1.0) + hyperparameters['pca:whiten'] = cs.get_hyperparameter( + 'pca:whiten').instantiate('False') + config = Configuration(cs, hyperparameters=hyperparameters) + + auto = AutoSklearnClassifier(config) X_train, Y_train, X_test, Y_test = self.get_iris() auto = auto.fit(X_train, Y_train) self.assertIsInstance(auto, AutoSklearnClassifier) @@ -119,49 +130,11 @@ def test_fit_with_preproc(self): self.assertIsInstance(auto._preprocessor.preprocessor, sklearn .decomposition.PCA) - def test_predict_with_preproc(self): - auto = AutoSklearnClassifier("liblinear", "pca") - X_train, Y_train, X_test, Y_test = self.get_iris() - auto = auto.fit(X_train, Y_train) prediction = auto.predict(X_test) - self.assertIsInstance(auto, AutoSklearnClassifier) - self.assertIsInstance(auto._preprocessor, AutoSklearnPreprocessingAlgorithm) - - def test_specify_hyperparameters(self): - auto = AutoSklearnClassifier(random_state=1, - parameters={"classifier": "random_forest", "preprocessing": - "pca", "random_forest:n_estimators": 1, - "random_forest:max_features": 1.0}) - X_train, Y_train, X_test, Y_test = self.get_iris() - auto = auto.fit(X_train, Y_train) - self.assertIsNotNone(auto._preprocessor) - self.assertIsNotNone(auto._preprocessor.preprocessor) - self.assertIsNotNone(auto._estimator) - self.assertIsNotNone(auto._estimator.estimator) - predictions = auto.predict(X_test) - accuracy = sklearn.metrics.accuracy_score(Y_test, predictions) - self.assertAlmostEqual(accuracy, 0.92) - self.assertEqual(auto._estimator.estimator.n_estimators, 1) - - def test_specify_unknown_hyperparameters(self): - self.assertRaisesRegexp(ValueError, - "Parameter random_forest:blablabla is unknown.", - AutoSklearnClassifier, random_state=1, - parameters={"classifier": "random_forest", - "preprocessing": "pca", - "random_forest:blablabla": 1}) - self.assertRaisesRegexp(ValueError, - "Parameter pca:blablabla is unknown.", - AutoSklearnClassifier, random_state=1, - parameters={"classifier": "random_forest", - "preprocessing": "pca", - "pca:blablabla": 1}) def test_get_hyperparameter_search_space(self): - auto = AutoSklearnClassifier(None, None) - space = auto.get_hyperparameter_search_space() - space = hyperopt.pyll.base.as_apply(space) - print space + config = AutoSklearnClassifier.get_hyperparameter_search_space() + self.assertIsInstance(config, ConfigurationSpace) @unittest.skip("test_check_random_state Not yet Implemented") def test_check_random_state(self): From e110e78db4be13ef8178c44c1d2ce87c3f0d6b0d Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 9 Dec 2014 10:01:38 +0100 Subject: [PATCH 017/352] Include comments from meeting on 2nd december --- AutoSklearn/__init__.py | 2 +- AutoSklearn/autosklearn.py | 8 +- AutoSklearn/components/__init__.py | 26 +++- .../components/classification/liblinear.py | 2 +- .../components/classification/libsvm_svc.py | 2 +- .../classification/random_forest.py | 4 +- AutoSklearn/components/classification_base.py | 97 +++---------- AutoSklearn/components/preprocessing/pca.py | 1 + AutoSklearn/components/preprocessor_base.py | 87 +++--------- AutoSklearn/implementations/__init__.py | 1 + AutoSklearn/util.py | 53 ++++++-- Makefile | 2 +- setup.py | 2 + source/api.rst | 16 +-- source/components.rst | 6 +- source/conf.py | 4 +- source/first_steps.rst | 5 +- source/index.rst | 2 +- source/installation.rst | 1 + source/introduction.rst | 17 +-- tests/components/classification/liblinear.py | 11 ++ tests/components/classification/libsvm_svc.py | 14 ++ .../classification/random_forest.py | 14 ++ tests/components/preprocessing/pca.py | 16 +++ tests/doctests.py | 13 ++ tests/test_all_combinations.py | 127 ------------------ tests/test_autosklearn.py | 43 +----- 27 files changed, 214 insertions(+), 362 deletions(-) create mode 100644 AutoSklearn/implementations/__init__.py create mode 100644 tests/components/classification/liblinear.py create mode 100644 tests/components/classification/libsvm_svc.py create mode 100644 tests/components/classification/random_forest.py create mode 100644 tests/components/preprocessing/pca.py create mode 100644 tests/doctests.py delete mode 100644 tests/test_all_combinations.py diff --git a/AutoSklearn/__init__.py b/AutoSklearn/__init__.py index f47aeea3f9..c217c9ba88 100644 --- a/AutoSklearn/__init__.py +++ b/AutoSklearn/__init__.py @@ -3,6 +3,6 @@ AutoSklearn provides a configuration space spanning a huge part of the scikit-learn models. This configuration space can be searched by one of the -hyperparameter optimization algorithms in the HPOlib.""" +hyperparameter optimization algorithms in HPOlib.""" __version__ = "0.15.2dev" \ No newline at end of file diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index 64d4e0f2a0..c220ef3cfa 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -18,9 +18,7 @@ from HPOlibConfigSpace.conditions import EqualsCondition from . import components as components -from .util import NoModelException -task_types = set(["classification"]) class AutoSklearnClassifier(BaseEstimator, ClassifierMixin): """This class implements the classification task. @@ -50,12 +48,12 @@ class AutoSklearnClassifier(BaseEstimator, ClassifierMixin): ---------- _estimator : The underlying scikit-learn classification model. This variable is assigned after a call to the - :ref:`AutoSklearn.autosklearn.AutoSklearnClassifier.fit` method. + :meth:`AutoSklearn.autosklearn.AutoSklearnClassifier.fit` method. _preprocessor : The underlying scikit-learn preprocessing algorithm. This variable is only assigned if a preprocessor is specified and after a call to the - :ref:`AutoSklearn.autosklearn.AutoSklearnClassifier.fit` method. + :meth:`AutoSklearn.autosklearn.AutoSklearnClassifier.fit` method. See also -------- @@ -113,8 +111,6 @@ def fit(self, X, Y): # initializing a class in the init function! # TODO: can this happen now that a configuration is specified at # instantiation time - if "classifier" not in self.configuration: - raise NoModelException(self, "fit(X, Y)") # Extract Hyperparameters from the configuration object name = self.configuration["classifier"].value diff --git a/AutoSklearn/components/__init__.py b/AutoSklearn/components/__init__.py index 352eb36127..8ff6a8f39e 100644 --- a/AutoSklearn/components/__init__.py +++ b/AutoSklearn/components/__init__.py @@ -5,12 +5,36 @@ found, the algorithm must be provide a class implementing one of the given interfaces. +Coding Guidelines +================= +Please try to adhere to the `scikit-learn coding guidelines `_. + +Own Implementation of Algorithms +================================ +When adding new algorithms, it is possible to implement it directly in the +fit/predict/transform method of a component. We do not recommend this, +but rather recommend to implement an algorithm in a scikit-learn compatible +way (`see here `_). +Such an implementation should then be put into the `implementation` directory. +and can then be easily wrapped with to become a component in AutoSklearn. + Classification ============== +The AutoSklearnClassificationAlgorithm provides an interface for +Classification Algorithms inside AutoSklearn. It provides four important +functions. Two of them, +:meth:`get_hyperparameter_search_space() ` +and +:meth:`get_properties() ` +are used to +automatically create a valid configuration space. The other two, +:meth:`fit() ` and +:meth:`predict() ` +are an implementation of the `scikit-learn predictor API `_. Preprocessing =============""" from . import classification as classification_components -from . import preprocessing as preprocessing_components \ No newline at end of file +from . import preprocessing as preprocessing_components diff --git a/AutoSklearn/components/classification/liblinear.py b/AutoSklearn/components/classification/liblinear.py index 480e7211d6..b5c6d400a8 100644 --- a/AutoSklearn/components/classification/liblinear.py +++ b/AutoSklearn/components/classification/liblinear.py @@ -34,7 +34,7 @@ def fit(self, X, Y): loss=self.loss, C=self.C, random_state=self.random_state, dual=False) - self.estimator.fit(X, Y) + return self.estimator.fit(X, Y) def predict(self, X): if self.estimator is None: diff --git a/AutoSklearn/components/classification/libsvm_svc.py b/AutoSklearn/components/classification/libsvm_svc.py index 0ebe5fedca..79bb3d35e1 100644 --- a/AutoSklearn/components/classification/libsvm_svc.py +++ b/AutoSklearn/components/classification/libsvm_svc.py @@ -26,7 +26,7 @@ def fit(self, X, Y): self.estimator = sklearn.svm.SVC(C=self.C, gamma=self.gamma, random_state=self.random_state, cache_size=2000) - self.estimator.fit(X, Y) + return self.estimator.fit(X, Y) def predict(self, X): if self.estimator is None: diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index 94668acfe7..79c199eb4e 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -24,7 +24,7 @@ def __init__(self, n_estimators, criterion, max_features, def fit(self, X, Y): self.n_estimators = int(self.n_estimators) - if self.max_depth == "Non_": + if self.max_depth == "None": self.max_depth = None elif self.max_depth is not None: self.max_depth = int(self.max_depth) @@ -43,7 +43,7 @@ def fit(self, X, Y): .min_samples_split, min_samples_leaf=self.min_samples_leaf, max_features=self.max_features, random_state=self.random_state, n_jobs=self.n_jobs) - self.estimator.fit(X, Y) + return self.estimator.fit(X, Y) def predict(self, X): if self.estimator is None: diff --git a/AutoSklearn/components/classification_base.py b/AutoSklearn/components/classification_base.py index 83a0c06ae8..2df1b7c89e 100644 --- a/AutoSklearn/components/classification_base.py +++ b/AutoSklearn/components/classification_base.py @@ -6,100 +6,33 @@ class AutoSklearnClassificationAlgorithm(object): `AutoSklearn/components/classification` to make it available.""" def __init__(self): self.estimator = None + self.properties = None - def handles_missing_values(self): - """Can the underlying algorithm handle missing values itself? + def get_properties(self): + """Get the properties of the underlying algorithm. These are: - Returns - ------- - flag : Boolean - True if the underlying algorithm handles missing values itself, - otherwise False. - - Note - ---- - - This feature is not implemented yet. Missing values are not supported. - """ - raise NotImplementedError() - - def handles_nominal_features(self): - """Can the underlying algorithm handle nominal features? - - Returns - ------- - flag : Boolean - True if the underlying algorithm handles nominal values itself, - otherwise False. - - Note - ---- - - This feature is not implemented yet. Nominal values are not - supported. It is suggested to perform a OneHotEncoding on them. - """ - raise NotImplementedError() - - def handles_numeric_features(self): - """Can the underlying algorithm handle numeric features itself? - - Returns - ------- - flag : Boolean - True if the underlying algorithm handles numeric features itself, - otherwise False. - - Note - ---- - - This feature is not implemented yet. Every algorithm support numeric - features. - """ - raise NotImplementedError() - - def handles_non_binary_classes(self): - """Can the underlying algorithm handle multiclass problems itself? + * Can the algorithm handle missing values + (handles_missing_values : {True, False}) + * Can the algorithm handle nominal features + (handles_nominal_features : {True, False}) + * Can the algorithm handle numerical features + (handles_numerical_features : {True, False}) + * Can the algorithm handle multiclass-classification problems + (handles_multiclass : {True, False}) Returns ------- - flag : Boolean - True if the underlying algorithm handles multiclass problems itself, - otherwise False. - - Note - ---- - - This feature is not implemented yet. Multiclass problems are - supported by every algorithm. + dict """ raise NotImplementedError() def get_hyperparameter_search_space(self): - """Return the configuration space of this classifier. - - Returns - ------- - cs : dict - A dictionary with all hyperparameters as hyperopt.pyll objects. - - """ - raise NotImplementedError() - - def get_all_accepted_hyperparameter_names(self): - """Return the name of all hyperparameters accepted by this classifier. - - This must not be the same as the list returned by - :meth:`get_hyperparameter_search_space`. An example can be found in - the components for the linear svm and the libsvm, where it is also - possible to specifiy the parameters as the exponent to the base two. - - This list is used by the - :class:`AutoSklearn.autosklearn.AutoSklearnClassifier` to check if it - is called with illegal hyperparameters. + """Return the configuration space of this classification algorithm. Returns ------- - names : A list of accepted hyperparameter names. + HPOlibConfigspace.configuration_space.ConfigurationSpace + The configuration space of this classification algorithm. """ raise NotImplementedError() diff --git a/AutoSklearn/components/preprocessing/pca.py b/AutoSklearn/components/preprocessing/pca.py index ef27f8ac3b..dcd591e894 100644 --- a/AutoSklearn/components/preprocessing/pca.py +++ b/AutoSklearn/components/preprocessing/pca.py @@ -31,6 +31,7 @@ def fit(self, X, Y): components = self.preprocessor.components_ self.preprocessor.components_ = components[:idx] + return self def transform(self, X): if self.preprocessor is None: diff --git a/AutoSklearn/components/preprocessor_base.py b/AutoSklearn/components/preprocessor_base.py index 782a87e55f..851c749265 100644 --- a/AutoSklearn/components/preprocessor_base.py +++ b/AutoSklearn/components/preprocessor_base.py @@ -1,54 +1,29 @@ class AutoSklearnPreprocessingAlgorithm(object): + """Provide an abstract interface for preprocessing algorithms in + AutoSklearn. + + Make a subclass of this and put it into the directory + `AutoSklearn/components/preprocessing` to make it available.""" def __init__(self): self.preprocessor = None - def handles_missing_values(self): - """Can the underlying algorithm handle missing values itself? - - Returns - ------- - flag : Boolean - True if the underlying algorithm handles missing values itself, - otherwise False. - - Note - ---- - - This feature is not implemented yet. Missing values are not supported. - """ - raise NotImplementedError() + def get_properties(self): + """Get the properties of the underlying algorithm. These are: - def handles_nominal_features(self): - """Can the underlying algorithm handle nominal features? + * Can the algorithm handle missing values + (handles_missing_values : {True, False}) + * Can the algorithm handle nominal features + (handles_nominal_features : {True, False}) + * Can the algorithm handle numerical features + (handles_numerical_features : {True, False}) + * Can the algorithm handle multiclass-classification problems + (handles_multiclass : {True, False}) + * Can preprocess classification data + (handles_classification_data : {True, False} Returns ------- - flag : Boolean - True if the underlying algorithm handles nominal values itself, - otherwise False. - - Note - ---- - - This feature is not implemented yet. Nominal values are not - supported. It is suggested to perform a OneHotEncoding on them. - """ - raise NotImplementedError() - - def handles_numeric_features(self): - """Can the underlying algorithm handle numeric features itself? - - Returns - ------- - flag : Boolean - True if the underlying algorithm handles numeric features itself, - otherwise False. - - Note - ---- - - This feature is not implemented yet. Every algorithm support numeric - features. + dict """ raise NotImplementedError() @@ -57,28 +32,8 @@ def get_hyperparameter_search_space(self): Returns ------- - cs : dict - A dictionary with all hyperparameters as hyperopt.pyll objects. - - """ - raise NotImplementedError() - - def get_all_accepted_hyperparameter_names(self): - """Return the name of all hyperparameters accepted by this preprocessing - algorithm. - - This must not be the same as the list returned by - :meth:`get_hyperparameter_search_space`. An example can be found in - the components for the linear svm and the libsvm, where it is also - possible to specifiy the parameters as the exponent to the base two. - - This list is used by the - :class:`AutoSklearn.autosklearn.AutoSklearnClassifier` to check if it - is called with illegal hyperparameters. - - Returns - ------- - names : A list of accepted hyperparameter names. + HPOlibConfigspace.configuration_space.ConfigurationSpace + The configuration space of this preprocessing algorithm. """ raise NotImplementedError() @@ -105,7 +60,7 @@ def fit(self, X, Y): raise NotImplementedError() def transform(self, X): - """The predict function calls the transform function of the + """The transform function calls the transform function of the underlying scikit-learn model and returns the transformed array. Parameters diff --git a/AutoSklearn/implementations/__init__.py b/AutoSklearn/implementations/__init__.py new file mode 100644 index 0000000000..8f0ce6cb7c --- /dev/null +++ b/AutoSklearn/implementations/__init__.py @@ -0,0 +1 @@ +__author__ = 'feurerm' diff --git a/AutoSklearn/util.py b/AutoSklearn/util.py index 3755f09db9..f37ff66ea6 100644 --- a/AutoSklearn/util.py +++ b/AutoSklearn/util.py @@ -2,19 +2,13 @@ import inspect import os import pkgutil + +import numpy as np import sklearn import sklearn.base -import sys - - -class NoModelException(Exception): - def __init__(self, cls, method): - self.cls = cls - self.method = method +import sklearn.datasets - def __str__(self): - return repr("You called %s.%s without specifying a model first." - % (type(self.cls), self.method)) +from .autosklearn import AutoSklearnClassifier def find_sklearn_classifiers(): @@ -48,5 +42,44 @@ def find_sklearn_classifiers(): print classifiers +def get_iris(): + iris = sklearn.datasets.load_iris() + X = iris.data + Y = iris.target + rs = np.random.RandomState(42) + indices = np.arange(X.shape[0]) + rs.shuffle(indices) + X = X[indices] + Y = Y[indices] + X_train = X[:100] + Y_train = Y[:100] + X_test = X[100:] + Y_test = Y[100:] + return X_train, Y_train, X_test, Y_test + + +def test_classifier_with_iris(Classifier): + X_train, Y_train, X_test, Y_test = get_iris() + configuration_space = Classifier.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + classifier = Classifier(random_state=1, + **{hp.hyperparameter.name: hp.value for hp in + default.values.values()}) + predictor = classifier.fit(X_train, Y_train) + predictions = predictor.predict(X_test) + return predictions, Y_test + + +def test_preprocessing_with_iris(Preprocessor): + X_train, Y_train, X_test, Y_test = get_iris() + configuration_space = Preprocessor.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = Preprocessor(random_state=1, + **{hp.hyperparameter.name: hp.value for hp in + default.values.values()}) + transformer = preprocessor.fit(X_train, Y_train) + return transformer.transform(X_test), X_test + + if __name__ == "__main__": find_sklearn_classifiers() \ No newline at end of file diff --git a/Makefile b/Makefile index 4eaa687bce..90b4515b7f 100644 --- a/Makefile +++ b/Makefile @@ -4,7 +4,7 @@ # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build -PAPER = +PAPER = BUILDDIR = build # User-friendly check for sphinx-build diff --git a/setup.py b/setup.py index ba20e9ab31..1d54bc14da 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,9 @@ install_requires=["numpy", "scipy", "scikit-learn==0.15.2", + "nose", "--editable git+https://github.com/mfeurer/HPOlibConfigSpace#egg=HPOlibConfigSpace0.1dev"], + test_suite="nose.collector", package_data={'': ['*.txt', '*.md']}, author="Matthias Feurer", author_email="feurerm@informatik.uni-freiburg.de", diff --git a/source/api.rst b/source/api.rst index 6436324149..7e3110ad57 100644 --- a/source/api.rst +++ b/source/api.rst @@ -1,23 +1,19 @@ +:orphan: + .. _api: -API -*** +APIs +**** Main modules ============ .. autoclass:: AutoSklearn.autosklearn.AutoSklearnClassifier - :members: - -.. autoclass:: AutoSklearn.autosklearn.AutoSklearnRegressor - :members: + Extension Interfaces ==================== .. autoclass:: AutoSklearn.components.classification_base.AutoSklearnClassificationAlgorithm - - .. automethod:: AutoSklearn.components.classification_base.AutoSklearnClassificationAlgorithm.__init__ -.. autoclass:: AutoSklearn.components.preprocessor_base.AutoSklearnPreprocessingAlgorithm - +.. autoclass:: AutoSklearn.components.preprocessor_base.AutoSklearnPreprocessingAlgorithm \ No newline at end of file diff --git a/source/components.rst b/source/components.rst index 3bca511491..f8f485b1da 100644 --- a/source/components.rst +++ b/source/components.rst @@ -1,7 +1,9 @@ +:orphan: + .. _components: -Components -********** +Available Components +******************** Classification ============== diff --git a/source/conf.py b/source/conf.py index 589bcbf6c2..5e55d86ab3 100644 --- a/source/conf.py +++ b/source/conf.py @@ -19,6 +19,7 @@ # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath('../..')) # -- General configuration ------------------------------------------------ @@ -37,8 +38,9 @@ 'numpydoc', # Important for get headings like Parameters... ] +numpydoc_show_class_members = False autosummary_generate = True -autodoc_default_flags = ['members', 'inherited-members', 'undoc-members', +autodoc_default_flags = ['members', 'inherited-members', 'show-inheritance'] # Add any paths that contain templates here, relative to this directory. diff --git a/source/first_steps.rst b/source/first_steps.rst index 6ca37b256d..ab3369f4ae 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -15,13 +15,14 @@ configuration on the iris dataset. >>> X = iris.data >>> Y = iris.target >>> indices = np.arange(X.shape[0]) + >>> np.random.seed(1) >>> np.random.shuffle(indices) >>> configuration_space = AutoSklearnClassifier.get_hyperparameter_search_space() >>> sampler = RandomSampler(configuration_space, 1) >>> configuration = sampler.sample_configuration() - >>> print configuration - >>> auto = AutoSklearnClassifier(configuration) + >>> auto = AutoSklearnClassifier(configuration, random_state=1) >>> auto = auto.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = auto.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) + 0.88 diff --git a/source/index.rst b/source/index.rst index fbef0eb019..27718c75ef 100644 --- a/source/index.rst +++ b/source/index.rst @@ -21,6 +21,6 @@ Indices and Tables ================== * :ref:`API ` -* :ref:`Components ` +* :ref:`Available Components ` * :ref:`search` diff --git a/source/installation.rst b/source/installation.rst index 0d405ab144..70e4decf11 100644 --- a/source/installation.rst +++ b/source/installation.rst @@ -1,3 +1,4 @@ Install AutoSklearn ******************* +Please see the file `README.md`. \ No newline at end of file diff --git a/source/introduction.rst b/source/introduction.rst index 5f6a86b34e..6b64d8ba43 100644 --- a/source/introduction.rst +++ b/source/introduction.rst @@ -18,14 +18,15 @@ All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - * Neither the name of the University of Freiburg, nor the - names of its contributors may be used to endorse or promote products - derived from this software without specific prior written permission. + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +* Neither the name of the University of Freiburg, nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED diff --git a/tests/components/classification/liblinear.py b/tests/components/classification/liblinear.py new file mode 100644 index 0000000000..2b38b1d83d --- /dev/null +++ b/tests/components/classification/liblinear.py @@ -0,0 +1,11 @@ +import unittest + +from AutoSklearn.components.classification.liblinear import LibLinear_SVC +from AutoSklearn.util import test_classifier_with_iris + + +class LibLinearComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = test_classifier_with_iris(LibLinear_SVC) + self.assertTrue(all(targets == predictions)) \ No newline at end of file diff --git a/tests/components/classification/libsvm_svc.py b/tests/components/classification/libsvm_svc.py new file mode 100644 index 0000000000..4c84f4de2a --- /dev/null +++ b/tests/components/classification/libsvm_svc.py @@ -0,0 +1,14 @@ +import unittest + +from AutoSklearn.components.classification.libsvm_svc import LibSVM_SVC +from AutoSklearn.util import test_classifier_with_iris + +import sklearn.metrics + + +class LibSVM_SVCComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = test_classifier_with_iris(LibSVM_SVC) + self.assertAlmostEqual(0.96, + sklearn.metrics.accuracy_score(predictions, targets)) diff --git a/tests/components/classification/random_forest.py b/tests/components/classification/random_forest.py new file mode 100644 index 0000000000..2c2ee4d937 --- /dev/null +++ b/tests/components/classification/random_forest.py @@ -0,0 +1,14 @@ +import unittest + +from AutoSklearn.components.classification.random_forest import RandomForest +from AutoSklearn.util import test_classifier_with_iris + +import sklearn.metrics + + +class RandomForestComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = test_classifier_with_iris(RandomForest) + self.assertAlmostEqual(0.94, + sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file diff --git a/tests/components/preprocessing/pca.py b/tests/components/preprocessing/pca.py new file mode 100644 index 0000000000..788a37a952 --- /dev/null +++ b/tests/components/preprocessing/pca.py @@ -0,0 +1,16 @@ +import unittest + +from AutoSklearn.components.preprocessing.pca import PCA +from AutoSklearn.util import test_preprocessing_with_iris + + +class LibLinearComponentTest(unittest.TestCase): + def test_default_configuration(self): + transformations = [] + for i in range(10): + transformation, original = test_preprocessing_with_iris(PCA) + self.assertEqual(transformation.shape, original.shape) + self.assertFalse((transformation == original).all()) + transformations.append(transformation) + if len(transformations) > 1: + self.assertTrue((transformations[-1] == transformations[-2]).all()) \ No newline at end of file diff --git a/tests/doctests.py b/tests/doctests.py new file mode 100644 index 0000000000..fe513dd2f7 --- /dev/null +++ b/tests/doctests.py @@ -0,0 +1,13 @@ +import doctest +import os +import unittest + +import AutoSklearn + + +class DocumentationTest(unittest.TestCase): + def test_first_steps(self): + filename = os.path.dirname(AutoSklearn.__file__) + filename = os.path.join(filename, "..", "source", "first_steps.rst") + failed, run = doctest.testfile(filename, module_relative=False) + self.assertEqual(0, failed) \ No newline at end of file diff --git a/tests/test_all_combinations.py b/tests/test_all_combinations.py deleted file mode 100644 index 201974e2de..0000000000 --- a/tests/test_all_combinations.py +++ /dev/null @@ -1,127 +0,0 @@ -__author__ = 'feurerm' - -import numpy as np -import time -import unittest - -import itertools - -import sklearn.datasets -import sklearn.decomposition - -from AutoSklearn.autosklearn import AutoSklearnClassifier - -class TestAllCombinations(unittest.TestCase): - def get_iris(self): - iris = sklearn.datasets.load_iris() - X = iris.data - Y = iris.target - rs = np.random.RandomState(42) - indices = np.arange(X.shape[0]) - rs.shuffle(indices) - X = X[indices] - Y = Y[indices] - X_train = X[:100] - Y_train = Y[:100] - X_test = X[100:] - Y_test = Y[100:] - return X_train, Y_train, X_test, Y_test - - def test_all_combinations(self): - # TODO: do the combination testing on the basis of one component - # TODO: automate the testing, so far it is enumerated by hand - parameter_combinations = list() - - libsvm_svc = [] - libsvm_svc_C_values = range(-5, 15 + 1) - libsvm_svc_gamma_values = range(-15, 3 + 1) - for C, gamma in itertools.product(libsvm_svc_C_values, libsvm_svc_gamma_values): - libsvm_svc.append({"libsvm_svc:LOG2_C": C, - "libsvm_svc:LOG2_gamma": gamma, - "classifier": "libsvm_svc"}) - print "Parameter configurations LibSVM-SVC", len(libsvm_svc) - - liblinear = [] - liblinear_C_values = range(-5, 15 + 1) - for C in liblinear_C_values: - for penalty_and_loss in [{"penalty": "l1", "loss": "l2"}, - {"penalty": "l2", "loss": "l1"}, - {"penalty": "l2", "loss": "l2"}]: - liblinear.append({"liblinear:LOG2_C": C, - "liblinear:penalty": penalty_and_loss["penalty"], - "liblinear:loss": penalty_and_loss["loss"], - "classifier": "liblinear"}) - print "Parameter configurations LibLinear", len(liblinear) - - random_forest = [] - random_forest_n_estimators = range(10, 100 + 1, 10) - # This makes things too expensive - # random_forst_min_samples_leaf = [1, 2, 4, 7, 10, 15, 20] - random_forst_min_splits = [1, 2, 4, 7, 10] - random_forest_max_features = np.linspace(0.01, 1.0, 8) - random_forest_max_features = itertools.chain( - random_forest_max_features, ["sqrt", "log2"]) - random_forest_criterion = ["gini", "entropy"] - # random_forest_bootstrap = [True, False] - - #for n_est, min_leaf, min_splits, max_features, criterion, bootstrap in \ - for n_est, min_splits, max_features, criterion in \ - itertools.product(random_forest_n_estimators, - #random_forst_min_samples_leaf, - random_forst_min_splits, - random_forest_max_features, - random_forest_criterion): - #random_forest_bootstrap) - random_forest.append(({"random_forest:n_estimators": n_est, - "random_forest:criterion": criterion, - "random_forest:max_features": max_features, - "random_forest:min_samples_split": min_splits, - #"random_forest:min_samples_leaf": min_leaf, - #"random_forest:bootstrap": bootstrap, - "classifier": "random_forest"})) - print "Parameter configurations RF", len(random_forest) - - pca = [] - pca_n_components = np.linspace(0.60, 1.0, 10) - # pca_whiten = [True, False] - #for n_components, whiten in itertools.product(pca_n_components): - #pca_whiten): - for n_components in pca_n_components: - pca.append({"pca:keep_variance": n_components, - #"pca:whiten": whiten, - "preprocessing": "pca"}) - print "Parameter configurations PCA", len(pca) - - classifiers = [liblinear, libsvm_svc, random_forest] - preprocessors = [pca, [{"preprocessing": None}]] - - for classifier, preprocessor in itertools.product(classifiers, - preprocessors): - print classifier[0]["classifier"], preprocessor[0]["preprocessing"] - for classifier_params, preprocessor_params in itertools.product( - classifier, preprocessor): - params = {} - params.update(classifier_params) - params.update(preprocessor_params) - parameter_combinations.append(params) - - starttime = time.time() - print len(parameter_combinations) - for i, parameter_combination in enumerate(parameter_combinations): - auto = AutoSklearnClassifier(parameters=parameter_combination) - X_train, Y_train, X_test, Y_test = self.get_iris() - try: - auto = auto.fit(X_train, Y_train) - except Exception as e: - print parameter_combination - print (parameter_combination['random_forest:max_features'] * X_train.shape[1]) - raise e - predictions = auto.predict(X_test) - accuracy = sklearn.metrics.accuracy_score(Y_test, predictions) - - if i % 1000 == 0 and i != 0: - print "Iteration", i - print (time.time() - starttime) * 1000 / i - - print "Finished, took", time.time() - starttime - diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index 50f4a2fdb7..6b747ba0c4 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -16,26 +16,12 @@ from AutoSklearn.components.preprocessor_base import AutoSklearnPreprocessingAlgorithm import AutoSklearn.components.classification as classification_components import AutoSklearn.components.preprocessing as preprocessing_components -from AutoSklearn.util import NoModelException +from AutoSklearn.util import get_iris class TestAutoSKlearnClassifier(unittest.TestCase): # TODO: test for both possible ways to initialize AutoSklearn # parameters and other... - def get_iris(self): - iris = sklearn.datasets.load_iris() - X = iris.data - Y = iris.target - rs = np.random.RandomState(42) - indices = np.arange(X.shape[0]) - rs.shuffle(indices) - X = X[indices] - Y = Y[indices] - X_train = X[:100] - Y_train = Y[:100] - X_test = X[100:] - Y_test = Y[100:] - return X_train, Y_train, X_test, Y_test def test_find_classifiers(self): classifiers = classification_components._classifiers @@ -51,29 +37,6 @@ def test_find_preprocessors(self): self.assertIn(AutoSklearnPreprocessingAlgorithm, preprocessors[key].__bases__) - def test_init_no_classifier(self): - try: - AutoSklearnClassifier(None, None) - except NoModelException as e: - self.assertEqual(e.__str__(), - '"You called .__init__() without ' - 'specifying a model first."') - - def test_init_unknown_classifier(self): - self.assertRaises(KeyError, AutoSklearnClassifier, - "qufrpdvltromeaiudtroembdtaiubo", None) - - def test_init_unknown_parameter(self): - self.assertRaises(KeyError, AutoSklearnClassifier, - None, None,parameters={"classifier": "liblinear", - "preprocessing": None, - "libsvm_svc:gamma": 0.025}) - - @unittest.skip("test_init_parameters_as_dict_or_as_keywords Not yet Implemented") - def test_init_parameters_as_dict_or_as_keywords(self): - pass - def test_predict_iris(self): cs = AutoSklearnClassifier.get_hyperparameter_search_space() hyperparameters = {} @@ -92,7 +55,7 @@ def test_predict_iris(self): config = Configuration(cs, hyperparameters=hyperparameters) auto = AutoSklearnClassifier(config) - X_train, Y_train, X_test, Y_test = self.get_iris() + X_train, Y_train, X_test, Y_test = get_iris() auto = auto.fit(X_train, Y_train) predictions = auto.predict(X_test) accuracy = sklearn.metrics.accuracy_score(Y_test, predictions) @@ -123,7 +86,7 @@ def test_fit_with_preproc(self): config = Configuration(cs, hyperparameters=hyperparameters) auto = AutoSklearnClassifier(config) - X_train, Y_train, X_test, Y_test = self.get_iris() + X_train, Y_train, X_test, Y_test = get_iris() auto = auto.fit(X_train, Y_train) self.assertIsInstance(auto, AutoSklearnClassifier) self.assertIsInstance(auto._preprocessor, AutoSklearnPreprocessingAlgorithm) From 68bb543a1a8bf3f606f8e4fa34fb7d0c8a00d1ee Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 11 Dec 2014 10:09:02 +0100 Subject: [PATCH 018/352] Small changes to the component interfaces --- AutoSklearn/components/classification_base.py | 16 ++++++++++++---- AutoSklearn/components/preprocessor_base.py | 12 ++++++++++-- 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/AutoSklearn/components/classification_base.py b/AutoSklearn/components/classification_base.py index 2df1b7c89e..0e7468939f 100644 --- a/AutoSklearn/components/classification_base.py +++ b/AutoSklearn/components/classification_base.py @@ -11,14 +11,22 @@ def __init__(self): def get_properties(self): """Get the properties of the underlying algorithm. These are: - * Can the algorithm handle missing values + * Can the algorithm handle missing values? (handles_missing_values : {True, False}) - * Can the algorithm handle nominal features + * Can the algorithm handle nominal features? (handles_nominal_features : {True, False}) - * Can the algorithm handle numerical features + * Can the algorithm handle numerical features? (handles_numerical_features : {True, False}) - * Can the algorithm handle multiclass-classification problems + * Can the algorithm handle multiclass-classification problems? (handles_multiclass : {True, False}) + * Can the algorithm handle multilabel-classification problems? + (handles_multilabel : {True, False} + * Is the algorithm deterministic for a given seed? + (is_deterministic : {True, False) + * Can the algorithm handle sparse data? + (handles_sparse : {True, False} + * What are the preferred types of the data array? + (preferred_dtype : list of tuples) Returns ------- diff --git a/AutoSklearn/components/preprocessor_base.py b/AutoSklearn/components/preprocessor_base.py index 851c749265..f788568fe6 100644 --- a/AutoSklearn/components/preprocessor_base.py +++ b/AutoSklearn/components/preprocessor_base.py @@ -16,10 +16,18 @@ def get_properties(self): (handles_nominal_features : {True, False}) * Can the algorithm handle numerical features (handles_numerical_features : {True, False}) - * Can the algorithm handle multiclass-classification problems - (handles_multiclass : {True, False}) * Can preprocess classification data (handles_classification_data : {True, False} + * Can the algorithm handle multiclass-classification problems + (handles_multiclass : {True, False}) + * Can the algorithm handle multilabel-classification problems? + (handles_multilabel : {True, False} + * Is the algorithm deterministic for a given seed? + (is_deterministic : {True, False) + * Can the algorithm handle sparse data? + (handles_sparse : {True, False} + * What are the preferred types of the data array? + (preferred_dtype : list of tuples) Returns ------- From f34b186d5265557aa7a0e9e658cd1788550fabad Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 11 Dec 2014 10:09:50 +0100 Subject: [PATCH 019/352] Add a list of finished implementations --- AutoSklearn/util.py | 6 ++---- misc/classifiers.csv | 43 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 45 insertions(+), 4 deletions(-) create mode 100644 misc/classifiers.csv diff --git a/AutoSklearn/util.py b/AutoSklearn/util.py index f37ff66ea6..30607361dc 100644 --- a/AutoSklearn/util.py +++ b/AutoSklearn/util.py @@ -8,11 +8,9 @@ import sklearn.base import sklearn.datasets -from .autosklearn import AutoSklearnClassifier - def find_sklearn_classifiers(): - classifiers = [] + classifiers = set() all_subdirectories = [] sklearn_path = sklearn.__path__[0] for root, dirs, files in os.walk(sklearn_path): @@ -37,7 +35,7 @@ def find_sklearn_classifiers(): issubclass(obj, sklearn.base.ClassifierMixin): classifier = obj print member_name, obj - classifiers.append(classifier) + classifiers.add(classifier) print classifiers diff --git a/misc/classifiers.csv b/misc/classifiers.csv new file mode 100644 index 0000000000..347a5fa9d6 --- /dev/null +++ b/misc/classifiers.csv @@ -0,0 +1,43 @@ +Name,class,added, +,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +RandomForestClassifier,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +SVC,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +LinearSVC,, +,, +,, +,, From 532acbc0a2c75bfe1f35fa5a667c2c1c650792b3 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 11 Dec 2014 14:26:41 +0100 Subject: [PATCH 020/352] Add all hyperparameters of the three models already included --- AutoSklearn/autosklearn.py | 34 ++--- .../components/classification/liblinear.py | 91 +++++++++----- .../components/classification/libsvm_svc.py | 116 +++++++++++++----- .../classification/random_forest.py | 67 +++++----- AutoSklearn/components/classification_base.py | 21 +++- AutoSklearn/components/preprocessing/pca.py | 45 ++++--- AutoSklearn/components/preprocessor_base.py | 18 ++- misc/classifiers.csv | 6 +- setup.py | 2 +- tests/test_autosklearn.py | 59 --------- 10 files changed, 266 insertions(+), 193 deletions(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index c220ef3cfa..05f4530501 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -1,5 +1,3 @@ -from . import __version__ - import numpy as np from numpy import float64 @@ -258,16 +256,20 @@ def get_hyperparameter_search_space(): get_hyperparameter_search_space().get_hyperparameters(): parameter.name = "%s:%s" % (name, parameter.name) cs.add_hyperparameter(parameter) - condition = EqualsCondition(parameter, classifier, name) - cs.add_condition(condition) + # We must only add a condition if the hyperparameter is not + # conditional on something else + if cs.get_parents_of(parameter): + condition = EqualsCondition(parameter, classifier, name) + cs.add_condition(condition) for condition in available_classifiers[name]. \ get_hyperparameter_search_space().get_conditions(): - dlcs = condition.get_descendent_literal_clauses() + dlcs = condition.get_descendant_literal_conditions() for dlc in dlcs: - if not dlc.hyperparameter.name.startswith(name): - dlc.hyperparameter.name = "%s:%s" % (name, - dlc.hyperparameter.name) + if not dlc.child.name.startswith(name): + dlc.child.name = "%s:%s" % (name, dlc.child.name) + if not dlc.parent.name.startswith(name): + dlc.parent.name = "%s:%s" % (name, dlc.parent.name) cs.add_condition(condition) for forbidden_clause in available_classifiers[name]. \ @@ -288,16 +290,20 @@ def get_hyperparameter_search_space(): get_hyperparameter_search_space().get_hyperparameters(): parameter.name = "%s:%s" % (name, parameter.name) cs.add_hyperparameter(parameter) - condition = EqualsCondition(parameter, preprocessor, name) - cs.add_condition(condition) + # We must only add a condition if the hyperparameter is not + # conditional on something else + if cs.get_parents_of(parameter): + condition = EqualsCondition(parameter, preprocessor, name) + cs.add_condition(condition) for condition in available_preprocessors[name]. \ get_hyperparameter_search_space().get_conditions(): - dlcs = condition.get_descendent_literal_clauses() + dlcs = condition.get_descendent_literal_conditions() for dlc in dlcs: - if not dlc.hyperparameter.startwith(name): - dlc.hyperparameter.name = "%s:%s" % (name, - dlc.hyperparameter.name) + if not dlc.child.name.startswith(name): + dlc.child.name = "%s:%s" % (name, dlc.child.name) + if not dlc.parent.name.startswith(name): + dlc.parent.name = "%s:%s" % (name, dlc.parent.name) cs.add_condition(condition) for forbidden_clause in available_preprocessors[name]. \ diff --git a/AutoSklearn/components/classification/liblinear.py b/AutoSklearn/components/classification/liblinear.py index b5c6d400a8..3deff50d75 100644 --- a/AutoSklearn/components/classification/liblinear.py +++ b/AutoSklearn/components/classification/liblinear.py @@ -2,7 +2,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - CategoricalHyperparameter, Constant + CategoricalHyperparameter, Constant, UnParametrizedHyperparameter from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ ForbiddenAndConjunction @@ -11,29 +11,39 @@ class LibLinear_SVC(AutoSklearnClassificationAlgorithm): # Liblinear is not deterministic as it uses a RNG inside # TODO: maybe add dual and crammer-singer? - def __init__(self, penalty, loss, C, dual, random_state=None,): + def __init__(self, penalty, loss, dual, tol, C, class_weight, + random_state=None): self.penalty = penalty self.loss = loss - self.C = C self.dual = dual + self.tol = tol + self.C = C + self.class_weight = class_weight self.random_state = random_state self.estimator = None def fit(self, X, Y): - #if self.LOG2_C is not None: - # self.LOG2_C = float(self.LOG2_C) - # self.C = 2 ** self.LOG2_C + self.C = float(self.C) + self.tol = float(self.tol) - if self.dual == "__False__": + if self.dual == "False": self.dual = False - elif self.dual == "__True__": + elif self.dual == "True": self.dual = True + else: + raise ValueError("Parameter dual '%s' not in ['True', 'False']" % + (self.dual)) + + if self.class_weight == "None": + self.class_weight = None - self.C = float(self.C) self.estimator = sklearn.svm.LinearSVC(penalty=self.penalty, - loss=self.loss, C=self.C, - random_state=self.random_state, - dual=False) + loss=self.loss, + dual=self.dual, + tol=self.tol, + C=self.C, + class_weight=self.class_weight, + random_state=self.random_state) return self.estimator.fit(X, Y) def predict(self, X): @@ -41,38 +51,59 @@ def predict(self, X): raise NotImplementedError() return self.estimator.predict(X) - def handles_missing_values(self): - # TODO: should be able to handle sparse data itself... - return False - - def handles_nominal_features(self): - return False - - def handles_numeric_features(self): - return True - - def handles_non_binary_classes(self): - # TODO: describe whether by OneVsOne or OneVsTheRest - return True + def scores(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.decision_function(X) @staticmethod def get_meta_information(): return {'shortname': 'Liblinear-SVC', - 'name': 'Liblinear Support Vector Classification'} + 'name': 'Liblinear Support Vector Classification', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # Find out if this is good because of sparsity + 'prefers_data_normalized': False, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': False, + # TODO find out of this is right! + # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use + 'handles_sparse': True, + # TODO find out what is best used here! + 'preferred_dtype' : None} @staticmethod def get_hyperparameter_search_space(): penalty = CategoricalHyperparameter("penalty", ["l1", "l2"], default="l2") loss = CategoricalHyperparameter("loss", ["l1", "l2"], default="l2") + dual = Constant("dual", "False") + # This is set ad-how + tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default=1e-4, + log=True) C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True, default=1.0) - dual = Constant("dual", "False") + multi_class = UnParametrizedHyperparameter("multi_class", "ovr") + # These are set ad-hoc + fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True") + intercept_scaling = UnParametrizedHyperparameter("intercept_scaling", 1) + # This does not allow for other resampling methods! + class_weight = CategoricalHyperparameter("class_weight", + ["None", "auto"], + default="None") cs = ConfigurationSpace() cs.add_hyperparameter(penalty) cs.add_hyperparameter(loss) - cs.add_hyperparameter(C) cs.add_hyperparameter(dual) + cs.add_hyperparameter(tol) + cs.add_hyperparameter(C) + cs.add_hyperparameter(multi_class) + cs.add_hyperparameter(fit_intercept) + cs.add_hyperparameter(intercept_scaling) + cs.add_hyperparameter(class_weight) penalty_and_loss = ForbiddenAndConjunction( ForbiddenEqualsClause(penalty, "l1"), ForbiddenEqualsClause(loss, "l1") @@ -86,9 +117,5 @@ def get_hyperparameter_search_space(): cs.add_forbidden_clause(constant_penalty_and_loss) return cs - @staticmethod - def get_all_accepted_hyperparameter_names(): - return (["LOG2_C", "C", "penalty", "loss"]) - def __str__(self): return "AutoSklearn Liblinear Classifier" diff --git a/AutoSklearn/components/classification/libsvm_svc.py b/AutoSklearn/components/classification/libsvm_svc.py index 79bb3d35e1..791a281a41 100644 --- a/AutoSklearn/components/classification/libsvm_svc.py +++ b/AutoSklearn/components/classification/libsvm_svc.py @@ -1,29 +1,54 @@ import sklearn.svm from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition, OrConjunction +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter,\ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter from ..classification_base import AutoSklearnClassificationAlgorithm class LibSVM_SVC(AutoSklearnClassificationAlgorithm): - # TODO: maybe ad shrinking to the parameters? - def __init__(self, C, gamma, random_state=None): + def __init__(self, C, kernel, degree, gamma, coef0, shrinking, tol, + class_weight, max_iter, random_state=None): self.C = C + self.kernel = kernel + self.degree = degree self.gamma = gamma + self.coef0 = coef0 + self.shrinking = shrinking + self.tol = tol + self.class_weight = class_weight + self.max_iter = max_iter self.random_state = random_state self.estimator = None def fit(self, X, Y): - # if self.LOG2_C is not None: - # self.LOG2_C = float(self.LOG2_C) - # self.C = 2 ** self.LOG2_C - # if self.LOG2_gamma is not None: - # self.LOG2_gamma = float(self.LOG2_gamma) - # self.gamma = 2 ** self.LOG2_gamma - self.C = float(self.C) + self.degree = int(self.degree) self.gamma = float(self.gamma) - self.estimator = sklearn.svm.SVC(C=self.C, gamma=self.gamma, + self.coef0 = float(self.coef0) + self.tol = float(self.tol) + self.max_iter = float(self.max_iter) + + try: + self.shrinking = bool(self.shrinking) + except TypeError as e: + raise TypeError("Value %s not allowed for hyperparameter " + "shrinking" % str(self.shrinking)) + + if self.class_weight == "None": + self.class_weight = None + + self.estimator = sklearn.svm.SVC(C=self.C, + kernel=self.kernel, + degree=self.degree, + gamma=self.gamma, + coef0=self.coef0, + shrinking=self.shrinking, + tol=self.tol, + class_weight=self.class_weight, + max_iter=self.max_iter, random_state=self.random_state, cache_size=2000) return self.estimator.fit(X, Y) @@ -33,39 +58,72 @@ def predict(self, X): raise NotImplementedError return self.estimator.predict(X) - def handles_missing_values(self): - # TODO: should be able to handle sparse data itself... - return False - - def handles_nominal_features(self): - return False - - def handles_numeric_features(self): - return True - - def handles_non_binary_classes(self): - # TODO: describe whether by OneVsOne or OneVsTheRest - return True + def scores(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.decision_function(X) @staticmethod def get_meta_information(): return {'shortname': 'LibSVM-SVC', - 'name': 'LibSVM Support Vector Classification'} + 'name': 'LibSVM Support Vector Classification', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # TODO find out if this is good because of sparsity... + 'prefers_data_normalized': False, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + # TODO find out of this is right! + # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use + 'handles_sparse': True, + # TODO find out what is best used here! + # C-continouos and double precision... + 'preferred_dtype': None} @staticmethod def get_hyperparameter_search_space(): C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True, default=1.0) + # No linear kernel here, because we have liblinear + kernel = CategoricalHyperparameter("kernel", ["rbf", "poly", "sigmoid"]) + degree = UniformIntegerHyperparameter("degree", 1, 5, default=3) gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8, log=True, default=0.1) + # TODO this is totally ad-hoc + coef0 = UniformFloatHyperparameter("coef0", -1, 1, default=0) + # probability is no hyperparameter, but an argument to the SVM algo + shrinking = CategoricalHyperparameter("shrinking", ["True", "False"], + default="True") + tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default=1e-4, + log=True) + # cache size is not a hyperparameter, but an argument to the program! + class_weight = CategoricalHyperparameter("class_weight", + ["None", "auto"], + default="None") + max_iter = UnParametrizedHyperparameter("max_iter", -1) + cs = ConfigurationSpace() cs.add_hyperparameter(C) + cs.add_hyperparameter(kernel) + cs.add_hyperparameter(degree) cs.add_hyperparameter(gamma) - return cs + cs.add_hyperparameter(coef0) + cs.add_hyperparameter(shrinking) + cs.add_hyperparameter(tol) + cs.add_hyperparameter(class_weight) + cs.add_hyperparameter(max_iter) - @staticmethod - def get_all_accepted_hyperparameter_names(): - return (["LOG2_C", "C", "LOG2_gamma", "gamma"]) + degree_depends_on_poly = EqualsCondition(degree, kernel, "poly") + coef0_depends_on_poly = EqualsCondition(coef0, kernel, "poly") + coef0_depends_on_sigmoid = EqualsCondition(coef0, kernel, "sigmoid") + coe0_conditions = OrConjunction(coef0_depends_on_poly, coef0_depends_on_sigmoid) + cs.add_condition(degree_depends_on_poly) + cs.add_condition(coe0_conditions) + + return cs def __str__(self): return "AutoSklearn LibSVM Classifier" diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index 79c199eb4e..a97e593966 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -1,3 +1,4 @@ +import numpy as np import sklearn.ensemble from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -10,23 +11,25 @@ class RandomForest(AutoSklearnClassificationAlgorithm): def __init__(self, n_estimators, criterion, max_features, max_depth, min_samples_split, min_samples_leaf, - bootstrap, random_state=None, n_jobs=1): + bootstrap, max_leaf_nodes, random_state=None, n_jobs=1): self.n_estimators = n_estimators self.criterion = criterion self.max_features = max_features self.max_depth = max_depth self.min_samples_split = min_samples_split self.min_samples_leaf = min_samples_leaf - self.n_jobs = n_jobs self.bootstrap = bootstrap + self.max_leaf_nodes = max_leaf_nodes self.random_state = random_state + self.n_jobs = n_jobs self.estimator = None def fit(self, X, Y): self.n_estimators = int(self.n_estimators) + if self.max_depth == "None": self.max_depth = None - elif self.max_depth is not None: + else: self.max_depth = int(self.max_depth) self.min_samples_split = int(self.min_samples_split) self.min_samples_leaf = int(self.min_samples_leaf) @@ -36,12 +39,19 @@ def fit(self, X, Y): self.bootstrap = True else: self.bootstrap = False + if self.max_leaf_nodes == "None": + self.max_leaf_nodes = None self.estimator = sklearn.ensemble.RandomForestClassifier( - n_estimators=self.n_estimators, criterion=self.criterion, - max_depth=self.max_depth, min_samples_split=self - .min_samples_split, min_samples_leaf=self.min_samples_leaf, - max_features=self.max_features, random_state=self.random_state, + n_estimators=self.n_estimators, + criterion=self.criterion, + max_features=self.max_features, + max_depth=self.max_depth, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + bootstrap=self.bootstrap, + max_leaf_nodes=self.max_leaf_nodes, + random_state=self.random_state, n_jobs=self.n_jobs) return self.estimator.fit(X, Y) @@ -50,40 +60,43 @@ def predict(self, X): raise NotImplementedError return self.estimator.predict(X) - def handles_missing_values(self): - return False - - def handles_nominal_features(self): - return False - - def handles_numeric_features(self): - return True - - def handles_non_binary_classes(self): - # TODO: describe whether by OneVsOne or OneVsTheRest - return True + def scores(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) @staticmethod def get_meta_information(): return {'shortname': 'RF', - 'name': 'Random Forest'} + 'name': 'Random Forest Classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': False, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': False, + # TODO find out what is best used here! + # But rather fortran or C-contiguous? + 'preferred_dtype': np.float32} @staticmethod def get_hyperparameter_search_space(): n_estimators = UniformIntegerHyperparameter( - "n_estimators", 10, 100, default=10) + "n_estimators", 10, 1000, default=10) criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") max_features = UniformFloatHyperparameter( "max_features", 0.01, 1.0, default=1.0) - # Don't know how to parametrize this...RF should rather be - # regularized by the other parameters - # max_depth = hp_uniform("max_depth", lower, upper) max_depth = UnParametrizedHyperparameter("max_depth", "None") min_samples_split = UniformIntegerHyperparameter( "min_samples_split", 1, 20, default=2) min_samples_leaf = UniformIntegerHyperparameter( "min_samples_leaf", 1, 20, default=1) + max_leaf_nodes = UnParametrizedHyperparameter("max_leaf_nodes", "None") bootstrap = CategoricalHyperparameter( "bootstrap", ["True", "False"], default="True") cs = ConfigurationSpace() @@ -93,13 +106,9 @@ def get_hyperparameter_search_space(): cs.add_hyperparameter(max_depth) cs.add_hyperparameter(min_samples_split) cs.add_hyperparameter(min_samples_leaf) + cs.add_hyperparameter(max_leaf_nodes) cs.add_hyperparameter(bootstrap) return cs - @staticmethod - def get_all_accepted_hyperparameter_names(): - return (["n_estimators", "criterion", "max_features", - "min_samples_split", "min_samples_leaf", "bootstrap"]) - def __str__(self): return "AutoSklearn LibSVM Classifier" diff --git a/AutoSklearn/components/classification_base.py b/AutoSklearn/components/classification_base.py index 0e7468939f..5bd76bf0cc 100644 --- a/AutoSklearn/components/classification_base.py +++ b/AutoSklearn/components/classification_base.py @@ -11,12 +11,18 @@ def __init__(self): def get_properties(self): """Get the properties of the underlying algorithm. These are: + * Short name + * Full name * Can the algorithm handle missing values? (handles_missing_values : {True, False}) * Can the algorithm handle nominal features? (handles_nominal_features : {True, False}) * Can the algorithm handle numerical features? (handles_numerical_features : {True, False}) + * Does the algorithm prefer data scaled in [0,1]? + (prefers_data_scaled : {True, False} + * Does the algorithm prefer data normalized to 0-mean, 1std? + (prefers_data_normalized : {True, False} * Can the algorithm handle multiclass-classification problems? (handles_multiclass : {True, False}) * Can the algorithm handle multilabel-classification problems? @@ -77,7 +83,7 @@ def predict(self, X): Returns ------- - C : array, shape = (n_samples,) + array, shape = (n_samples,) Returns the predicted values Notes @@ -87,6 +93,19 @@ def predict(self, X): -learn-objects>`_ for further information.""" raise NotImplementedError() + def scores(self, X): + """Predict confidence scores for samples. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) + """ + raise NotImplementedError() + def get_estimator(self): """Return the underlying estimator object. diff --git a/AutoSklearn/components/preprocessing/pca.py b/AutoSklearn/components/preprocessing/pca.py index dcd591e894..72fdf855bc 100644 --- a/AutoSklearn/components/preprocessing/pca.py +++ b/AutoSklearn/components/preprocessing/pca.py @@ -10,16 +10,18 @@ class PCA(AutoSklearnPreprocessingAlgorithm): def __init__(self, keep_variance, whiten, random_state=None): + # TODO document that this implementation does not allow the number of + # components to be specified, but rather the amount of variance to + # be kept! + # TODO it would also be possible to use a heuristic for the number of + # PCA components! self.keep_variance = keep_variance self.whiten = whiten + self.random_state = random_state def fit(self, X, Y): - # TODO: implement that keep_variance can be a percentage (in int) self.preprocessor = sklearn.decomposition.PCA(whiten=self.whiten, copy=True) - # num components is - # selected further down - # the code self.preprocessor.fit(X, Y) sum_ = 0. @@ -38,17 +40,26 @@ def transform(self, X): raise NotImplementedError() return self.preprocessor.transform(X) - def handles_missing_values(self): - return False - - def handles_nominal_features(self): - return False - - def handles_numeric_features(self): - return True - - def handles_non_binary_classes(self): - return True + @staticmethod + def get_meta_information(): + return {'shortname': 'PCA', + 'name': 'Principle Component Analysis', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + # TODO write a test to make sure that the PCA scales data itself + 'prefers_data_scaled': False, + # TODO find out if this is good because of sparsity... + 'prefers_data_normalized': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + # TODO document that we have to be very careful + 'is_deterministic': False, + # TODO find out of this is right! + 'handles_sparse': False, + # TODO find out what is best used here! + 'preferred_dtype': None} @staticmethod def get_hyperparameter_search_space(): @@ -61,9 +72,5 @@ def get_hyperparameter_search_space(): cs.add_hyperparameter(whiten) return cs - @staticmethod - def get_all_accepted_hyperparameter_names(): - return (["keep_variance", "whiten"]) - def __str__(self): return "AutoSklearn Principle Component Analysis preprocessor." diff --git a/AutoSklearn/components/preprocessor_base.py b/AutoSklearn/components/preprocessor_base.py index f788568fe6..56a03f61ad 100644 --- a/AutoSklearn/components/preprocessor_base.py +++ b/AutoSklearn/components/preprocessor_base.py @@ -10,15 +10,21 @@ def __init__(self): def get_properties(self): """Get the properties of the underlying algorithm. These are: - * Can the algorithm handle missing values + * Short name + * Full name + * Can the algorithm handle missing values? (handles_missing_values : {True, False}) - * Can the algorithm handle nominal features + * Can the algorithm handle nominal features? (handles_nominal_features : {True, False}) - * Can the algorithm handle numerical features + * Can the algorithm handle numerical features? (handles_numerical_features : {True, False}) - * Can preprocess classification data - (handles_classification_data : {True, False} - * Can the algorithm handle multiclass-classification problems + * Does the algorithm prefer data scaled in [0,1]? + (prefers_data_scaled : {True, False} + * Does the algorithm prefer data normalized to 0-mean, 1std? + (prefers_data_normalized : {True, False} + * Can preprocess classification data? + (handles_classification : {True, False} + * Can the algorithm handle multiclass-classification problems? (handles_multiclass : {True, False}) * Can the algorithm handle multilabel-classification problems? (handles_multilabel : {True, False} diff --git a/misc/classifiers.csv b/misc/classifiers.csv index 347a5fa9d6..6721a03441 100644 --- a/misc/classifiers.csv +++ b/misc/classifiers.csv @@ -11,7 +11,7 @@ Name,class,added, ,, ,, ,, -RandomForestClassifier,, +RandomForestClassifier,,True ,, ,, ,, @@ -25,7 +25,7 @@ RandomForestClassifier,, ,, ,, ,, -SVC,, +SVC,,True ,, ,, ,, @@ -37,7 +37,7 @@ SVC,, ,, ,, ,, -LinearSVC,, +LinearSVC,,True ,, ,, ,, diff --git a/setup.py b/setup.py index 1d54bc14da..e63b26f633 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ "scipy", "scikit-learn==0.15.2", "nose", - "--editable git+https://github.com/mfeurer/HPOlibConfigSpace#egg=HPOlibConfigSpace0.1dev"], + "HPOlibConfigSpace"], test_suite="nose.collector", package_data={'': ['*.txt', '*.md']}, author="Matthias Feurer", diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index 6b747ba0c4..de10e22f01 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -22,7 +22,6 @@ class TestAutoSKlearnClassifier(unittest.TestCase): # TODO: test for both possible ways to initialize AutoSklearn # parameters and other... - def test_find_classifiers(self): classifiers = classification_components._classifiers self.assertGreaterEqual(len(classifiers), 1) @@ -37,64 +36,6 @@ def test_find_preprocessors(self): self.assertIn(AutoSklearnPreprocessingAlgorithm, preprocessors[key].__bases__) - def test_predict_iris(self): - cs = AutoSklearnClassifier.get_hyperparameter_search_space() - hyperparameters = {} - hyperparameters['classifier'] = cs.get_hyperparameter( - "classifier").instantiate("liblinear") - hyperparameters['liblinear:C'] = cs.get_hyperparameter("liblinear:C").\ - instantiate(1.0) - hyperparameters['liblinear:dual'] = cs.get_hyperparameter( - 'liblinear:dual').instantiate('False') - hyperparameters['liblinear:loss'] = cs.get_hyperparameter( - 'liblinear:loss').instantiate('l2') - hyperparameters['liblinear:penalty'] = cs.get_hyperparameter( - 'liblinear:penalty').instantiate('l2') - hyperparameters['preprocessor'] = cs.get_hyperparameter( - 'preprocessor').instantiate('None') - config = Configuration(cs, hyperparameters=hyperparameters) - - auto = AutoSklearnClassifier(config) - X_train, Y_train, X_test, Y_test = get_iris() - auto = auto.fit(X_train, Y_train) - predictions = auto.predict(X_test) - accuracy = sklearn.metrics.accuracy_score(Y_test, predictions) - self.assertIsInstance(auto, AutoSklearnClassifier) - self.assertIsInstance(auto._estimator, AutoSklearnClassificationAlgorithm) - self.assertIsInstance(auto._estimator.estimator, sklearn.svm.LinearSVC) - self.assertAlmostEqual(accuracy, 1.0) - - def test_fit_with_preproc(self): - cs = AutoSklearnClassifier.get_hyperparameter_search_space() - hyperparameters = {} - hyperparameters['classifier'] = cs.get_hyperparameter( - "classifier").instantiate("liblinear") - hyperparameters['liblinear:C'] = cs.get_hyperparameter("liblinear:C"). \ - instantiate(1.0) - hyperparameters['liblinear:dual'] = cs.get_hyperparameter( - 'liblinear:dual').instantiate('False') - hyperparameters['liblinear:loss'] = cs.get_hyperparameter( - 'liblinear:loss').instantiate('l2') - hyperparameters['liblinear:penalty'] = cs.get_hyperparameter( - 'liblinear:penalty').instantiate('l2') - hyperparameters['preprocessor'] = cs.get_hyperparameter( - 'preprocessor').instantiate('pca') - hyperparameters['pca:keep_variance'] = cs.get_hyperparameter( - 'pca:keep_variance').instantiate(1.0) - hyperparameters['pca:whiten'] = cs.get_hyperparameter( - 'pca:whiten').instantiate('False') - config = Configuration(cs, hyperparameters=hyperparameters) - - auto = AutoSklearnClassifier(config) - X_train, Y_train, X_test, Y_test = get_iris() - auto = auto.fit(X_train, Y_train) - self.assertIsInstance(auto, AutoSklearnClassifier) - self.assertIsInstance(auto._preprocessor, AutoSklearnPreprocessingAlgorithm) - self.assertIsInstance(auto._preprocessor.preprocessor, sklearn - .decomposition.PCA) - - prediction = auto.predict(X_test) - def test_get_hyperparameter_search_space(self): config = AutoSklearnClassifier.get_hyperparameter_search_space() self.assertIsInstance(config, ConfigurationSpace) From dfd23ba635f59f0dbc2c0cdf04445e5f6eda3c66 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 11 Dec 2014 15:18:06 +0100 Subject: [PATCH 021/352] Use the Pipeline class of Scikit-Learn --- AutoSklearn/autosklearn.py | 110 +++++++++++++++++++++++-------------- source/first_steps.rst | 2 +- tests/test_autosklearn.py | 12 ++++ 3 files changed, 82 insertions(+), 42 deletions(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index 05f4530501..bd552e72b4 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -7,6 +7,7 @@ "you installed %s." % sklearn.__version__) from sklearn.base import BaseEstimator, ClassifierMixin +from sklearn.pipeline import Pipeline from sklearn.utils import check_random_state from sklearn.utils.validation import safe_asarray, assert_all_finite @@ -73,8 +74,7 @@ def __init__(self, configuration, random_state=None): cs = self.get_hyperparameter_search_space() cs.check_configuration(configuration) - self._estimator = None - self._preprocessor = None + self._pipeline = None if random_state is None: self.random_state = check_random_state(1) @@ -86,10 +86,11 @@ def fit(self, X, Y): Parameters ---------- - X : array-like, shape = (n_samples, n_features) - Training data. All values must be in the range [0,1]. + X : array-like or sparse, shape = (n_samples, n_features) + Training data. The preferred type of the matrix (dense or sparse) + depends on the classifier selected. - y : array-like, shape = [n_samples] + y : array-like Targets Returns @@ -110,27 +111,7 @@ def fit(self, X, Y): # TODO: can this happen now that a configuration is specified at # instantiation time - # Extract Hyperparameters from the configuration object - name = self.configuration["classifier"].value - - parameters = {} - for instantiated_hyperparameter in self.configuration: - if not instantiated_hyperparameter.hyperparameter.name.startswith( - name): - continue - if isinstance(instantiated_hyperparameter, InactiveHyperparameter): - continue - - name_ = instantiated_hyperparameter.hyperparameter.name.\ - split(":")[1] - parameters[name_] = instantiated_hyperparameter.value - - random_state = check_random_state(self.random_state) - self._estimator = components.classification_components._classifiers\ - [name](random_state=random_state, **parameters) - - self._validate_input_X(X) - self._validate_input_Y(Y) + steps = [] preprocessor = self.configuration['preprocessor'] if preprocessor.value != "None": @@ -138,26 +119,50 @@ def fit(self, X, Y): preproc_params = {} for instantiated_hyperparameter in self.configuration: - if not instantiated_hyperparameter.hyperparameter.name\ + if not instantiated_hyperparameter.hyperparameter.name \ .startswith(preproc_name): continue - if isinstance(instantiated_hyperparameter, InactiveHyperparameter): + if isinstance(instantiated_hyperparameter, + InactiveHyperparameter): continue name_ = instantiated_hyperparameter.hyperparameter.name. \ split(":")[1] preproc_params[name_] = instantiated_hyperparameter.value - self._preprocessor = components.preprocessing_components.\ - _preprocessors[preproc_name](random_state=random_state, **preproc_params) - self._preprocessor.fit(X, Y) - X = self._preprocessor.transform(X) + preprocessor_object = components.preprocessing_components. \ + _preprocessors[preproc_name](random_state=self.random_state, + **preproc_params) + steps.append((preproc_name, preprocessor_object)) + + # Extract Hyperparameters from the configuration object + classifier_name = self.configuration["classifier"].value + classifier_parameters = {} + for instantiated_hyperparameter in self.configuration: + if not instantiated_hyperparameter.hyperparameter.name.startswith( + classifier_name): + continue + if isinstance(instantiated_hyperparameter, InactiveHyperparameter): + continue + + name_ = instantiated_hyperparameter.hyperparameter.name.\ + split(":")[1] + classifier_parameters[name_] = instantiated_hyperparameter.value + + classifier_object = components.classification_components._classifiers\ + [classifier_name](random_state=self.random_state, + **classifier_parameters) + steps.append((classifier_name, classifier_object)) - self._estimator.fit(X, Y) + self._validate_input_X(X) + self._validate_input_Y(Y) + + self._pipeline = Pipeline(steps) + self._pipeline.fit(X, Y) return self def predict(self, X): - """Predict the classes using the selected model.. + """Predict the classes using the selected model. Parameters ---------- @@ -165,18 +170,35 @@ def predict(self, X): Returns ------- - C : array, shape = (n_samples,) + array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) Returns the predicted values""" # TODO check if fit() was called before... - if self._preprocessor is not None: - X = self._preprocessor.transform(X) self._validate_input_X(X) - return self._estimator.predict(X) + return self._pipeline.predict(X) + + def scores(self, X): + """Predict confidence scores for samples using the selected model. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) + """ + self._validate_input_X(X) + + Xt = X + for name, transform in self._pipeline.steps[:-1]: + Xt = transform.transform(Xt) + return self._pipeline.steps[-1].scores(Xt) def _validate_input_X(self, X): # TODO: think of all possible states which can occur and how to # handle them - if not self._estimator.handles_missing_values() or \ + """ + if not self._pipeline[-1].handles_missing_values() or \ (self._preprocessor is not None and not\ self._preprocessor.handles_missing_value()): assert_all_finite(X) @@ -201,8 +223,11 @@ def _validate_input_X(self, X): if X.dtype not in (np.float64, float64, np.float32, float): raise ValueError("Data type of X matrix is not float but %s!" % X.dtype) + """ + pass def _validate_input_Y(self, Y): + """ Y = np.atleast_1d(Y) if not self._estimator.handles_non_binary_classes() or \ (self._preprocessor is not None and not \ @@ -217,6 +242,8 @@ def _validate_input_Y(self, Y): if len(Y.shape) > 1: raise NotImplementedError() + """ + pass def add_model_class(self, model): """ @@ -244,7 +271,8 @@ def get_hyperparameter_search_space(): components.preprocessing_components._preprocessors classifier = CategoricalHyperparameter( - "classifier", [name for name in available_classifiers]) + "classifier", [name for name in available_classifiers], + default='random_forest') cs.add_hyperparameter(classifier) for name in available_classifiers: # We have to retrieve the configuration space every time because @@ -283,7 +311,7 @@ def get_hyperparameter_search_space(): preprocessor = CategoricalHyperparameter( "preprocessor", [name for name in available_preprocessors] + [ - "None"]) + "None"], default='None') cs.add_hyperparameter(preprocessor) for name in available_preprocessors: for parameter in available_preprocessors[name].\ diff --git a/source/first_steps.rst b/source/first_steps.rst index ab3369f4ae..9c956b0a25 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -24,5 +24,5 @@ configuration on the iris dataset. >>> auto = auto.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = auto.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.88 + 0.90000000000000002 diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index de10e22f01..51fe6c8cf9 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -40,6 +40,18 @@ def test_get_hyperparameter_search_space(self): config = AutoSklearnClassifier.get_hyperparameter_search_space() self.assertIsInstance(config, ConfigurationSpace) + def test_default_configuration(self): + for i in range(10): + cs = AutoSklearnClassifier.get_hyperparameter_search_space() + default = cs.get_default_configuration() + print default + X_train, Y_train, X_test, Y_test = get_iris() + auto = AutoSklearnClassifier(default) + auto = auto.fit(X_train, Y_train) + predictions = auto.predict(X_test) + self.assertAlmostEqual(0.94, + sklearn.metrics.accuracy_score(predictions, Y_test)) + @unittest.skip("test_check_random_state Not yet Implemented") def test_check_random_state(self): raise NotImplementedError() From 170f05c6b618602f2984cd8910ec45a3d611cee5 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 12 Dec 2014 10:23:01 +0100 Subject: [PATCH 022/352] Fix error in score calculation in the pipeline --- AutoSklearn/autosklearn.py | 2 +- tests/test_autosklearn.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index bd552e72b4..62fce659f3 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -192,7 +192,7 @@ def scores(self, X): Xt = X for name, transform in self._pipeline.steps[:-1]: Xt = transform.transform(Xt) - return self._pipeline.steps[-1].scores(Xt) + return self._pipeline.steps[-1][-1].scores(Xt) def _validate_input_X(self, X): # TODO: think of all possible states which can occur and how to diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index 51fe6c8cf9..2809504a07 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -51,6 +51,8 @@ def test_default_configuration(self): predictions = auto.predict(X_test) self.assertAlmostEqual(0.94, sklearn.metrics.accuracy_score(predictions, Y_test)) + scores = auto.scores(X_test) + self.assertTrue((scores[3] == [0., 0.2, 0.8]).all()) @unittest.skip("test_check_random_state Not yet Implemented") def test_check_random_state(self): From c990ac06aafcb8ee6fae7d013e4eb3091012facc Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 12 Dec 2014 15:45:50 +0100 Subject: [PATCH 023/352] Add scaling and imputation to the pipeline, fix minor issues --- AutoSklearn/autosklearn.py | 56 +++++++++--------- .../components/classification/liblinear.py | 23 +++++--- .../components/classification/libsvm_svc.py | 4 +- .../classification/random_forest.py | 2 +- .../components/preprocessing/imputation.py | 52 +++++++++++++++++ .../components/preprocessing/rescaling.py | 57 +++++++++++++++++++ tests/components/__init__.py | 1 + tests/components/classification/__init__.py | 1 + .../{liblinear.py => test_liblinear.py} | 0 .../{libsvm_svc.py => test_libsvm_svc.py} | 0 ...random_forest.py => test_random_forest.py} | 0 tests/components/preprocessing/__init__.py | 1 + .../preprocessing/test_imputation.py | 17 ++++++ .../preprocessing/{pca.py => test_pca.py} | 0 tests/test_autosklearn.py | 3 +- tests/{doctests.py => test_doctests.py} | 0 16 files changed, 177 insertions(+), 40 deletions(-) create mode 100644 AutoSklearn/components/preprocessing/imputation.py create mode 100644 AutoSklearn/components/preprocessing/rescaling.py create mode 100644 tests/components/__init__.py create mode 100644 tests/components/classification/__init__.py rename tests/components/classification/{liblinear.py => test_liblinear.py} (100%) rename tests/components/classification/{libsvm_svc.py => test_libsvm_svc.py} (100%) rename tests/components/classification/{random_forest.py => test_random_forest.py} (100%) create mode 100644 tests/components/preprocessing/__init__.py create mode 100644 tests/components/preprocessing/test_imputation.py rename tests/components/preprocessing/{pca.py => test_pca.py} (100%) rename tests/{doctests.py => test_doctests.py} (100%) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index 62fce659f3..f61afc3774 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -113,27 +113,29 @@ def fit(self, X, Y): steps = [] - preprocessor = self.configuration['preprocessor'] - if preprocessor.value != "None": - preproc_name = preprocessor.value - preproc_params = {} - - for instantiated_hyperparameter in self.configuration: - if not instantiated_hyperparameter.hyperparameter.name \ - .startswith(preproc_name): - continue - if isinstance(instantiated_hyperparameter, - InactiveHyperparameter): - continue - - name_ = instantiated_hyperparameter.hyperparameter.name. \ - split(":")[1] - preproc_params[name_] = instantiated_hyperparameter.value - - preprocessor_object = components.preprocessing_components. \ - _preprocessors[preproc_name](random_state=self.random_state, - **preproc_params) - steps.append((preproc_name, preprocessor_object)) + preprocessors_names = ["imputation", "rescaling", + self.configuration['preprocessor'].value] + + for preproc_name in preprocessors_names: + if preproc_name != "None": + preproc_params = {} + + for instantiated_hyperparameter in self.configuration: + if not instantiated_hyperparameter.hyperparameter.name \ + .startswith(preproc_name): + continue + if isinstance(instantiated_hyperparameter, + InactiveHyperparameter): + continue + + name_ = instantiated_hyperparameter.hyperparameter.name. \ + split(":")[1] + preproc_params[name_] = instantiated_hyperparameter.value + + preprocessor_object = components.preprocessing_components. \ + _preprocessors[preproc_name](random_state=self.random_state, + **preproc_params) + steps.append((preproc_name, preprocessor_object)) # Extract Hyperparameters from the configuration object classifier_name = self.configuration["classifier"].value @@ -263,6 +265,8 @@ def get_hyperparameter_search_space(): The configuration space describing the AutoSklearnClassifier. """ + always_active = ["imputation", "rescaling"] + cs = ConfigurationSpace() available_classifiers = \ @@ -270,8 +274,8 @@ def get_hyperparameter_search_space(): available_preprocessors = \ components.preprocessing_components._preprocessors - classifier = CategoricalHyperparameter( - "classifier", [name for name in available_classifiers], + classifier = CategoricalHyperparameter("classifier", + [name for name in available_classifiers if name not in always_active], default='random_forest') cs.add_hyperparameter(classifier) for name in available_classifiers: @@ -309,9 +313,9 @@ def get_hyperparameter_search_space(): dlc.hyperparameter.name) cs.add_forbidden_clause(forbidden_clause) - preprocessor = CategoricalHyperparameter( - "preprocessor", [name for name in available_preprocessors] + [ - "None"], default='None') + preprocessor = CategoricalHyperparameter("preprocessor", + [name for name in available_preprocessors if name not in always_active] + + ["None"], default='None') cs.add_hyperparameter(preprocessor) for name in available_preprocessors: for parameter in available_preprocessors[name].\ diff --git a/AutoSklearn/components/classification/liblinear.py b/AutoSklearn/components/classification/liblinear.py index 3deff50d75..af87c32782 100644 --- a/AutoSklearn/components/classification/liblinear.py +++ b/AutoSklearn/components/classification/liblinear.py @@ -11,13 +11,17 @@ class LibLinear_SVC(AutoSklearnClassificationAlgorithm): # Liblinear is not deterministic as it uses a RNG inside # TODO: maybe add dual and crammer-singer? - def __init__(self, penalty, loss, dual, tol, C, class_weight, - random_state=None): + def __init__(self, penalty, loss, dual, tol, C, multi_class, + fit_intercept, intercept_scaling, class_weight, + random_state=None): self.penalty = penalty self.loss = loss self.dual = dual self.tol = tol self.C = C + self.multi_class = multi_class + self.fit_intercept = fit_intercept + self.intercept_scaling = intercept_scaling self.class_weight = class_weight self.random_state = random_state self.estimator = None @@ -26,13 +30,9 @@ def fit(self, X, Y): self.C = float(self.C) self.tol = float(self.tol) - if self.dual == "False": - self.dual = False - elif self.dual == "True": - self.dual = True - else: - raise ValueError("Parameter dual '%s' not in ['True', 'False']" % - (self.dual)) + self.dual = bool(self.dual) + self.fit_intercept = bool(self.fit_intercept) + self.intercept_scaling = float(self.intercept_scaling) if self.class_weight == "None": self.class_weight = None @@ -113,8 +113,13 @@ def get_hyperparameter_search_space(): ForbiddenEqualsClause(penalty, "l2"), ForbiddenEqualsClause(loss, "l1") ) + penalty_and_dual = ForbiddenAndConjunction( + ForbiddenEqualsClause(dual, "False"), + ForbiddenEqualsClause(penalty, "l1") + ) cs.add_forbidden_clause(penalty_and_loss) cs.add_forbidden_clause(constant_penalty_and_loss) + cs.add_forbidden_clause(penalty_and_dual) return cs def __str__(self): diff --git a/AutoSklearn/components/classification/libsvm_svc.py b/AutoSklearn/components/classification/libsvm_svc.py index 791a281a41..b63afa7ee2 100644 --- a/AutoSklearn/components/classification/libsvm_svc.py +++ b/AutoSklearn/components/classification/libsvm_svc.py @@ -9,8 +9,8 @@ from ..classification_base import AutoSklearnClassificationAlgorithm class LibSVM_SVC(AutoSklearnClassificationAlgorithm): - def __init__(self, C, kernel, degree, gamma, coef0, shrinking, tol, - class_weight, max_iter, random_state=None): + def __init__(self, C, kernel, shrinking, tol, class_weight, max_iter, + degree=3, gamma=0.1, coef0=0, random_state=None): self.C = C self.kernel = kernel self.degree = degree diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index a97e593966..d123ac15af 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -86,7 +86,7 @@ def get_meta_information(): @staticmethod def get_hyperparameter_search_space(): n_estimators = UniformIntegerHyperparameter( - "n_estimators", 10, 1000, default=10) + "n_estimators", 10, 11, default=10) criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") max_features = UniformFloatHyperparameter( diff --git a/AutoSklearn/components/preprocessing/imputation.py b/AutoSklearn/components/preprocessing/imputation.py new file mode 100644 index 0000000000..f38eb01510 --- /dev/null +++ b/AutoSklearn/components/preprocessing/imputation.py @@ -0,0 +1,52 @@ +import sklearn.preprocessing + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter + +from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm + + +class Imputation(AutoSklearnPreprocessingAlgorithm): + def __init__(self, strategy, random_state=None): + # TODO pay attention to the cases when a copy is made (CSR matrices) + self.strategy = strategy + + def fit(self, X, Y): + self.preprocessor = sklearn.preprocessing.Imputer( + strategy=self.strategy, copy=False) + self.preprocessor.fit(X, Y) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_meta_information(): + return {'shortname': 'Imputation', + 'name': 'Imputation', + 'handles_missing_values': True, + 'handles_nominal_values': True, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + # TODO find out of this is right! + 'handles_sparse': True, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(): + # TODO add replace by zero! + strategy = CategoricalHyperparameter( + "strategy", ["mean", "median", "most_frequent"], default="mean") + cs = ConfigurationSpace() + cs.add_hyperparameter(strategy) + return cs + + def __str__(self): + return "AutoSklearn Imputer to replace missing values." diff --git a/AutoSklearn/components/preprocessing/rescaling.py b/AutoSklearn/components/preprocessing/rescaling.py new file mode 100644 index 0000000000..7c23861e07 --- /dev/null +++ b/AutoSklearn/components/preprocessing/rescaling.py @@ -0,0 +1,57 @@ +import sklearn.preprocessing + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter + +from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm + + +class Rescaling(AutoSklearnPreprocessingAlgorithm): + def __init__(self, strategy, random_state=None): + # TODO pay attention to the cases when a copy is made + self.strategy = strategy + + def fit(self, X, Y): + if self.strategy == "min/max": + self.preprocessor = sklearn.preprocessing.MinMaxScaler(copy=False) + elif self.strategy == "standard": + self.preprocessor = sklearn.preprocessing.StandardScaler(copy=False) + else: + raise ValueError(self.strategy) + self.preprocessor.fit(X, Y) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_meta_information(): + return {'shortname': 'Rescaling', + 'name': 'Rescaling', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + # TODO find out of this is right! + 'handles_sparse': True, + # Add something here... + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(): + # TODO add replace by zero! + strategy = CategoricalHyperparameter( + "strategy", ["min/max", "standard"], default="min/max") + cs = ConfigurationSpace() + cs.add_hyperparameter(strategy) + return cs + + def __str__(self): + return "AutoSklearn Imputer to replace missing values." diff --git a/tests/components/__init__.py b/tests/components/__init__.py new file mode 100644 index 0000000000..8f0ce6cb7c --- /dev/null +++ b/tests/components/__init__.py @@ -0,0 +1 @@ +__author__ = 'feurerm' diff --git a/tests/components/classification/__init__.py b/tests/components/classification/__init__.py new file mode 100644 index 0000000000..8f0ce6cb7c --- /dev/null +++ b/tests/components/classification/__init__.py @@ -0,0 +1 @@ +__author__ = 'feurerm' diff --git a/tests/components/classification/liblinear.py b/tests/components/classification/test_liblinear.py similarity index 100% rename from tests/components/classification/liblinear.py rename to tests/components/classification/test_liblinear.py diff --git a/tests/components/classification/libsvm_svc.py b/tests/components/classification/test_libsvm_svc.py similarity index 100% rename from tests/components/classification/libsvm_svc.py rename to tests/components/classification/test_libsvm_svc.py diff --git a/tests/components/classification/random_forest.py b/tests/components/classification/test_random_forest.py similarity index 100% rename from tests/components/classification/random_forest.py rename to tests/components/classification/test_random_forest.py diff --git a/tests/components/preprocessing/__init__.py b/tests/components/preprocessing/__init__.py new file mode 100644 index 0000000000..8f0ce6cb7c --- /dev/null +++ b/tests/components/preprocessing/__init__.py @@ -0,0 +1 @@ +__author__ = 'feurerm' diff --git a/tests/components/preprocessing/test_imputation.py b/tests/components/preprocessing/test_imputation.py new file mode 100644 index 0000000000..234585af6b --- /dev/null +++ b/tests/components/preprocessing/test_imputation.py @@ -0,0 +1,17 @@ +import unittest + +from AutoSklearn.components.static_preprocessing.imputation import Imputation +from AutoSklearn.util import test_preprocessing_with_iris + + +class LibLinearComponentTest(unittest.TestCase): + def test_default_configuration(self): + transformations = [] + for i in range(10): + transformation, original = test_preprocessing_with_iris(Imputation) + self.assertEqual(transformation.shape, original.shape) + self.assertTrue((transformation == original).all()) + transformations.append(transformation) + if len(transformations) > 1: + self.assertTrue( + (transformations[-1] == transformations[-2]).all()) \ No newline at end of file diff --git a/tests/components/preprocessing/pca.py b/tests/components/preprocessing/test_pca.py similarity index 100% rename from tests/components/preprocessing/pca.py rename to tests/components/preprocessing/test_pca.py diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index 2809504a07..1e9deed6d1 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -41,10 +41,9 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(config, ConfigurationSpace) def test_default_configuration(self): - for i in range(10): + for i in range(2): cs = AutoSklearnClassifier.get_hyperparameter_search_space() default = cs.get_default_configuration() - print default X_train, Y_train, X_test, Y_test = get_iris() auto = AutoSklearnClassifier(default) auto = auto.fit(X_train, Y_train) diff --git a/tests/doctests.py b/tests/test_doctests.py similarity index 100% rename from tests/doctests.py rename to tests/test_doctests.py From 4ff09f6b5d2e720a9fc44d222e5e8b8c0c435eab Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 15 Dec 2014 12:53:47 +0100 Subject: [PATCH 024/352] Fix version in README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index f264808c2f..acdb2ac0b9 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ Installation with `pip` - pip install numpy scipy scikit-learn==0.15.1 numpydoc sphinx + pip install numpy scipy scikit-learn==0.15.2 numpydoc sphinx pip install git+https://github.com/mfeurer/HPOlibConfigSpace#egg=HPOlibConfigSpace0.1dev pip install --editable git+https://bitbucket.org/mfeurer/autosklearn#egg=AutoSklearn From ec2b12119f668ac6c3c7ff3fc7095a30c4c49348 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 15 Dec 2014 12:58:51 +0100 Subject: [PATCH 025/352] Add a OneHotEncoder which works on non-NaN arrays --- AutoSklearn/implementations/OneHotEncoder.py | 281 +++++++++++++++++++ tests/implementations/__init__.py | 1 + tests/implementations/test_OneHotEncoder.py | 32 +++ 3 files changed, 314 insertions(+) create mode 100644 AutoSklearn/implementations/OneHotEncoder.py create mode 100644 tests/implementations/__init__.py create mode 100644 tests/implementations/test_OneHotEncoder.py diff --git a/AutoSklearn/implementations/OneHotEncoder.py b/AutoSklearn/implementations/OneHotEncoder.py new file mode 100644 index 0000000000..5834b0f603 --- /dev/null +++ b/AutoSklearn/implementations/OneHotEncoder.py @@ -0,0 +1,281 @@ +import numbers + +import numpy as np +from scipy import sparse + +from sklearn.base import BaseEstimator, TransformerMixin +from sklearn.externals import six +from sklearn.utils import check_arrays +from sklearn.utils import atleast2d_or_csc, safe_asarray + +zip = six.moves.zip +map = six.moves.map +range = six.moves.range + + +def _transform_selected(X, transform, selected="all", copy=True): + """Apply a transform function to portion of selected features + + Parameters + ---------- + X : array-like or sparse matrix, shape=(n_samples, n_features) + Dense array or sparse matrix. + + transform : callable + A callable transform(X) -> X_transformed + + copy : boolean, optional + Copy X even if it could be avoided. + + selected: "all" or array of indices or mask + Specify which features to apply the transform to. + + Returns + ------- + X : array or sparse matrix, shape=(n_samples, n_features_new) + """ + if selected == "all": + X = safe_asarray(X, copy=copy, force_all_finite=False) + return transform(X) + + X = atleast2d_or_csc(X, copy=copy, force_all_finite=False) + + if len(selected) == 0: + return X + + n_features = X.shape[1] + ind = np.arange(n_features) + sel = np.zeros(n_features, dtype=bool) + sel[np.asarray(selected)] = True + not_sel = np.logical_not(sel) + n_selected = np.sum(sel) + + if n_selected == 0: + # No features selected. + return X + elif n_selected == n_features: + # All features selected. + return transform(X) + else: + X_sel = transform(X[:, ind[sel]]) + X_not_sel = X[:, ind[not_sel]] + + if sparse.issparse(X_sel) or sparse.issparse(X_not_sel): + return sparse.hstack((X_sel, X_not_sel)) + else: + return np.hstack((X_sel, X_not_sel)) + + +class OneHotEncoder(BaseEstimator, TransformerMixin): + """Encode categorical integer features using a one-hot aka one-of-K scheme. + + The input to this transformer should be a matrix of integers, denoting + the values taken on by categorical (discrete) features. The output will be + a sparse matrix were each column corresponds to one possible value of one + feature. It is assumed that input features take on values in the range + [0, n_values). + + This encoding is needed for feeding categorical data to many scikit-learn + estimators, notably linear models and SVMs with the standard kernels. + + Parameters + ---------- + n_values : 'auto', int or array of ints + Number of values per feature. + + - 'auto' : determine value range from training data. + - int : maximum value for all features. + - array : maximum value per feature. + + categorical_features: "all" or array of indices or mask + Specify what features are treated as categorical. + + - 'all' (default): All features are treated as categorical. + - array of indices: Array of categorical feature indices. + - mask: Array of length n_features and with dtype=bool. + + Non-categorical features are always stacked to the right of the matrix. + + dtype : number type, default=np.float + Desired dtype of output. + + sparse : boolean, default=True + Will return sparse matrix if set True else will return an array. + + Attributes + ---------- + `active_features_` : array + Indices for active features, meaning values that actually occur + in the training set. Only available when n_values is ``'auto'``. + + `feature_indices_` : array of shape (n_features,) + Indices to feature ranges. + Feature ``i`` in the original data is mapped to features + from ``feature_indices_[i]`` to ``feature_indices_[i+1]`` + (and then potentially masked by `active_features_` afterwards) + + `n_values_` : array of shape (n_features,) + Maximum number of values per feature. + + Examples + -------- + Given a dataset with three features and two samples, we let the encoder + find the maximum value per feature and transform the data to a binary + one-hot encoding. + + >>> from sklearn.preprocessing import OneHotEncoder + >>> enc = OneHotEncoder() + >>> enc.fit([[0, 0, 3], [1, 1, 0], [0, 2, 1], \ +[1, 0, 2]]) # doctest: +ELLIPSIS + OneHotEncoder(categorical_features='all', dtype=<... 'float'>, + n_values='auto', sparse=True) + >>> enc.n_values_ + array([2, 3, 4]) + >>> enc.feature_indices_ + array([0, 2, 5, 9]) + >>> enc.transform([[0, 1, 1]]).toarray() + array([[ 1., 0., 0., 1., 0., 0., 1., 0., 0.]]) + + See also + -------- + sklearn.feature_extraction.DictVectorizer : performs a one-hot encoding of + dictionary items (also handles string-valued features). + sklearn.feature_extraction.FeatureHasher : performs an approximate one-hot + encoding of dictionary items or strings. + """ + + def __init__(self, n_values="auto", categorical_features="all", + dtype=np.float, sparse=True): + self.n_values = n_values + self.categorical_features = categorical_features + self.dtype = dtype + self.sparse = sparse + + def fit(self, X, y=None): + """Fit OneHotEncoder to X. + + Parameters + ---------- + X : array-like, shape=(n_samples, n_feature) + Input array of type int. + + Returns + ------- + self + """ + self.fit_transform(X) + return self + + def _fit_transform(self, X): + """Assumes X contains only categorical features.""" + n_samples, n_features = X.shape + + uniques = [np.unique(X[:,i], False, True, False) + for i in range(n_features)] + n_values = [0] + column_indices = [] + feature_indices = [] + + for idx, values_ in enumerate(uniques): + unique_elements, inverse = values_ + + feature_indices_idx = dict() + + # Number of unique elements in that column (without np.NaN) + n_uniques = np.sum(np.isfinite(unique_elements)) + n_values.append(n_uniques) + offset = np.sum(n_values[:-1]) + + # Transform the inverse to proper indices, where all np.NaN are + # indexed by a -1. + column_indices_idx = [-1 if index > n_uniques else index + for index in inverse] + feature_indices_idx = {unique: index + offset for index, unique in + enumerate(unique_elements)} + + column_indices_idx = np.array(column_indices_idx) + offset + + column_indices.extend(column_indices_idx) + feature_indices.append(feature_indices_idx) + + row_indices = np.tile(np.arange(n_samples, dtype=np.int32), + n_features) + + self.feature_indices_ = feature_indices + self.n_values_ = n_values + data = np.ones(n_samples * n_features) + out = sparse.coo_matrix((data, (row_indices, column_indices)), + shape=(n_samples, np.sum(n_values)), + dtype=self.dtype).tocsr() + + if self.n_values == 'auto': + mask = np.array(out.sum(axis=0)).ravel() != 0 + active_features = np.where(mask)[0] + out = out[:, active_features] + self.active_features_ = active_features + + return out if self.sparse else out.toarray() + + def fit_transform(self, X, y=None): + """Fit OneHotEncoder to X, then transform X. + + Equivalent to self.fit(X).transform(X), but more convenient and more + efficient. See fit for the parameters, transform for the return value. + """ + return _transform_selected(X, self._fit_transform, + self.categorical_features, copy=True) + + def _transform(self, X): + """Assumes X contains only categorical features.""" + X = check_arrays(X, sparse_format='dense', dtype=np.int)[0] + if np.any(X < 0): + raise ValueError("X needs to contain only non-negative integers.") + n_samples, n_features = X.shape + + indices = self.feature_indices_ + if n_features != len(indices): + raise ValueError("X has different shape than during fitting." + " Expected %d, got %d." + % (len(indices), n_features)) + + #if (np.max(X, axis=0) >= self.n_values_).any(): + # raise ValueError("Feature out of bounds. Try setting n_values.") + + #column_indices = (X + indices[:-1]).ravel() + row_indices = np.tile(np.arange(n_samples, dtype=np.int32), + n_features) + + column_indices = [] + max_n_features = 0 + for idx, feature in enumerate(range(n_features)): + # TODO + indices_idx = indices[idx] + column_indices.extend([indices_idx[value] for value in X[:,idx]]) + max_n_features = max(max_n_features, max(column_indices)) + # The highest index we find is zero-based... + max_n_features += 1 + + data = np.ones(n_samples * n_features) + out = sparse.coo_matrix((data, (row_indices, column_indices)), + shape=(n_samples, max_n_features), + dtype=self.dtype).tocsr() + if self.n_values == 'auto': + out = out[:, self.active_features_] + + return out if self.sparse else out.toarray() + + def transform(self, X): + """Transform X using one-hot encoding. + + Parameters + ---------- + X : array-like, shape=(n_samples, n_features) + Input array of type int. + + Returns + ------- + X_out : sparse matrix if sparse=True else a 2-d array, dtype=int + Transformed input. + """ + return _transform_selected(X, self._transform, + self.categorical_features, copy=True) diff --git a/tests/implementations/__init__.py b/tests/implementations/__init__.py new file mode 100644 index 0000000000..8f0ce6cb7c --- /dev/null +++ b/tests/implementations/__init__.py @@ -0,0 +1 @@ +__author__ = 'feurerm' diff --git a/tests/implementations/test_OneHotEncoder.py b/tests/implementations/test_OneHotEncoder.py new file mode 100644 index 0000000000..0d7f6710b7 --- /dev/null +++ b/tests/implementations/test_OneHotEncoder.py @@ -0,0 +1,32 @@ +import unittest + +import numpy as np + +from AutoSklearn.implementations.OneHotEncoder import OneHotEncoder + +dense1 = [[1, 5, 9], + [1, 3, 9]] +dense1_1h = [[1, 0, 1, 1], + [1, 1, 0, 1]] + +dense2 = [[1, np.NaN, 9], + [np.NaN, 3, 9], + [2, 1, 7]] +dense2_1h = [[1, 0, 0, 0, 0, 1], + [0, 0, 0, 1, 0, 1], + [0, 1, 1, 0, 1, 0]] + + +class OneHotEncoderTest(unittest.TestCase): + def test_dense1(self): + self.fit_then_transform(dense1_1h, dense1) + + def test_dense2(self): + self.fit_then_transform(dense2_1h, dense2) + + def fit_then_transform(self, expected, input): + ohe = OneHotEncoder() + ohe.fit(input) + transformation = ohe.transform(input) + transformation = transformation.todense() + self.assertTrue((expected == transformation).all()) \ No newline at end of file From de074e29f36c33d5a8627f9e8ab92f1e0fd46d82 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 15 Dec 2014 14:38:18 +0100 Subject: [PATCH 026/352] Add support for sparse matrices and np.NaN to the OneHotEncoder --- AutoSklearn/implementations/OneHotEncoder.py | 71 +++++++------------- tests/implementations/test_OneHotEncoder.py | 27 +++++++- 2 files changed, 50 insertions(+), 48 deletions(-) diff --git a/AutoSklearn/implementations/OneHotEncoder.py b/AutoSklearn/implementations/OneHotEncoder.py index 5834b0f603..e4e2584750 100644 --- a/AutoSklearn/implementations/OneHotEncoder.py +++ b/AutoSklearn/implementations/OneHotEncoder.py @@ -61,7 +61,7 @@ def _transform_selected(X, transform, selected="all", copy=True): X_not_sel = X[:, ind[not_sel]] if sparse.issparse(X_sel) or sparse.issparse(X_not_sel): - return sparse.hstack((X_sel, X_not_sel)) + return sparse.hstack((X_sel, X_not_sel)).tocsr() else: return np.hstack((X_sel, X_not_sel)) @@ -80,13 +80,6 @@ class OneHotEncoder(BaseEstimator, TransformerMixin): Parameters ---------- - n_values : 'auto', int or array of ints - Number of values per feature. - - - 'auto' : determine value range from training data. - - int : maximum value for all features. - - array : maximum value per feature. - categorical_features: "all" or array of indices or mask Specify what features are treated as categorical. @@ -144,9 +137,8 @@ class OneHotEncoder(BaseEstimator, TransformerMixin): encoding of dictionary items or strings. """ - def __init__(self, n_values="auto", categorical_features="all", + def __init__(self, categorical_features="all", dtype=np.float, sparse=True): - self.n_values = n_values self.categorical_features = categorical_features self.dtype = dtype self.sparse = sparse @@ -173,47 +165,40 @@ def _fit_transform(self, X): uniques = [np.unique(X[:,i], False, True, False) for i in range(n_features)] n_values = [0] + column_indices = [] + data = [] feature_indices = [] for idx, values_ in enumerate(uniques): unique_elements, inverse = values_ - feature_indices_idx = dict() - # Number of unique elements in that column (without np.NaN) n_uniques = np.sum(np.isfinite(unique_elements)) n_values.append(n_uniques) offset = np.sum(n_values[:-1]) - # Transform the inverse to proper indices, where all np.NaN are - # indexed by a -1. - column_indices_idx = [-1 if index > n_uniques else index + column_indices_idx = [offset if index >= n_uniques + else index + offset for index in inverse] - feature_indices_idx = {unique: index + offset for index, unique in - enumerate(unique_elements)} - - column_indices_idx = np.array(column_indices_idx) + offset + data_idx = [0 if index >= n_uniques else 1 for index in inverse] + feature_indices_idx = {unique: index + offset + for index, unique in enumerate(unique_elements) + if np.isfinite(unique)} column_indices.extend(column_indices_idx) + data.extend(data_idx) feature_indices.append(feature_indices_idx) row_indices = np.tile(np.arange(n_samples, dtype=np.int32), n_features) self.feature_indices_ = feature_indices - self.n_values_ = n_values - data = np.ones(n_samples * n_features) + self.n_values = n_values out = sparse.coo_matrix((data, (row_indices, column_indices)), shape=(n_samples, np.sum(n_values)), dtype=self.dtype).tocsr() - if self.n_values == 'auto': - mask = np.array(out.sum(axis=0)).ravel() != 0 - active_features = np.where(mask)[0] - out = out[:, active_features] - self.active_features_ = active_features - return out if self.sparse else out.toarray() def fit_transform(self, X, y=None): @@ -227,9 +212,7 @@ def fit_transform(self, X, y=None): def _transform(self, X): """Assumes X contains only categorical features.""" - X = check_arrays(X, sparse_format='dense', dtype=np.int)[0] - if np.any(X < 0): - raise ValueError("X needs to contain only non-negative integers.") + X = check_arrays(X, sparse_format='dense', allow_nans=True)[0] n_samples, n_features = X.shape indices = self.feature_indices_ @@ -238,29 +221,27 @@ def _transform(self, X): " Expected %d, got %d." % (len(indices), n_features)) - #if (np.max(X, axis=0) >= self.n_values_).any(): - # raise ValueError("Feature out of bounds. Try setting n_values.") - #column_indices = (X + indices[:-1]).ravel() row_indices = np.tile(np.arange(n_samples, dtype=np.int32), n_features) + data = [] column_indices = [] - max_n_features = 0 + for idx, feature in enumerate(range(n_features)): - # TODO - indices_idx = indices[idx] - column_indices.extend([indices_idx[value] for value in X[:,idx]]) - max_n_features = max(max_n_features, max(column_indices)) - # The highest index we find is zero-based... - max_n_features += 1 - - data = np.ones(n_samples * n_features) + offset = np.sum(self.n_values[:idx+1]) + feature_indices_idx = self.feature_indices_[idx] + column_indices_idx = [feature_indices_idx.get(x, offset) + for x in X[:,idx]] + data_idx = [1 if feature_indices_idx.get(x) is not None else 0 + for x in X[:, idx]] + + column_indices.extend(column_indices_idx) + data.extend(data_idx) + out = sparse.coo_matrix((data, (row_indices, column_indices)), - shape=(n_samples, max_n_features), + shape=(n_samples, np.sum(self.n_values)), dtype=self.dtype).tocsr() - if self.n_values == 'auto': - out = out[:, self.active_features_] return out if self.sparse else out.toarray() diff --git a/tests/implementations/test_OneHotEncoder.py b/tests/implementations/test_OneHotEncoder.py index 0d7f6710b7..cfa688d26d 100644 --- a/tests/implementations/test_OneHotEncoder.py +++ b/tests/implementations/test_OneHotEncoder.py @@ -1,6 +1,7 @@ import unittest import numpy as np +import scipy.sparse from AutoSklearn.implementations.OneHotEncoder import OneHotEncoder @@ -15,18 +16,38 @@ dense2_1h = [[1, 0, 0, 0, 0, 1], [0, 0, 0, 1, 0, 1], [0, 1, 1, 0, 1, 0]] +dense2_partial_1h = [[1., 0., 0., 0., 9.], + [0., 0., 0., 1., 9.], + [0., 1., 1., 0., 7.]] class OneHotEncoderTest(unittest.TestCase): def test_dense1(self): self.fit_then_transform(dense1_1h, dense1) + self.fit_then_transform_dense(dense1_1h, dense1) def test_dense2(self): self.fit_then_transform(dense2_1h, dense2) + self.fit_then_transform_dense(dense2_1h, dense2) - def fit_then_transform(self, expected, input): - ohe = OneHotEncoder() + def test_dense2_with_non_sparse_components(self): + self.fit_then_transform(dense2_partial_1h, dense2, + categorical_features=[True, True, False]) + self.fit_then_transform_dense(dense2_partial_1h, dense2, + categorical_features=[True, True, False]) + + def fit_then_transform(self, expected, input, categorical_features='all'): + ohe = OneHotEncoder(categorical_features=categorical_features) ohe.fit(input) transformation = ohe.transform(input) + self.assertIsInstance(transformation, scipy.sparse.csr_matrix) transformation = transformation.todense() - self.assertTrue((expected == transformation).all()) \ No newline at end of file + self.assertTrue((expected == transformation).all()) + + def fit_then_transform_dense(self, expected, input, categorical_features='all'): + ohe = OneHotEncoder(categorical_features=categorical_features, + sparse=False) + ohe.fit(input) + transformation = ohe.transform(input) + self.assertIsInstance(transformation, np.ndarray) + self.assertTrue((expected == transformation).all()) From 456b398e4294a5b3481b621e9ca7efc75acb9cef Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 15 Dec 2014 15:42:38 +0100 Subject: [PATCH 027/352] Fix tests --- AutoSklearn/autosklearn.py | 2 -- README.md | 1 + source/first_steps.rst | 4 ++-- tests/components/preprocessing/test_imputation.py | 2 +- tests/test_autosklearn.py | 2 +- 5 files changed, 5 insertions(+), 6 deletions(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index f61afc3774..9e54a56bbb 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -67,8 +67,6 @@ class AutoSklearnClassifier(BaseEstimator, ClassifierMixin): def __init__(self, configuration, random_state=None): # TODO check sklearn version! - - self.random_state = random_state self.configuration = configuration cs = self.get_hyperparameter_search_space() diff --git a/README.md b/README.md index acdb2ac0b9..2d08db1163 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,7 @@ git clone https://bitbucket.org/mfeurer/autosklearn.git cd autosklearn python setup.py install + python setup.py test Installation with `pip` diff --git a/source/first_steps.rst b/source/first_steps.rst index 9c956b0a25..7e5229d487 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -18,11 +18,11 @@ configuration on the iris dataset. >>> np.random.seed(1) >>> np.random.shuffle(indices) >>> configuration_space = AutoSklearnClassifier.get_hyperparameter_search_space() - >>> sampler = RandomSampler(configuration_space, 1) + >>> sampler = RandomSampler(configuration_space, 5) >>> configuration = sampler.sample_configuration() >>> auto = AutoSklearnClassifier(configuration, random_state=1) >>> auto = auto.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = auto.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.90000000000000002 + 0.95999999999999996 diff --git a/tests/components/preprocessing/test_imputation.py b/tests/components/preprocessing/test_imputation.py index 234585af6b..e266a6a7ee 100644 --- a/tests/components/preprocessing/test_imputation.py +++ b/tests/components/preprocessing/test_imputation.py @@ -1,6 +1,6 @@ import unittest -from AutoSklearn.components.static_preprocessing.imputation import Imputation +from AutoSklearn.components.preprocessing.imputation import Imputation from AutoSklearn.util import test_preprocessing_with_iris diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index 1e9deed6d1..a4eef08265 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -51,7 +51,7 @@ def test_default_configuration(self): self.assertAlmostEqual(0.94, sklearn.metrics.accuracy_score(predictions, Y_test)) scores = auto.scores(X_test) - self.assertTrue((scores[3] == [0., 0.2, 0.8]).all()) + self.assertTrue((scores[4] == [0.6, 0.4, 0.]).all()) @unittest.skip("test_check_random_state Not yet Implemented") def test_check_random_state(self): From 9ebdd6877fa62674fcac60fca4de82a97f7f123d Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 15 Dec 2014 16:35:19 +0100 Subject: [PATCH 028/352] Fix errors in tests --- AutoSklearn/util.py | 4 ++-- README.md | 1 + tests/components/classification/test_liblinear.py | 4 ++-- tests/components/classification/test_libsvm_svc.py | 4 ++-- tests/components/classification/test_random_forest.py | 4 ++-- tests/components/preprocessing/test_imputation.py | 4 ++-- tests/components/preprocessing/test_pca.py | 4 ++-- 7 files changed, 13 insertions(+), 12 deletions(-) diff --git a/AutoSklearn/util.py b/AutoSklearn/util.py index 30607361dc..70a422a1b0 100644 --- a/AutoSklearn/util.py +++ b/AutoSklearn/util.py @@ -56,7 +56,7 @@ def get_iris(): return X_train, Y_train, X_test, Y_test -def test_classifier_with_iris(Classifier): +def _test_classifier_with_iris(Classifier): X_train, Y_train, X_test, Y_test = get_iris() configuration_space = Classifier.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() @@ -68,7 +68,7 @@ def test_classifier_with_iris(Classifier): return predictions, Y_test -def test_preprocessing_with_iris(Preprocessor): +def _test_preprocessing_with_iris(Preprocessor): X_train, Y_train, X_test, Y_test = get_iris() configuration_space = Preprocessor.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() diff --git a/README.md b/README.md index 2d08db1163..19ac7560ce 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,6 @@ ### Download and build the documentation ### + pip install scikit-learn==0.15.2 pip install git+https://github.com/mfeurer/HPOlibConfigSpace#egg=HPOlibConfigSpace0.1dev git clone https://bitbucket.org/mfeurer/autosklearn.git cd autosklearn diff --git a/tests/components/classification/test_liblinear.py b/tests/components/classification/test_liblinear.py index 2b38b1d83d..4faf3d0d54 100644 --- a/tests/components/classification/test_liblinear.py +++ b/tests/components/classification/test_liblinear.py @@ -1,11 +1,11 @@ import unittest from AutoSklearn.components.classification.liblinear import LibLinear_SVC -from AutoSklearn.util import test_classifier_with_iris +from AutoSklearn.util import _test_classifier_with_iris class LibLinearComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): - predictions, targets = test_classifier_with_iris(LibLinear_SVC) + predictions, targets = _test_classifier_with_iris(LibLinear_SVC) self.assertTrue(all(targets == predictions)) \ No newline at end of file diff --git a/tests/components/classification/test_libsvm_svc.py b/tests/components/classification/test_libsvm_svc.py index 4c84f4de2a..e00bec4ca1 100644 --- a/tests/components/classification/test_libsvm_svc.py +++ b/tests/components/classification/test_libsvm_svc.py @@ -1,7 +1,7 @@ import unittest from AutoSklearn.components.classification.libsvm_svc import LibSVM_SVC -from AutoSklearn.util import test_classifier_with_iris +from AutoSklearn.util import _test_classifier_with_iris import sklearn.metrics @@ -9,6 +9,6 @@ class LibSVM_SVCComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): - predictions, targets = test_classifier_with_iris(LibSVM_SVC) + predictions, targets = _test_classifier_with_iris(LibSVM_SVC) self.assertAlmostEqual(0.96, sklearn.metrics.accuracy_score(predictions, targets)) diff --git a/tests/components/classification/test_random_forest.py b/tests/components/classification/test_random_forest.py index 2c2ee4d937..e13bdc1114 100644 --- a/tests/components/classification/test_random_forest.py +++ b/tests/components/classification/test_random_forest.py @@ -1,7 +1,7 @@ import unittest from AutoSklearn.components.classification.random_forest import RandomForest -from AutoSklearn.util import test_classifier_with_iris +from AutoSklearn.util import _test_classifier_with_iris import sklearn.metrics @@ -9,6 +9,6 @@ class RandomForestComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): - predictions, targets = test_classifier_with_iris(RandomForest) + predictions, targets = _test_classifier_with_iris(RandomForest) self.assertAlmostEqual(0.94, sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file diff --git a/tests/components/preprocessing/test_imputation.py b/tests/components/preprocessing/test_imputation.py index e266a6a7ee..d8c79b87b3 100644 --- a/tests/components/preprocessing/test_imputation.py +++ b/tests/components/preprocessing/test_imputation.py @@ -1,14 +1,14 @@ import unittest from AutoSklearn.components.preprocessing.imputation import Imputation -from AutoSklearn.util import test_preprocessing_with_iris +from AutoSklearn.util import _test_preprocessing_with_iris class LibLinearComponentTest(unittest.TestCase): def test_default_configuration(self): transformations = [] for i in range(10): - transformation, original = test_preprocessing_with_iris(Imputation) + transformation, original = _test_preprocessing_with_iris(Imputation) self.assertEqual(transformation.shape, original.shape) self.assertTrue((transformation == original).all()) transformations.append(transformation) diff --git a/tests/components/preprocessing/test_pca.py b/tests/components/preprocessing/test_pca.py index 788a37a952..e0484c7c24 100644 --- a/tests/components/preprocessing/test_pca.py +++ b/tests/components/preprocessing/test_pca.py @@ -1,14 +1,14 @@ import unittest from AutoSklearn.components.preprocessing.pca import PCA -from AutoSklearn.util import test_preprocessing_with_iris +from AutoSklearn.util import _test_preprocessing_with_iris class LibLinearComponentTest(unittest.TestCase): def test_default_configuration(self): transformations = [] for i in range(10): - transformation, original = test_preprocessing_with_iris(PCA) + transformation, original = _test_preprocessing_with_iris(PCA) self.assertEqual(transformation.shape, original.shape) self.assertFalse((transformation == original).all()) transformations.append(transformation) From 612b4949209cb1913f86cbc61c06ccb75b6d2a2d Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 15 Dec 2014 16:36:22 +0100 Subject: [PATCH 029/352] Fix config space; forests are set to 11 --- AutoSklearn/components/classification/random_forest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index d123ac15af..a97e593966 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -86,7 +86,7 @@ def get_meta_information(): @staticmethod def get_hyperparameter_search_space(): n_estimators = UniformIntegerHyperparameter( - "n_estimators", 10, 11, default=10) + "n_estimators", 10, 1000, default=10) criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") max_features = UniformFloatHyperparameter( From 4a786c0b064ed0bf1f4a623c65e09e1c30aa5b02 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 16 Dec 2014 11:34:41 +0100 Subject: [PATCH 030/352] Allow test scripts to load different datasets --- AutoSklearn/util.py | 13 +++++----- .../classification/test_liblinear.py | 5 ++-- .../classification/test_libsvm_svc.py | 4 +-- .../classification/test_random_forest.py | 4 +-- .../preprocessing/test_imputation.py | 4 +-- tests/components/preprocessing/test_pca.py | 4 +-- .../components/preprocessing/test_scaling.py | 25 +++++++++++++++++++ tests/test_autosklearn.py | 4 +-- 8 files changed, 45 insertions(+), 18 deletions(-) create mode 100644 tests/components/preprocessing/test_scaling.py diff --git a/AutoSklearn/util.py b/AutoSklearn/util.py index 70a422a1b0..e28725126b 100644 --- a/AutoSklearn/util.py +++ b/AutoSklearn/util.py @@ -40,12 +40,13 @@ def find_sklearn_classifiers(): print classifiers -def get_iris(): - iris = sklearn.datasets.load_iris() +def get_dataset(dataset='iris'): + iris = getattr(sklearn.datasets, "load_%s" % dataset)() X = iris.data Y = iris.target rs = np.random.RandomState(42) indices = np.arange(X.shape[0]) + train_size = len(indices) / 3. * 2. rs.shuffle(indices) X = X[indices] Y = Y[indices] @@ -56,8 +57,8 @@ def get_iris(): return X_train, Y_train, X_test, Y_test -def _test_classifier_with_iris(Classifier): - X_train, Y_train, X_test, Y_test = get_iris() +def _test_classifier(Classifier, dataset='iris'): + X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset) configuration_space = Classifier.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() classifier = Classifier(random_state=1, @@ -68,8 +69,8 @@ def _test_classifier_with_iris(Classifier): return predictions, Y_test -def _test_preprocessing_with_iris(Preprocessor): - X_train, Y_train, X_test, Y_test = get_iris() +def _test_preprocessing(Preprocessor, dataset='iris'): + X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset) configuration_space = Preprocessor.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = Preprocessor(random_state=1, diff --git a/tests/components/classification/test_liblinear.py b/tests/components/classification/test_liblinear.py index 4faf3d0d54..3f732412ac 100644 --- a/tests/components/classification/test_liblinear.py +++ b/tests/components/classification/test_liblinear.py @@ -1,11 +1,12 @@ import unittest from AutoSklearn.components.classification.liblinear import LibLinear_SVC -from AutoSklearn.util import _test_classifier_with_iris +from AutoSklearn.util import _test_classifier class LibLinearComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): - predictions, targets = _test_classifier_with_iris(LibLinear_SVC) + predictions, targets = _test_classifier(LibLinear_SVC, + dataset='iris') self.assertTrue(all(targets == predictions)) \ No newline at end of file diff --git a/tests/components/classification/test_libsvm_svc.py b/tests/components/classification/test_libsvm_svc.py index e00bec4ca1..483cbb4e19 100644 --- a/tests/components/classification/test_libsvm_svc.py +++ b/tests/components/classification/test_libsvm_svc.py @@ -1,7 +1,7 @@ import unittest from AutoSklearn.components.classification.libsvm_svc import LibSVM_SVC -from AutoSklearn.util import _test_classifier_with_iris +from AutoSklearn.util import _test_classifier import sklearn.metrics @@ -9,6 +9,6 @@ class LibSVM_SVCComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): - predictions, targets = _test_classifier_with_iris(LibSVM_SVC) + predictions, targets = _test_classifier(LibSVM_SVC, dataset='iris') self.assertAlmostEqual(0.96, sklearn.metrics.accuracy_score(predictions, targets)) diff --git a/tests/components/classification/test_random_forest.py b/tests/components/classification/test_random_forest.py index e13bdc1114..8e3a1420be 100644 --- a/tests/components/classification/test_random_forest.py +++ b/tests/components/classification/test_random_forest.py @@ -1,7 +1,7 @@ import unittest from AutoSklearn.components.classification.random_forest import RandomForest -from AutoSklearn.util import _test_classifier_with_iris +from AutoSklearn.util import _test_classifier import sklearn.metrics @@ -9,6 +9,6 @@ class RandomForestComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): - predictions, targets = _test_classifier_with_iris(RandomForest) + predictions, targets = _test_classifier(RandomForest, dataset='iris') self.assertAlmostEqual(0.94, sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file diff --git a/tests/components/preprocessing/test_imputation.py b/tests/components/preprocessing/test_imputation.py index d8c79b87b3..946fdf9861 100644 --- a/tests/components/preprocessing/test_imputation.py +++ b/tests/components/preprocessing/test_imputation.py @@ -1,14 +1,14 @@ import unittest from AutoSklearn.components.preprocessing.imputation import Imputation -from AutoSklearn.util import _test_preprocessing_with_iris +from AutoSklearn.util import _test_preprocessing class LibLinearComponentTest(unittest.TestCase): def test_default_configuration(self): transformations = [] for i in range(10): - transformation, original = _test_preprocessing_with_iris(Imputation) + transformation, original = _test_preprocessing(Imputation) self.assertEqual(transformation.shape, original.shape) self.assertTrue((transformation == original).all()) transformations.append(transformation) diff --git a/tests/components/preprocessing/test_pca.py b/tests/components/preprocessing/test_pca.py index e0484c7c24..a9b3d3b513 100644 --- a/tests/components/preprocessing/test_pca.py +++ b/tests/components/preprocessing/test_pca.py @@ -1,14 +1,14 @@ import unittest from AutoSklearn.components.preprocessing.pca import PCA -from AutoSklearn.util import _test_preprocessing_with_iris +from AutoSklearn.util import _test_preprocessing class LibLinearComponentTest(unittest.TestCase): def test_default_configuration(self): transformations = [] for i in range(10): - transformation, original = _test_preprocessing_with_iris(PCA) + transformation, original = _test_preprocessing(PCA) self.assertEqual(transformation.shape, original.shape) self.assertFalse((transformation == original).all()) transformations.append(transformation) diff --git a/tests/components/preprocessing/test_scaling.py b/tests/components/preprocessing/test_scaling.py new file mode 100644 index 0000000000..bb23b493c3 --- /dev/null +++ b/tests/components/preprocessing/test_scaling.py @@ -0,0 +1,25 @@ +import unittest + +import numpy as np +import sklearn.datasets + +from AutoSklearn.components.preprocessing.rescaling import Rescaling +from AutoSklearn.util import _test_preprocessing + + +class LibLinearComponentTest(unittest.TestCase): + def test_boston_is_not_scaled(self): + data = sklearn.datasets.load_boston()['data'] + self.assertGreaterEqual(np.max(data), 100) + + def test_default_configuration(self): + transformations = [] + for i in range(10): + transformation, original = _test_preprocessing(Rescaling, + dataset='boston') + # The maximum is around 1.95 for the transformed array... + self.assertLessEqual(np.max(transformation), 2) + transformations.append(transformation) + if len(transformations) > 1: + self.assertTrue( + (transformations[-1] == transformations[-2]).all()) \ No newline at end of file diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index a4eef08265..c9ffade490 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -16,7 +16,7 @@ from AutoSklearn.components.preprocessor_base import AutoSklearnPreprocessingAlgorithm import AutoSklearn.components.classification as classification_components import AutoSklearn.components.preprocessing as preprocessing_components -from AutoSklearn.util import get_iris +from AutoSklearn.util import get_dataset class TestAutoSKlearnClassifier(unittest.TestCase): # TODO: test for both possible ways to initialize AutoSklearn @@ -44,7 +44,7 @@ def test_default_configuration(self): for i in range(2): cs = AutoSklearnClassifier.get_hyperparameter_search_space() default = cs.get_default_configuration() - X_train, Y_train, X_test, Y_test = get_iris() + X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris') auto = AutoSklearnClassifier(default) auto = auto.fit(X_train, Y_train) predictions = auto.predict(X_test) From b6c6fb781a655906842b9d921401fc78b1136796 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 16 Dec 2014 11:35:00 +0100 Subject: [PATCH 031/352] Add SGD classifier --- AutoSklearn/components/classification/sgd.py | 138 +++++++++++++++++++ misc/classifiers.csv | 2 +- tests/components/classification/test_sgd.py | 15 ++ 3 files changed, 154 insertions(+), 1 deletion(-) create mode 100644 AutoSklearn/components/classification/sgd.py create mode 100644 tests/components/classification/test_sgd.py diff --git a/AutoSklearn/components/classification/sgd.py b/AutoSklearn/components/classification/sgd.py new file mode 100644 index 0000000000..5d30fc2fd0 --- /dev/null +++ b/AutoSklearn/components/classification/sgd.py @@ -0,0 +1,138 @@ +from sklearn.linear_model.stochastic_gradient import SGDClassifier + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter, UnParametrizedHyperparameter, \ + UniformIntegerHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition, OrConjunction + +from ..classification_base import AutoSklearnClassificationAlgorithm + +class SGD(AutoSklearnClassificationAlgorithm): + def __init__(self, loss, penalty, alpha, fit_intercept, n_iter, + learning_rate, class_weight, l1_ratio=0.15, epsilon=0.1, + eta0=0.01, power_t=0.5, random_state=None): + self.loss = loss + self.penalty = penalty + self.alpha = alpha + self.fit_intercept = fit_intercept + self.n_iter = n_iter + self.learning_rate = learning_rate + self.class_weight = class_weight + self.l1_ratio = l1_ratio + self.epsilon = epsilon + self.eta0 = eta0 + self.power_t = power_t + self.random_state = random_state + self.estimator = None + + def fit(self, X, Y): + # TODO: maybe scale training data that its norm becomes 1? + # http://scikit-learn.org/stable/modules/sgd.html#id1 + self.alpha = float(self.alpha) + self.fit_intercept = bool(self.fit_intercept) + self.n_iter = int(self.n_iter) + if self.class_weight == "None": + self.class_weight = None + self.l1_ratio = float(self.l1_ratio) + self.epsilon = float(self.epsilon) + self.eta0 = float(self.eta0) + self.power_t = float(self.power_t) + + self.estimator = SGDClassifier(loss=self.loss, + penalty=self.penalty, + alpha=self.alpha, + fit_intercept=self.fit_intercept, + n_iter=self.n_iter, + learning_rate=self.learning_rate, + class_weight=self.class_weight, + l1_ratio=self.l1_ratio, + epsilon=self.epsilon, + eta0=self.eta0, + power_t=self.power_t, + shuffle=True, + random_state=self.random_state) + return self.estimator.fit(X, Y) + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict(X) + + def scores(self, X): + if self.estimator is None: + raise NotImplementedError() + # TODO figure out if it's better to return proba in the cases where + # the loss function allows for this + return self.estimator.decision_function(X) + + @staticmethod + def get_meta_information(): + return {'shortname': 'SGD Classifier', + 'name': 'Stochastic Gradient Descent Classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': True, + # TODO find out what is best used here! + 'preferred_dtype' : None} + + @staticmethod + def get_hyperparameter_search_space(): + loss = CategoricalHyperparameter("loss", + ["hinge", "log", "modified_huber", "squared_hinge", "perceptron"], + default="hinge") + penalty = CategoricalHyperparameter("penalty", ["l1", "l2", "elasticnet"], + default="l2") + alpha = UniformFloatHyperparameter("alpha", 10**-7, 10**-1, + log=True, default=0.0001) + l1_ratio = UniformFloatHyperparameter("l1_ratio", 0, 1, default=0.15) + fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True") + n_iter = UniformIntegerHyperparameter("n_iter", 5, 1000, default=20) + epsilon = UniformFloatHyperparameter("epsilon", 1e-5, 1e-1, + default=1e-4, log=True) + learning_rate = CategoricalHyperparameter("learning_rate", + ["optimal", "invscaling", "constant"], default="optimal") + eta0 = UniformFloatHyperparameter("eta0", 10**-7, 0.1, default=0.01) + power_t = UniformFloatHyperparameter("power_t", 1e-5, 1, default=0.5) + # This does not allow for other resampling methods! + class_weight = CategoricalHyperparameter("class_weight", + ["None", "auto"], + default="None") + cs = ConfigurationSpace() + cs.add_hyperparameter(loss) + cs.add_hyperparameter(penalty) + cs.add_hyperparameter(alpha) + cs.add_hyperparameter(l1_ratio) + cs.add_hyperparameter(fit_intercept) + cs.add_hyperparameter(n_iter) + cs.add_hyperparameter(epsilon) + cs.add_hyperparameter(learning_rate) + cs.add_hyperparameter(eta0) + cs.add_hyperparameter(power_t) + cs.add_hyperparameter(class_weight) + + # TODO add passive/aggressive here, although not properly documented? + elasticnet = EqualsCondition(l1_ratio, penalty, "elasticnet") + epsilon_condition = EqualsCondition(epsilon, loss, "huber") + # eta0 seems to be always active according to the source code; when + # learning_rate is set to optimial, eta0 is the starting value: + # https://github.com/scikit-learn/scikit-learn/blob/0.15.X/sklearn/linear_model/sgd_fast.pyx + #eta0_and_inv = EqualsCondition(eta0, learning_rate, "invscaling") + #eta0_and_constant = EqualsCondition(eta0, learning_rate, "constant") + #eta0_condition = OrConjunction(eta0_and_inv, eta0_and_constant) + power_t_condition = EqualsCondition(power_t, learning_rate, "invscaling") + + cs.add_condition(elasticnet) + cs.add_condition(epsilon_condition) + cs.add_condition(power_t_condition) + + return cs + + def __str__(self): + return "AutoSklearn StochasticGradientClassifier" diff --git a/misc/classifiers.csv b/misc/classifiers.csv index 6721a03441..9e0896ea9c 100644 --- a/misc/classifiers.csv +++ b/misc/classifiers.csv @@ -7,7 +7,7 @@ Name,class,added, ,, ,, ,, -,, +StochasticGradientDescentClassifier,, ,, ,, ,, diff --git a/tests/components/classification/test_sgd.py b/tests/components/classification/test_sgd.py new file mode 100644 index 0000000000..9b3aacade7 --- /dev/null +++ b/tests/components/classification/test_sgd.py @@ -0,0 +1,15 @@ +import unittest + +from AutoSklearn.components.classification.sgd import SGD +from AutoSklearn.util import _test_classifier + +import sklearn.metrics + + +class RandomForestComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_classifier(SGD, dataset='iris') + self.assertAlmostEqual(0.96, + sklearn.metrics.accuracy_score(predictions, + targets)) \ No newline at end of file From 7e4e0011ca43aa26555bb212620475800682c4e4 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 16 Dec 2014 13:35:01 +0100 Subject: [PATCH 032/352] add gradient boosting --- .../classification/gradient_boosting.py | 239 ++++++++++++++++++ .../classification/test_gradient_boosting.py | 16 ++ 2 files changed, 255 insertions(+) create mode 100644 AutoSklearn/components/classification/gradient_boosting.py create mode 100644 tests/components/classification/test_gradient_boosting.py diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py new file mode 100644 index 0000000000..74820becb3 --- /dev/null +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -0,0 +1,239 @@ +import numpy as np +import sklearn.ensemble + +from HPOlibConfigSpace.conditions import EqualsCondition, OrConjunction + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter + +from ..classification_base import AutoSklearnClassificationAlgorithm + +""" + param_dist = {"max_features": numpy.linspace(0.1, 1, num=10), + "learning_rate": 2**numpy.linspace(-1, -10, num=10), + "max_depth": range(1, 11), + "min_samples_leaf": range(2, 20, 2), + "n_estimators": range(10, 110, 10)} + param_list = [{"max_features": max_features, + "learning_rate": learning_rate, + "max_depth": max_depth, + "min_samples_leaf": min_samples_leaf, + "n_estimators": n_estimators}] + param_list.extend(list(ParameterSampler(param_dist, n_iter=random_iter-1, random_state +""" + + +class GradientBoostingClassifier(AutoSklearnClassificationAlgorithm): + + def __init__(self, learning_rate, n_estimators, subsample, + min_samples_split, min_samples_leaf, max_features, + max_leaf_nodes_or_max_depth, max_depth, + max_leaf_nodes=None, loss='deviance', + warm_start=False, init=None, random_state=None, verbose=0): + """ + Parameters + ---------- + loss : {'deviance'}, optional (default='deviance') + loss function to be optimized. 'deviance' refers to + deviance (= logistic regression) for classification + with probabilistic outputs. + + learning_rate : float, optional (default=0.1) + learning rate shrinks the contribution of each tree by `learning_rate`. + There is a trade-off between learning_rate and n_estimators. + + n_estimators : int (default=100) + The number of boosting stages to perform. Gradient boosting + is fairly robust to over-fitting so a large number usually + results in better performance. + + max_depth : integer, optional (default=3) + maximum depth of the individual regression estimators. The maximum + depth limits the number of nodes in the tree. Tune this parameter + for best performance; the best value depends on the interaction + of the input variables. + Ignored if ``max_samples_leaf`` is not None. + + min_samples_split : integer, optional (default=2) + The minimum number of samples required to split an internal node. + + min_samples_leaf : integer, optional (default=1) + The minimum number of samples required to be at a leaf node. + + subsample : float, optional (default=1.0) + The fraction of samples to be used for fitting the individual base + learners. If smaller than 1.0 this results in Stochastic Gradient + Boosting. `subsample` interacts with the parameter `n_estimators`. + Choosing `subsample < 1.0` leads to a reduction of variance + and an increase in bias. + + max_features : int, float, string or None, optional (default="auto") + The number of features to consider when looking for the best split: + - If int, then consider `max_features` features at each split. + - If float, then `max_features` is a percentage and + `int(max_features * n_features)` features are considered at each + split. + - If "auto", then `max_features=sqrt(n_features)`. + - If "sqrt", then `max_features=sqrt(n_features)`. + - If "log2", then `max_features=log2(n_features)`. + - If None, then `max_features=n_features`. + + Choosing `max_features < n_features` leads to a reduction of variance + and an increase in bias. + + Note: the search for a split does not stop until at least one + valid partition of the node samples is found, even if it requires to + effectively inspect more than ``max_features`` features. + + max_leaf_nodes : int or None, optional (default=None) + Grow trees with ``max_leaf_nodes`` in best-first fashion. + Best nodes are defined as relative reduction in impurity. + If None then unlimited number of leaf nodes. + If not None then ``max_depth`` will be ignored. + + init : BaseEstimator, None, optional (default=None) + An estimator object that is used to compute the initial + predictions. ``init`` has to provide ``fit`` and ``predict``. + If None it uses ``loss.init_estimator``. + + verbose : int, default: 0 + Enable verbose output. If 1 then it prints progress and performance + once in a while (the more trees the lower the frequency). If greater + than 1 then it prints progress and performance for every tree. + + warm_start : bool, default: False + When set to ``True``, reuse the solution of the previous call to fit + and add more estimators to the ensemble, otherwise, just erase the + previous solution. + """ + self.max_leaf_nodes_or_max_depth = str(max_leaf_nodes_or_max_depth) + + if self.max_leaf_nodes_or_max_depth == "max_depth": + self.max_depth = int(max_depth) + self.max_leaf_nodes = None + elif self.max_leaf_nodes_or_max_depth == "max_leaf_nodes": + self.max_depth = None + self.max_leaf_nodes = int(max_leaf_nodes) + else: + raise ValueError("max_leaf_nodes_or_max_depth sould be in " + "('max_leaf_nodes', 'max_depth'): %s" % + self.max_leaf_nodes_or_max_depth) + + self.learning_rate = float(learning_rate) + self.n_estimators = int(n_estimators) + self.subsample = float(subsample) + self.min_samples_split = int(min_samples_split) + self.min_samples_leaf = int(min_samples_leaf) + if max_features in ("sqrt", "log2", "auto"): + raise ValueError("'max_features' should be a float: %s" % max_features) + self.max_features = float(max_features) + if self.max_features > 1: + raise ValueError("'max features' in should be < 1, you set %f" % + self.max_features) + self.loss = loss + self.warm_start = bool(warm_start) + self.init = init + self.random_state = random_state + self.verbose = int(verbose) + + def fit(self, X, Y): + self.estimator = sklearn.ensemble.GradientBoostingClassifier( + learning_rate=self.learning_rate, + n_estimators=self.n_estimators, + subsample=self.subsample, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + max_features=self.max_features, + max_leaf_nodes=self.max_leaf_nodes, + loss=self.loss, + max_depth=self.max_depth, + warm_start=self.warm_start, + init=self.init, + random_state=self.random_state, + verbose=self.verbose + ) + return self.estimator.fit(X, Y) + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def scores(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_meta_information(): + return {'shortname': 'GB', + 'name': 'Gradient Boosting Classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': False, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': False, + # TODO find out what is best used here! + # But rather fortran or C-contiguous? + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(): + learning_rate = UniformFloatHyperparameter( + name="learning_rate", lower=0.0001, upper=1, default=0.1, log=True) + subsample = UniformFloatHyperparameter( + name="subsample", lower=0.1, upper=2, default=1.0, log=False) + + # Unparametrized + max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter( + name="max_leaf_nodes_or_max_depth", value="max_depth") + # CategoricalHyperparameter( + # "max_leaf_nodes_or_max_depth", + # choices=["max_leaf_nodes", "max_depth"], default="max_depth") + max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", + value="None") + # UniformIntegerHyperparameter( + # name="max_leaf_nodes", lower=10, upper=1000, default=) + + # Copied from random_forest.py + n_estimators = UniformIntegerHyperparameter( + name="n_estimators", lower=10, upper=1000, default=10, log=False) + max_features = UniformFloatHyperparameter( + name="max_features", lower=0.01, upper=1.0, default=1.0) + max_depth = UniformIntegerHyperparameter( + name="max_depth", lower=1, upper=10, default=3, log=False) + min_samples_split = UniformIntegerHyperparameter( + name="min_samples_split", lower=1, upper=20, default=2, log=False) + min_samples_leaf = UniformIntegerHyperparameter( + name="min_samples_leaf", lower=1, upper=20, default=1, log=False) + + cs = ConfigurationSpace() + cs.add_hyperparameter(n_estimators) + cs.add_hyperparameter(learning_rate) + cs.add_hyperparameter(max_features) + cs.add_hyperparameter(max_leaf_nodes_or_max_depth) + cs.add_hyperparameter(max_leaf_nodes) + cs.add_hyperparameter(max_depth) + cs.add_hyperparameter(min_samples_split) + cs.add_hyperparameter(min_samples_leaf) + cs.add_hyperparameter(subsample) + + # Conditions + cond_max_leaf_nodes_or_max_depth = \ + EqualsCondition(child=max_leaf_nodes, + parent=max_leaf_nodes_or_max_depth, + value="max_leaf_nodes") + + cs.add_condition(cond_max_leaf_nodes_or_max_depth) + + return cs + + def __str__(self): + return "AutoSklearn GradientBoosting Classifier" diff --git a/tests/components/classification/test_gradient_boosting.py b/tests/components/classification/test_gradient_boosting.py new file mode 100644 index 0000000000..c8562ad8b3 --- /dev/null +++ b/tests/components/classification/test_gradient_boosting.py @@ -0,0 +1,16 @@ +import unittest + +from AutoSklearn.components.classification.gradient_boosting import \ + GradientBoostingClassifier +from AutoSklearn.util import _test_classifier_with_iris + +import sklearn.metrics + + +class GradientBoostingComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = \ + _test_classifier_with_iris(GradientBoostingClassifier) + self.assertAlmostEqual(0.92, + sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file From 8cd00cdd41ab123b9e69534789c580c41019c343 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 16 Dec 2014 13:39:10 +0100 Subject: [PATCH 033/352] remove comment --- .../classification/gradient_boosting.py | 96 +------------------ 1 file changed, 4 insertions(+), 92 deletions(-) diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index 74820becb3..6f3333eec8 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -1,7 +1,7 @@ import numpy as np import sklearn.ensemble -from HPOlibConfigSpace.conditions import EqualsCondition, OrConjunction +from HPOlibConfigSpace.conditions import EqualsCondition from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -10,20 +10,6 @@ from ..classification_base import AutoSklearnClassificationAlgorithm -""" - param_dist = {"max_features": numpy.linspace(0.1, 1, num=10), - "learning_rate": 2**numpy.linspace(-1, -10, num=10), - "max_depth": range(1, 11), - "min_samples_leaf": range(2, 20, 2), - "n_estimators": range(10, 110, 10)} - param_list = [{"max_features": max_features, - "learning_rate": learning_rate, - "max_depth": max_depth, - "min_samples_leaf": min_samples_leaf, - "n_estimators": n_estimators}] - param_list.extend(list(ParameterSampler(param_dist, n_iter=random_iter-1, random_state -""" - class GradientBoostingClassifier(AutoSklearnClassificationAlgorithm): @@ -32,82 +18,7 @@ def __init__(self, learning_rate, n_estimators, subsample, max_leaf_nodes_or_max_depth, max_depth, max_leaf_nodes=None, loss='deviance', warm_start=False, init=None, random_state=None, verbose=0): - """ - Parameters - ---------- - loss : {'deviance'}, optional (default='deviance') - loss function to be optimized. 'deviance' refers to - deviance (= logistic regression) for classification - with probabilistic outputs. - - learning_rate : float, optional (default=0.1) - learning rate shrinks the contribution of each tree by `learning_rate`. - There is a trade-off between learning_rate and n_estimators. - - n_estimators : int (default=100) - The number of boosting stages to perform. Gradient boosting - is fairly robust to over-fitting so a large number usually - results in better performance. - - max_depth : integer, optional (default=3) - maximum depth of the individual regression estimators. The maximum - depth limits the number of nodes in the tree. Tune this parameter - for best performance; the best value depends on the interaction - of the input variables. - Ignored if ``max_samples_leaf`` is not None. - - min_samples_split : integer, optional (default=2) - The minimum number of samples required to split an internal node. - - min_samples_leaf : integer, optional (default=1) - The minimum number of samples required to be at a leaf node. - - subsample : float, optional (default=1.0) - The fraction of samples to be used for fitting the individual base - learners. If smaller than 1.0 this results in Stochastic Gradient - Boosting. `subsample` interacts with the parameter `n_estimators`. - Choosing `subsample < 1.0` leads to a reduction of variance - and an increase in bias. - - max_features : int, float, string or None, optional (default="auto") - The number of features to consider when looking for the best split: - - If int, then consider `max_features` features at each split. - - If float, then `max_features` is a percentage and - `int(max_features * n_features)` features are considered at each - split. - - If "auto", then `max_features=sqrt(n_features)`. - - If "sqrt", then `max_features=sqrt(n_features)`. - - If "log2", then `max_features=log2(n_features)`. - - If None, then `max_features=n_features`. - - Choosing `max_features < n_features` leads to a reduction of variance - and an increase in bias. - - Note: the search for a split does not stop until at least one - valid partition of the node samples is found, even if it requires to - effectively inspect more than ``max_features`` features. - - max_leaf_nodes : int or None, optional (default=None) - Grow trees with ``max_leaf_nodes`` in best-first fashion. - Best nodes are defined as relative reduction in impurity. - If None then unlimited number of leaf nodes. - If not None then ``max_depth`` will be ignored. - - init : BaseEstimator, None, optional (default=None) - An estimator object that is used to compute the initial - predictions. ``init`` has to provide ``fit`` and ``predict``. - If None it uses ``loss.init_estimator``. - - verbose : int, default: 0 - Enable verbose output. If 1 then it prints progress and performance - once in a while (the more trees the lower the frequency). If greater - than 1 then it prints progress and performance for every tree. - - warm_start : bool, default: False - When set to ``True``, reuse the solution of the previous call to fit - and add more estimators to the ensemble, otherwise, just erase the - previous solution. - """ + self.max_leaf_nodes_or_max_depth = str(max_leaf_nodes_or_max_depth) if self.max_leaf_nodes_or_max_depth == "max_depth": @@ -127,7 +38,8 @@ def __init__(self, learning_rate, n_estimators, subsample, self.min_samples_split = int(min_samples_split) self.min_samples_leaf = int(min_samples_leaf) if max_features in ("sqrt", "log2", "auto"): - raise ValueError("'max_features' should be a float: %s" % max_features) + raise ValueError("'max_features' should be a float: %s" % + max_features) self.max_features = float(max_features) if self.max_features > 1: raise ValueError("'max features' in should be < 1, you set %f" % From 2bd5149ccd58c93524151333d19216a831d05743 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 16 Dec 2014 13:46:23 +0100 Subject: [PATCH 034/352] update gb --- misc/classifiers.csv | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/misc/classifiers.csv b/misc/classifiers.csv index 6721a03441..b43d77a426 100644 --- a/misc/classifiers.csv +++ b/misc/classifiers.csv @@ -1,4 +1,4 @@ -Name,class,added, +Name,class,added ,, ,, ,, @@ -20,7 +20,7 @@ RandomForestClassifier,, ,, ,, ,, -,, +GradientBoostingClassifier,,TRUE ,, ,, ,, From 2ff9b5184be82fc0b58986df44e360b24a844f50 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 16 Dec 2014 14:39:35 +0100 Subject: [PATCH 035/352] Change LibSVM_SVC that it emits a valid SMAC configuration --- AutoSklearn/components/classification/libsvm_svc.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/AutoSklearn/components/classification/libsvm_svc.py b/AutoSklearn/components/classification/libsvm_svc.py index b63afa7ee2..0cc0a3d335 100644 --- a/AutoSklearn/components/classification/libsvm_svc.py +++ b/AutoSklearn/components/classification/libsvm_svc.py @@ -1,8 +1,9 @@ import sklearn.svm from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.conditions import EqualsCondition, OrConjunction -from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter,\ +from HPOlibConfigSpace.conditions import EqualsCondition, OrConjunction, \ + InCondition +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter @@ -117,11 +118,9 @@ def get_hyperparameter_search_space(): cs.add_hyperparameter(max_iter) degree_depends_on_poly = EqualsCondition(degree, kernel, "poly") - coef0_depends_on_poly = EqualsCondition(coef0, kernel, "poly") - coef0_depends_on_sigmoid = EqualsCondition(coef0, kernel, "sigmoid") - coe0_conditions = OrConjunction(coef0_depends_on_poly, coef0_depends_on_sigmoid) + coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"]) cs.add_condition(degree_depends_on_poly) - cs.add_condition(coe0_conditions) + cs.add_condition(coef0_condition) return cs From e2cd45ee0c3f565005f242ec4b92f655c845e02e Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 16 Dec 2014 14:40:26 +0100 Subject: [PATCH 036/352] Correct __str__ --- AutoSklearn/components/classification/random_forest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index a97e593966..3e05ee5762 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -111,4 +111,4 @@ def get_hyperparameter_search_space(): return cs def __str__(self): - return "AutoSklearn LibSVM Classifier" + return "AutoSklearn Random Forest" From 180ce91734d1e566b10d7dcaf50ad4620f3a20da Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 16 Dec 2014 14:45:50 +0100 Subject: [PATCH 037/352] add KNN --- .../classification/k_neighbors_classifier.py | 94 +++++++++++++++++++ misc/classifiers.csv | 86 ++++++++--------- .../classification/test_k_nearest_neighbor.py | 16 ++++ 3 files changed, 153 insertions(+), 43 deletions(-) create mode 100644 AutoSklearn/components/classification/k_neighbors_classifier.py create mode 100644 tests/components/classification/test_k_nearest_neighbor.py diff --git a/AutoSklearn/components/classification/k_neighbors_classifier.py b/AutoSklearn/components/classification/k_neighbors_classifier.py new file mode 100644 index 0000000000..34b774f9a4 --- /dev/null +++ b/AutoSklearn/components/classification/k_neighbors_classifier.py @@ -0,0 +1,94 @@ +import sklearn.neighbors + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ + Constant, UnParametrizedHyperparameter, UniformIntegerHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition + +from ..classification_base import AutoSklearnClassificationAlgorithm + + +class KNearestNeighborsClassifier(AutoSklearnClassificationAlgorithm): + + def __init__(self, n_neighbors, weights, metric, algorithm='auto', p=2, + leaf_size=30, random_state=None): + + self.n_neighbors = int(n_neighbors) + if weights not in ("uniform", "distance"): + raise ValueError("'weights' should be in ('uniform', 'distance'): " + "%s" % weights) + self.weights = weights + if metric not in ("euclidean", "manhattan", "chebyshev", "minkowski"): + raise ValueError("'metric' should be in ('euclidean', 'chebyshev', " + "'manhattan', 'minkowski'): %s" % metric) + self.metric = metric + self.algorithm = algorithm + self.p = int(p) + self.leaf_size = int(leaf_size) + self.random_state = random_state + + def fit(self, X, Y): + self.estimator = \ + sklearn.neighbors.KNeighborsClassifier() + return self.estimator.fit(X, Y) + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict(X) + + def scores(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.decision_function(X) + + @staticmethod + def get_meta_information(): + return {'shortname': 'KNN', + 'name': 'K-Nearest Neighbor Classification', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # Find out if this is good because of sparsity + 'prefers_data_normalized': False, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': True, + # TODO find out what is best used here! + 'preferred_dtype' : None} + + @staticmethod + def get_hyperparameter_search_space(): + + n_neighbors = UniformIntegerHyperparameter( + name="n_neighbors", lower=1, upper=100, default=1) + weights = CategoricalHyperparameter( + name="weights", choices=["uniform", "distance"], default="uniform") + metric = UnParametrizedHyperparameter(name="metric", value="minkowski") + algorithm = Constant(name='algorithm', value="auto") + p = CategoricalHyperparameter( + name="p", choices=["1", "2", "5"], default="2") + leaf_size = Constant(name="leaf_size", value="30") + + # Unparametrized + # TODO: If we further parametrize 'metric' we need more metric params + metric = UnParametrizedHyperparameter(name="metric", value="minkowski") + + cs = ConfigurationSpace() + cs.add_hyperparameter(n_neighbors) + cs.add_hyperparameter(weights) + cs.add_hyperparameter(metric) + cs.add_hyperparameter(algorithm) + cs.add_hyperparameter(p) + cs.add_hyperparameter(leaf_size) + + # Conditions + metric_p = EqualsCondition(parent=metric, child=p, value="minkowski") + cs.add_condition(metric_p) + + return cs + + def __str__(self): + return "AutoSklearn K-Neighbors Classifier" diff --git a/misc/classifiers.csv b/misc/classifiers.csv index b43d77a426..2651891f60 100644 --- a/misc/classifiers.csv +++ b/misc/classifiers.csv @@ -1,43 +1,43 @@ -Name,class,added -,, -,, -,, -,, -,, -,, -,, -,, -,, -,, -,, -,, -RandomForestClassifier,,True -,, -,, -,, -,, -,, -,, -,, -,, -GradientBoostingClassifier,,TRUE -,, -,, -,, -,, -SVC,,True -,, -,, -,, -,, -,, -,, -,, -,, -,, -,, -,, -LinearSVC,,True -,, -,, -,, +Name,class,added,needed? +,,, +,,, +,,, +,,, +,,,x +,,, +RandomForestClassifier,,True,x +GradientBoostingClassifier,,True,x +,,, +,,, +,,, +,,, +,,, +,,, +,,, +,,, +,,, +,,, +,,, +,,, +,,, +,,, +,,, +,,, +,,, +,,, +,,, +,,, +,,TRUE,x +,,, +,,, +,,, +,,, +,,, +,,, +,,, +LinearSVC,,True,x +,,, +SVC,,True,x +,,, +,,, +,,, diff --git a/tests/components/classification/test_k_nearest_neighbor.py b/tests/components/classification/test_k_nearest_neighbor.py new file mode 100644 index 0000000000..e343ccdab5 --- /dev/null +++ b/tests/components/classification/test_k_nearest_neighbor.py @@ -0,0 +1,16 @@ +import unittest + +from AutoSklearn.components.classification.k_neighbors_classifier import \ + KNearestNeighborsClassifier +from AutoSklearn.util import _test_classifier_with_iris + +import sklearn.metrics + + +class KNearestNeighborsComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = \ + _test_classifier_with_iris(KNearestNeighborsClassifier) + self.assertAlmostEqual(0.95999999999999, + sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file From 48a81cc1d82ef7ff63f19f68fdab7d36230b474f Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 16 Dec 2014 14:53:59 +0100 Subject: [PATCH 038/352] resort table --- misc/classifiers.csv | 68 ++++++++++++++++++++++---------------------- 1 file changed, 34 insertions(+), 34 deletions(-) diff --git a/misc/classifiers.csv b/misc/classifiers.csv index 7a59a79db6..5472c49941 100644 --- a/misc/classifiers.csv +++ b/misc/classifiers.csv @@ -1,43 +1,43 @@ Name,class,added -,, -,, +,, +,, +,, +,, +,, +,, +RandomForestClassifier,,True +GradientBoostingClassifier,,True +,, ,, -,, -,, +,, +,, ,, -,, -,, -StochasticGradientDescentClassifier,, -,, +,, +,, +,, ,, -,, -RandomForestClassifier,,True +StochasticGradientDescentClassifier,, +,, +,, +,, ,, -,, -,, -,, -,, -,, ,, -KnearestNeighborsClassifier,,True -GradientBoostingClassifier,,True -,, -,, -,, -,, -SVC,,True -,, -,, -,, -,, -,, -,, -,, -,, +,, ,, +,, ,, -,, -LinearSVC,,True -,, +,, +KnearestNeighborsClassifier,,True +,, +,, +,, +,, ,, -,, +,, +,, +LinearSVC,,True +,, +SVC,,True +,, +,, +,, From 2972b87521f44ffe654678370a7cde64b10da767 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 16 Dec 2014 15:43:12 +0100 Subject: [PATCH 039/352] FIX: all (except 4 base) hyperparameters are conditional again --- AutoSklearn/autosklearn.py | 34 ++++++++++++++++++++-------------- tests/test_autosklearn.py | 7 +++++++ 2 files changed, 27 insertions(+), 14 deletions(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index 9e54a56bbb..beaf3d1654 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -1,5 +1,4 @@ -import numpy as np -from numpy import float64 +import copy import sklearn if sklearn.__version__ != "0.15.2": @@ -282,14 +281,18 @@ def get_hyperparameter_search_space(): # retrieve the conditions further down # TODO implement copy for hyperparameters and forbidden and # conditions! - for parameter in available_classifiers[name].\ - get_hyperparameter_search_space().get_hyperparameters(): - parameter.name = "%s:%s" % (name, parameter.name) - cs.add_hyperparameter(parameter) + + classifier_configuration_space = available_classifiers[name]. \ + get_hyperparameter_search_space() + for parameter in classifier_configuration_space.get_hyperparameters(): + new_parameter = copy.deepcopy(parameter) + new_parameter.name = "%s:%s" % (name, new_parameter.name) + cs.add_hyperparameter(new_parameter) # We must only add a condition if the hyperparameter is not # conditional on something else - if cs.get_parents_of(parameter): - condition = EqualsCondition(parameter, classifier, name) + if len(classifier_configuration_space. + get_parents_of(parameter)) == 0: + condition = EqualsCondition(new_parameter, classifier, name) cs.add_condition(condition) for condition in available_classifiers[name]. \ @@ -316,14 +319,17 @@ def get_hyperparameter_search_space(): + ["None"], default='None') cs.add_hyperparameter(preprocessor) for name in available_preprocessors: - for parameter in available_preprocessors[name].\ - get_hyperparameter_search_space().get_hyperparameters(): - parameter.name = "%s:%s" % (name, parameter.name) - cs.add_hyperparameter(parameter) + preprocessor_configuration_space = available_preprocessors[name]. \ + get_hyperparameter_search_space() + for parameter in preprocessor_configuration_space.get_hyperparameters(): + new_parameter = copy.deepcopy(parameter) + new_parameter.name = "%s:%s" % (name, new_parameter.name) + cs.add_hyperparameter(new_parameter) # We must only add a condition if the hyperparameter is not # conditional on something else - if cs.get_parents_of(parameter): - condition = EqualsCondition(parameter, preprocessor, name) + if len(preprocessor_configuration_space. + get_parents_of(parameter)) == 0 and name not in always_active: + condition = EqualsCondition(new_parameter, preprocessor, name) cs.add_condition(condition) for condition in available_preprocessors[name]. \ diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index c9ffade490..ffbe086ed9 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -53,6 +53,13 @@ def test_default_configuration(self): scores = auto.scores(X_test) self.assertTrue((scores[4] == [0.6, 0.4, 0.]).all()) + def test_get_hyperparameter_search_space(self): + cs = AutoSklearnClassifier.get_hyperparameter_search_space() + conditions = cs.get_conditions() + hyperparameters = cs.get_hyperparameters() + self.assertEqual(43, len(hyperparameters)) + self.assertEqual(len(hyperparameters) - 4, len(conditions)) + @unittest.skip("test_check_random_state Not yet Implemented") def test_check_random_state(self): raise NotImplementedError() From eec8adbca8b9a6e228c46dcdb79ae8d93c766aff Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 16 Dec 2014 16:14:21 +0100 Subject: [PATCH 040/352] fix tests --- .../{k_neighbors_classifier.py => k_nearest_neighbors.py} | 0 tests/components/classification/test_gradient_boosting.py | 4 ++-- .../components/classification/test_k_nearest_neighbor.py | 8 ++++---- 3 files changed, 6 insertions(+), 6 deletions(-) rename AutoSklearn/components/classification/{k_neighbors_classifier.py => k_nearest_neighbors.py} (100%) diff --git a/AutoSklearn/components/classification/k_neighbors_classifier.py b/AutoSklearn/components/classification/k_nearest_neighbors.py similarity index 100% rename from AutoSklearn/components/classification/k_neighbors_classifier.py rename to AutoSklearn/components/classification/k_nearest_neighbors.py diff --git a/tests/components/classification/test_gradient_boosting.py b/tests/components/classification/test_gradient_boosting.py index c8562ad8b3..b77ebc4ec8 100644 --- a/tests/components/classification/test_gradient_boosting.py +++ b/tests/components/classification/test_gradient_boosting.py @@ -2,7 +2,7 @@ from AutoSklearn.components.classification.gradient_boosting import \ GradientBoostingClassifier -from AutoSklearn.util import _test_classifier_with_iris +from AutoSklearn.util import _test_classifier import sklearn.metrics @@ -11,6 +11,6 @@ class GradientBoostingComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): predictions, targets = \ - _test_classifier_with_iris(GradientBoostingClassifier) + _test_classifier(GradientBoostingClassifier) self.assertAlmostEqual(0.92, sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file diff --git a/tests/components/classification/test_k_nearest_neighbor.py b/tests/components/classification/test_k_nearest_neighbor.py index e343ccdab5..5699cc497c 100644 --- a/tests/components/classification/test_k_nearest_neighbor.py +++ b/tests/components/classification/test_k_nearest_neighbor.py @@ -1,8 +1,8 @@ import unittest -from AutoSklearn.components.classification.k_neighbors_classifier import \ +from AutoSklearn.components.classification.k_nearest_neighbors import \ KNearestNeighborsClassifier -from AutoSklearn.util import _test_classifier_with_iris +from AutoSklearn.util import _test_classifier import sklearn.metrics @@ -11,6 +11,6 @@ class KNearestNeighborsComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): predictions, targets = \ - _test_classifier_with_iris(KNearestNeighborsClassifier) - self.assertAlmostEqual(0.95999999999999, + _test_classifier(KNearestNeighborsClassifier) + self.assertAlmostEqual(0.959999999999999, sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file From 531282576a9f9c92a2ad0d898eb21efcdccc65f2 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 16 Dec 2014 16:14:54 +0100 Subject: [PATCH 041/352] fix gb --- .../components/classification/gradient_boosting.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index 6f3333eec8..ccc225eb41 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -42,10 +42,10 @@ def __init__(self, learning_rate, n_estimators, subsample, max_features) self.max_features = float(max_features) if self.max_features > 1: - raise ValueError("'max features' in should be < 1, you set %f" % + raise ValueError("'max features' in should be < 1: %f" % self.max_features) self.loss = loss - self.warm_start = bool(warm_start) + self.warm_start = warm_start self.init = init self.random_state = random_state self.verbose = int(verbose) @@ -143,8 +143,13 @@ def get_hyperparameter_search_space(): parent=max_leaf_nodes_or_max_depth, value="max_leaf_nodes") - cs.add_condition(cond_max_leaf_nodes_or_max_depth) + cond2_max_leaf_nodes_or_max_depth = \ + EqualsCondition(child=max_depth, + parent=max_leaf_nodes_or_max_depth, + value="max_depth") + cs.add_condition(cond_max_leaf_nodes_or_max_depth) + cs.add_condition(cond2_max_leaf_nodes_or_max_depth) return cs def __str__(self): From 11383ffc83ac4cf14009114ec21c5a8c8e10a62b Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 16 Dec 2014 16:15:49 +0100 Subject: [PATCH 042/352] add extra trees --- .../components/classification/extra_trees.py | 162 ++++++++++++++++++ misc/classifiers.csv | 2 +- .../classification/test_extra_trees.py | 16 ++ 3 files changed, 179 insertions(+), 1 deletion(-) create mode 100644 AutoSklearn/components/classification/extra_trees.py create mode 100644 tests/components/classification/test_extra_trees.py diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py new file mode 100644 index 0000000000..7b7cedc615 --- /dev/null +++ b/AutoSklearn/components/classification/extra_trees.py @@ -0,0 +1,162 @@ +import numpy as np +import sklearn.ensemble + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition + +from ..classification_base import AutoSklearnClassificationAlgorithm + + +class ExtraTreesClassifier(AutoSklearnClassificationAlgorithm): + + def __init__(self, n_estimators, criterion, use_max_depth, min_samples_leaf, + min_samples_split, max_leaf_nodes_or_max_depth, max_features, + bootstrap=False, max_leaf_nodes=None, max_depth=None, + oob_score=False, n_jobs=1, random_state=None, verbose=0, + min_density=None, compute_importances=None): + + self.n_estimators = int(n_estimators) + if criterion not in ("gini", "entropy"): + raise ValueError("'criterion' is not in ('gini', 'entropy'): " + "%s" % criterion) + self.criterion = criterion + + if max_leaf_nodes_or_max_depth == "max_depth": + self.max_leaf_nodes = None + if use_max_depth == "True": + self.max_depth = int(max_depth) + elif use_max_depth == "False": + self.max_depth = None + else: + self.max_leaf_nodes = int(max_leaf_nodes) + self.max_depth = None + + self.min_samples_leaf = int(min_samples_leaf) + self.min_samples_split = int(min_samples_split) + + self.max_features = float(max_features) + if self.max_features > 1: + raise ValueError("'max features' in should be < 1: %f" % + self.max_features) + + if bootstrap == "True": + self.bootstrap = True + elif bootstrap == "False": + self.bootstrap = False + + self.oob_score = oob_score + self.n_jobs = int(n_jobs) + self.random_state = random_state + self.verbose = int(verbose) + self.min_density = min_density + self.compute_importances = compute_importances + + def fit(self, X, Y): + + self.estimator = sklearn.ensemble.ExtraTreesClassifier( + n_estimators=self.n_estimators, criterion=self.criterion, + max_depth=self.max_depth, min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, bootstrap=self.bootstrap, + max_features=self.max_features, max_leaf_nodes=self.max_leaf_nodes, + oob_score=self.oob_score, n_jobs=self.n_jobs, verbose=self.verbose, + random_state=self.random_state, min_density=self.min_density, + compute_importances=self.compute_importances + ) + return self.estimator.fit(X, Y) + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def scores(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_meta_information(): + return {'shortname': 'ET', + 'name': 'Extra Trees Classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': False, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': False, + # TODO find out what is best used here! + # But rather fortran or C-contiguous? + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(): + + use_max_depth = CategoricalHyperparameter( + name="use_max_depth", choices=("True", "False"), default="False") + bootstrap = CategoricalHyperparameter( + "bootstrap", ["True", "False"], default="False") + + # Copied from random_forest.py + n_estimators = UniformIntegerHyperparameter( + "n_estimators", 10, 1000, default=10) + criterion = CategoricalHyperparameter( + "criterion", ["gini", "entropy"], default="gini") + max_features = UniformFloatHyperparameter( + "max_features", 0.01, 1.0, default=1.0) + min_samples_split = UniformIntegerHyperparameter( + "min_samples_split", 1, 20, default=2) + min_samples_leaf = UniformIntegerHyperparameter( + "min_samples_leaf", 1, 20, default=1) + + # Unparametrized + max_leaf_nodes = UnParametrizedHyperparameter("max_leaf_nodes", "None") + max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter( + name="max_leaf_nodes_or_max_depth", value="max_depth") + # CategoricalHyperparameter( + # "max_leaf_nodes_or_max_depth", + # choices=["max_leaf_nodes", "max_depth"], default="max_depth") + max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", + value="None") + # UniformIntegerHyperparameter( + # name="max_leaf_nodes", lower=10, upper=1000, default=) + max_depth = UnParametrizedHyperparameter(name="max_depth", value="None") + + cs = ConfigurationSpace() + cs.add_hyperparameter(n_estimators) + cs.add_hyperparameter(criterion) + cs.add_hyperparameter(max_features) + cs.add_hyperparameter(use_max_depth) + cs.add_hyperparameter(max_depth) + cs.add_hyperparameter(max_leaf_nodes_or_max_depth) + cs.add_hyperparameter(min_samples_split) + cs.add_hyperparameter(min_samples_leaf) + cs.add_hyperparameter(max_leaf_nodes) + cs.add_hyperparameter(bootstrap) + + # Conditions + cond_max_leaf_nodes_or_max_depth = \ + EqualsCondition(child=max_leaf_nodes, + parent=max_leaf_nodes_or_max_depth, + value="max_leaf_nodes") + cond2_max_leaf_nodes_or_max_depth = \ + EqualsCondition(child=use_max_depth, + parent=max_leaf_nodes_or_max_depth, + value="max_depth") + + cond_max_depth = EqualsCondition(child=max_depth, parent=use_max_depth, + value="True") + cs.add_condition(cond_max_leaf_nodes_or_max_depth) + cs.add_condition(cond2_max_leaf_nodes_or_max_depth) + cs.add_condition(cond_max_depth) + + return cs + + def __str__(self): + return "AutoSklearn Extra Trees" diff --git a/misc/classifiers.csv b/misc/classifiers.csv index 5472c49941..95f2e2f60f 100644 --- a/misc/classifiers.csv +++ b/misc/classifiers.csv @@ -3,7 +3,7 @@ Name,class,added ,, ,, ,, -,, +ExtraTreesClassifier,,True ,, RandomForestClassifier,,True GradientBoostingClassifier,,True diff --git a/tests/components/classification/test_extra_trees.py b/tests/components/classification/test_extra_trees.py new file mode 100644 index 0000000000..a19496012a --- /dev/null +++ b/tests/components/classification/test_extra_trees.py @@ -0,0 +1,16 @@ +import unittest + +from AutoSklearn.components.classification.extra_trees import \ + ExtraTreesClassifier +from AutoSklearn.util import _test_classifier + +import sklearn.metrics + + +class GradientBoostingComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = \ + _test_classifier(ExtraTreesClassifier) + self.assertAlmostEqual(0.959999999999999, + sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file From bf92bb8c43c9d39f6fcbe787a02ccb22521c6f57 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 16 Dec 2014 17:08:56 +0100 Subject: [PATCH 043/352] Add classifiers which we will not consider --- misc/classifiers.csv | 86 ++++++++++++++++++++++---------------------- 1 file changed, 43 insertions(+), 43 deletions(-) diff --git a/misc/classifiers.csv b/misc/classifiers.csv index 5472c49941..b58e663b85 100644 --- a/misc/classifiers.csv +++ b/misc/classifiers.csv @@ -1,43 +1,43 @@ -Name,class,added -,, -,, -,, -,, -,, -,, -RandomForestClassifier,,True -GradientBoostingClassifier,,True -,, -,, -,, -,, -,, -,, -,, -,, -,, -StochasticGradientDescentClassifier,, -,, -,, -,, -,, -,, -,, -,, -,, -,, -,, -KnearestNeighborsClassifier,,True -,, -,, -,, -,, -,, -,, -,, -LinearSVC,,True -,, -SVC,,True -,, -,, -,, +Name,class,added,comment +,,False,Mixin class which adds no functionality except the score function +,,False,Outlier detection +,,False,Please read the module name;) +,,False,This can blow up the configuration space, because we need to define a configured base object. Maybe consider later. +,,, +,,False,Scikit-learn source code says: This class should not be used directly +RandomForestClassifier,,True, +GradientBoostingClassifier,,True, +,,, +,,, +,,False,Mixin but no full model +,,, +,,, +,,, +,,, +,,, +,,False,This class has abstract methods +StochasticGradientDescentClassifier,,True, +,,False,This classifier is in a test module +,,False,This classifier is in a test module +,,False,Is a meta-estimator +,,False,Is a meta-estimator +,,False,Is a meta-estimator +,,, +,,False,Abstract base class for naive Bayes estimators +,,False,Abstract base class for naive Bayes on discrete/categorical data +,,, +,,, +KnearestNeighborsClassifier,,True, +,,, +,,, +,,, +,,False,semi-supervised learning +,,False,semi-supervised learning +,,False,semi-supervised learning +,,False,ABC for LibSVM-based classifiers +LinearSVC,,True, +,,, +SVC,,True, +,,False,This classifier is in a test module +,,, +,,, From 7076c340a9a42546cbe34b4bf21defd7c5c98460 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 16 Dec 2014 17:31:10 +0100 Subject: [PATCH 044/352] Fix search spaces and tests --- .../components/classification/extra_trees.py | 15 +++++++-------- .../classification/gradient_boosting.py | 13 ++++++------- AutoSklearn/components/classification/sgd.py | 2 +- source/first_steps.rst | 3 +-- tests/test_autosklearn.py | 2 +- 5 files changed, 16 insertions(+), 19 deletions(-) diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index 7b7cedc615..1b48f9dff0 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -116,11 +116,9 @@ def get_hyperparameter_search_space(): "min_samples_leaf", 1, 20, default=1) # Unparametrized - max_leaf_nodes = UnParametrizedHyperparameter("max_leaf_nodes", "None") max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter( name="max_leaf_nodes_or_max_depth", value="max_depth") - # CategoricalHyperparameter( - # "max_leaf_nodes_or_max_depth", + # CategoricalHyperparameter("max_leaf_nodes_or_max_depth", # choices=["max_leaf_nodes", "max_depth"], default="max_depth") max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", value="None") @@ -141,10 +139,11 @@ def get_hyperparameter_search_space(): cs.add_hyperparameter(bootstrap) # Conditions - cond_max_leaf_nodes_or_max_depth = \ - EqualsCondition(child=max_leaf_nodes, - parent=max_leaf_nodes_or_max_depth, - value="max_leaf_nodes") + # Not applicable because max_leaf_nodes is no legal value of the parent + #cond_max_leaf_nodes_or_max_depth = \ + # EqualsCondition(child=max_leaf_nodes, + # parent=max_leaf_nodes_or_max_depth, + # value="max_leaf_nodes") cond2_max_leaf_nodes_or_max_depth = \ EqualsCondition(child=use_max_depth, parent=max_leaf_nodes_or_max_depth, @@ -152,7 +151,7 @@ def get_hyperparameter_search_space(): cond_max_depth = EqualsCondition(child=max_depth, parent=use_max_depth, value="True") - cs.add_condition(cond_max_leaf_nodes_or_max_depth) + #cs.add_condition(cond_max_leaf_nodes_or_max_depth) cs.add_condition(cond2_max_leaf_nodes_or_max_depth) cs.add_condition(cond_max_depth) diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index ccc225eb41..f3e5d59aa0 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -106,8 +106,7 @@ def get_hyperparameter_search_space(): # Unparametrized max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter( name="max_leaf_nodes_or_max_depth", value="max_depth") - # CategoricalHyperparameter( - # "max_leaf_nodes_or_max_depth", + # CategoricalHyperparameter("max_leaf_nodes_or_max_depth", # choices=["max_leaf_nodes", "max_depth"], default="max_depth") max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", value="None") @@ -138,17 +137,17 @@ def get_hyperparameter_search_space(): cs.add_hyperparameter(subsample) # Conditions - cond_max_leaf_nodes_or_max_depth = \ - EqualsCondition(child=max_leaf_nodes, - parent=max_leaf_nodes_or_max_depth, - value="max_leaf_nodes") + #cond_max_leaf_nodes_or_max_depth = \ + # EqualsCondition(child=max_leaf_nodes, + # parent=max_leaf_nodes_or_max_depth, + # value="max_leaf_nodes") cond2_max_leaf_nodes_or_max_depth = \ EqualsCondition(child=max_depth, parent=max_leaf_nodes_or_max_depth, value="max_depth") - cs.add_condition(cond_max_leaf_nodes_or_max_depth) + #cs.add_condition(cond_max_leaf_nodes_or_max_depth) cs.add_condition(cond2_max_leaf_nodes_or_max_depth) return cs diff --git a/AutoSklearn/components/classification/sgd.py b/AutoSklearn/components/classification/sgd.py index 5d30fc2fd0..94387ebf85 100644 --- a/AutoSklearn/components/classification/sgd.py +++ b/AutoSklearn/components/classification/sgd.py @@ -119,7 +119,7 @@ def get_hyperparameter_search_space(): # TODO add passive/aggressive here, although not properly documented? elasticnet = EqualsCondition(l1_ratio, penalty, "elasticnet") - epsilon_condition = EqualsCondition(epsilon, loss, "huber") + epsilon_condition = EqualsCondition(epsilon, loss, "modified_huber") # eta0 seems to be always active according to the source code; when # learning_rate is set to optimial, eta0 is the starting value: # https://github.com/scikit-learn/scikit-learn/blob/0.15.X/sklearn/linear_model/sgd_fast.pyx diff --git a/source/first_steps.rst b/source/first_steps.rst index 7e5229d487..416ca84d2d 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -24,5 +24,4 @@ configuration on the iris dataset. >>> auto = auto.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = auto.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.95999999999999996 - + 0.90000000000000002 diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index ffbe086ed9..ebf4bb574f 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -57,7 +57,7 @@ def test_get_hyperparameter_search_space(self): cs = AutoSklearnClassifier.get_hyperparameter_search_space() conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(43, len(hyperparameters)) + self.assertEqual(74, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) @unittest.skip("test_check_random_state Not yet Implemented") From 563941852f930226157910604b01066b7bced961 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 16 Dec 2014 18:42:29 +0100 Subject: [PATCH 045/352] Fix hyperparameter ranges, make __str__ more general --- .../components/classification/extra_trees.py | 15 +++++++++------ .../classification/gradient_boosting.py | 18 +++++++++++------- .../classification/k_nearest_neighbors.py | 5 +---- .../components/classification/liblinear.py | 5 +---- .../components/classification/libsvm_svc.py | 9 +++------ .../components/classification/random_forest.py | 5 +---- AutoSklearn/components/classification_base.py | 3 ++- .../components/preprocessing/imputation.py | 3 ++- AutoSklearn/components/preprocessing/pca.py | 3 ++- .../components/preprocessing/rescaling.py | 3 ++- AutoSklearn/components/preprocessor_base.py | 3 ++- 11 files changed, 36 insertions(+), 36 deletions(-) diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index 1b48f9dff0..01376080c0 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -31,7 +31,10 @@ def __init__(self, n_estimators, criterion, use_max_depth, min_samples_leaf, elif use_max_depth == "False": self.max_depth = None else: - self.max_leaf_nodes = int(max_leaf_nodes) + if max_leaf_nodes == "None": + self.max_leaf_nodes = None + else: + self.max_leaf_nodes = int(max_leaf_nodes) self.max_depth = None self.min_samples_leaf = int(min_samples_leaf) @@ -78,7 +81,7 @@ def scores(self, X): return self.estimator.predict_proba(X) @staticmethod - def get_meta_information(): + def get_properties(): return {'shortname': 'ET', 'name': 'Extra Trees Classifier', 'handles_missing_values': False, @@ -124,7 +127,10 @@ def get_hyperparameter_search_space(): value="None") # UniformIntegerHyperparameter( # name="max_leaf_nodes", lower=10, upper=1000, default=) - max_depth = UnParametrizedHyperparameter(name="max_depth", value="None") + + #max_depth = UnParametrizedHyperparameter(name="max_depth", value="None") + # TODO these are very random guesses + max_depth = UniformIntegerHyperparameter("max_depth", 5, 50) cs = ConfigurationSpace() cs.add_hyperparameter(n_estimators) @@ -156,6 +162,3 @@ def get_hyperparameter_search_space(): cs.add_condition(cond_max_depth) return cs - - def __str__(self): - return "AutoSklearn Extra Trees" diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index f3e5d59aa0..b0ac71bfc4 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -15,18 +15,24 @@ class GradientBoostingClassifier(AutoSklearnClassificationAlgorithm): def __init__(self, learning_rate, n_estimators, subsample, min_samples_split, min_samples_leaf, max_features, - max_leaf_nodes_or_max_depth, max_depth, + max_leaf_nodes_or_max_depth, max_depth=None, max_leaf_nodes=None, loss='deviance', warm_start=False, init=None, random_state=None, verbose=0): self.max_leaf_nodes_or_max_depth = str(max_leaf_nodes_or_max_depth) if self.max_leaf_nodes_or_max_depth == "max_depth": - self.max_depth = int(max_depth) + if max_depth == 'None': + self.max_depth = None + else: + self.max_depth = int(max_depth) self.max_leaf_nodes = None elif self.max_leaf_nodes_or_max_depth == "max_leaf_nodes": self.max_depth = None - self.max_leaf_nodes = int(max_leaf_nodes) + if max_leaf_nodes == 'None': + self.max_leaf_nodes = None + else: + self.max_leaf_nodes = int(max_leaf_nodes) else: raise ValueError("max_leaf_nodes_or_max_depth sould be in " "('max_leaf_nodes', 'max_depth'): %s" % @@ -79,7 +85,7 @@ def scores(self, X): return self.estimator.predict_proba(X) @staticmethod - def get_meta_information(): + def get_properties(): return {'shortname': 'GB', 'name': 'Gradient Boosting Classifier', 'handles_missing_values': False, @@ -101,7 +107,7 @@ def get_hyperparameter_search_space(): learning_rate = UniformFloatHyperparameter( name="learning_rate", lower=0.0001, upper=1, default=0.1, log=True) subsample = UniformFloatHyperparameter( - name="subsample", lower=0.1, upper=2, default=1.0, log=False) + name="subsample", lower=0.01, upper=1.0, default=1.0, log=False) # Unparametrized max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter( @@ -151,5 +157,3 @@ def get_hyperparameter_search_space(): cs.add_condition(cond2_max_leaf_nodes_or_max_depth) return cs - def __str__(self): - return "AutoSklearn GradientBoosting Classifier" diff --git a/AutoSklearn/components/classification/k_nearest_neighbors.py b/AutoSklearn/components/classification/k_nearest_neighbors.py index 34b774f9a4..bd42dd81bc 100644 --- a/AutoSklearn/components/classification/k_nearest_neighbors.py +++ b/AutoSklearn/components/classification/k_nearest_neighbors.py @@ -43,7 +43,7 @@ def scores(self, X): return self.estimator.decision_function(X) @staticmethod - def get_meta_information(): + def get_properties(): return {'shortname': 'KNN', 'name': 'K-Nearest Neighbor Classification', 'handles_missing_values': False, @@ -89,6 +89,3 @@ def get_hyperparameter_search_space(): cs.add_condition(metric_p) return cs - - def __str__(self): - return "AutoSklearn K-Neighbors Classifier" diff --git a/AutoSklearn/components/classification/liblinear.py b/AutoSklearn/components/classification/liblinear.py index af87c32782..6fdd9d3f54 100644 --- a/AutoSklearn/components/classification/liblinear.py +++ b/AutoSklearn/components/classification/liblinear.py @@ -57,7 +57,7 @@ def scores(self, X): return self.estimator.decision_function(X) @staticmethod - def get_meta_information(): + def get_properties(): return {'shortname': 'Liblinear-SVC', 'name': 'Liblinear Support Vector Classification', 'handles_missing_values': False, @@ -121,6 +121,3 @@ def get_hyperparameter_search_space(): cs.add_forbidden_clause(constant_penalty_and_loss) cs.add_forbidden_clause(penalty_and_dual) return cs - - def __str__(self): - return "AutoSklearn Liblinear Classifier" diff --git a/AutoSklearn/components/classification/libsvm_svc.py b/AutoSklearn/components/classification/libsvm_svc.py index 0cc0a3d335..647a47d817 100644 --- a/AutoSklearn/components/classification/libsvm_svc.py +++ b/AutoSklearn/components/classification/libsvm_svc.py @@ -10,8 +10,8 @@ from ..classification_base import AutoSklearnClassificationAlgorithm class LibSVM_SVC(AutoSklearnClassificationAlgorithm): - def __init__(self, C, kernel, shrinking, tol, class_weight, max_iter, - degree=3, gamma=0.1, coef0=0, random_state=None): + def __init__(self, C, kernel, gamma, shrinking, tol, class_weight, max_iter, + degree=3, coef0=0, random_state=None): self.C = C self.kernel = kernel self.degree = degree @@ -65,7 +65,7 @@ def scores(self, X): return self.estimator.decision_function(X) @staticmethod - def get_meta_information(): + def get_properties(): return {'shortname': 'LibSVM-SVC', 'name': 'LibSVM Support Vector Classification', 'handles_missing_values': False, @@ -123,6 +123,3 @@ def get_hyperparameter_search_space(): cs.add_condition(coef0_condition) return cs - - def __str__(self): - return "AutoSklearn LibSVM Classifier" diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index 3e05ee5762..e25503e992 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -66,7 +66,7 @@ def scores(self, X): return self.estimator.predict_proba(X) @staticmethod - def get_meta_information(): + def get_properties(): return {'shortname': 'RF', 'name': 'Random Forest Classifier', 'handles_missing_values': False, @@ -109,6 +109,3 @@ def get_hyperparameter_search_space(): cs.add_hyperparameter(max_leaf_nodes) cs.add_hyperparameter(bootstrap) return cs - - def __str__(self): - return "AutoSklearn Random Forest" diff --git a/AutoSklearn/components/classification_base.py b/AutoSklearn/components/classification_base.py index 5bd76bf0cc..fec0299f30 100644 --- a/AutoSklearn/components/classification_base.py +++ b/AutoSklearn/components/classification_base.py @@ -116,4 +116,5 @@ def get_estimator(self): return self.estimator def __str__(self): - raise NotImplementedError() + name = self.get_properties()['name'] + return "AutoSklearn %" % name diff --git a/AutoSklearn/components/preprocessing/imputation.py b/AutoSklearn/components/preprocessing/imputation.py index f38eb01510..68d15b6812 100644 --- a/AutoSklearn/components/preprocessing/imputation.py +++ b/AutoSklearn/components/preprocessing/imputation.py @@ -49,4 +49,5 @@ def get_hyperparameter_search_space(): return cs def __str__(self): - return "AutoSklearn Imputer to replace missing values." + name = self.get_properties()['name'] + return "AutoSklearn %" % name diff --git a/AutoSklearn/components/preprocessing/pca.py b/AutoSklearn/components/preprocessing/pca.py index 72fdf855bc..44101dcee7 100644 --- a/AutoSklearn/components/preprocessing/pca.py +++ b/AutoSklearn/components/preprocessing/pca.py @@ -73,4 +73,5 @@ def get_hyperparameter_search_space(): return cs def __str__(self): - return "AutoSklearn Principle Component Analysis preprocessor." + name = self.get_properties()['name'] + return "AutoSklearn %" % name diff --git a/AutoSklearn/components/preprocessing/rescaling.py b/AutoSklearn/components/preprocessing/rescaling.py index 7c23861e07..1b87a33d5a 100644 --- a/AutoSklearn/components/preprocessing/rescaling.py +++ b/AutoSklearn/components/preprocessing/rescaling.py @@ -54,4 +54,5 @@ def get_hyperparameter_search_space(): return cs def __str__(self): - return "AutoSklearn Imputer to replace missing values." + name = self.get_properties()['name'] + return "AutoSklearn %" % name diff --git a/AutoSklearn/components/preprocessor_base.py b/AutoSklearn/components/preprocessor_base.py index 56a03f61ad..2dd73c5043 100644 --- a/AutoSklearn/components/preprocessor_base.py +++ b/AutoSklearn/components/preprocessor_base.py @@ -103,4 +103,5 @@ def get_preprocessor(self): return self.preprocessor def __str__(self): - raise NotImplementedError() + name = self.get_properties()['name'] + return "AutoSklearn %" % name From c2e07da18977f56531ec3758ab586a84bcf0586b Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 16 Dec 2014 19:11:52 +0100 Subject: [PATCH 046/352] Add exclude/include classifiers/preprocessors arguments to get_hyperparameter_search_space --- AutoSklearn/autosklearn.py | 88 ++++++++++++++++++++++++++++++++++---- tests/test_autosklearn.py | 24 ++++++++++- 2 files changed, 103 insertions(+), 9 deletions(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index beaf3d1654..62c47efede 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -253,15 +253,54 @@ def add_model_class(self, model): raise NotImplementedError() @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(include_classifiers=None, + exclude_classifiers=None, + include_preprocessors=None, + exclude_preprocessors=None): """Return the configuration space for the CASH problem. + Parameters + ---------- + include_classifiers : list of str + If include_classifiers is given, only the classifiers specified + are used. Specify them by their module name; e.g., to include + only the SVM use :python:`include_classifiers=['libsvm_svc']`. + Cannot be used together with :python:`exclude_classifiers`. + + exclude_classifiers : list of str + If exclude_classifiers is given, only the classifiers specified + are used. Specify them by their module name; e.g., to include + all classifiers except the SVM use + :python:`exclude_classifiers=['libsvm_svc']`. + Cannot be used together with :python:`include_classifiers`. + + include_preprocessors : list of str + If include_preprocessors is given, only the preprocessors specified + are used. Specify them by their module name; e.g., to include + only the PCA use :python:`include_preprocessors=['pca']`. + Cannot be used together with :python:`exclude_preprocessors`. + + exclude_preprocessors : list of str + If include_preprocessors is given, only the preprocessors specified + are used. Specify them by their module name; e.g., to include + all preprocessors except the PCA use + :python:`exclude_preprocessors=['pca']`. + Cannot be used together with :python:`include_preprocessors`. + Returns ------- cs : HPOlibConfigSpace.configuration_space.Configuration The configuration space describing the AutoSklearnClassifier. """ + if include_classifiers is not None and exclude_classifiers is not None: + raise ValueError("The arguments include_classifiers and " + "exclude_classifiers cannot be used together.") + + if include_preprocessors is not None and exclude_preprocessors is not None: + raise ValueError("The arguments include_preprocessors and " + "exclude_preprocessors cannot be used together.") + always_active = ["imputation", "rescaling"] cs = ConfigurationSpace() @@ -271,11 +310,29 @@ def get_hyperparameter_search_space(): available_preprocessors = \ components.preprocessing_components._preprocessors - classifier = CategoricalHyperparameter("classifier", - [name for name in available_classifiers if name not in always_active], - default='random_forest') - cs.add_hyperparameter(classifier) + names = [] + names_ = [] for name in available_classifiers: + if name in always_active: + names_.append(name) + continue + elif include_classifiers is not None and \ + name not in include_classifiers: + continue + elif exclude_classifiers is not None and \ + name in exclude_classifiers: + continue + names.append(name) + + if len(names + names_) == 0: + raise ValueError("No classifier to build a configuration space " + "for...") + + classifier = CategoricalHyperparameter("classifier", names, + default='random_forest' if 'random_forest' in names else names[0]) + cs.add_hyperparameter(classifier) + for name in names + names_: + # We have to retrieve the configuration space every time because # we change the objects it returns. If we reused it, we could not # retrieve the conditions further down @@ -314,11 +371,26 @@ def get_hyperparameter_search_space(): dlc.hyperparameter.name) cs.add_forbidden_clause(forbidden_clause) + + names = [] + names_ = [] + for name in available_preprocessors: + if name in always_active: + names_.append(name) + continue + elif include_preprocessors is not None and \ + name not in include_preprocessors: + continue + elif exclude_preprocessors is not None and \ + name in exclude_preprocessors: + continue + names.append(name) + preprocessor = CategoricalHyperparameter("preprocessor", - [name for name in available_preprocessors if name not in always_active] - + ["None"], default='None') + ["None"] + names, + default='None') cs.add_hyperparameter(preprocessor) - for name in available_preprocessors: + for name in names + names_: preprocessor_configuration_space = available_preprocessors[name]. \ get_hyperparameter_search_space() for parameter in preprocessor_configuration_space.get_hyperparameters(): diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index ebf4bb574f..81f8ca56a7 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -9,7 +9,8 @@ import sklearn.ensemble import sklearn.svm -from HPOlibConfigSpace.configuration_space import Configuration, ConfigurationSpace +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from AutoSklearn.autosklearn import AutoSklearnClassifier from AutoSklearn.components.classification_base import AutoSklearnClassificationAlgorithm @@ -60,6 +61,27 @@ def test_get_hyperparameter_search_space(self): self.assertEqual(74, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) + def test_get_hyperparameter_search_space_include_exclude_models(self): + cs = AutoSklearnClassifier.get_hyperparameter_search_space( + include_classifiers=['libsvm_svc']) + self.assertEqual(cs.get_hyperparameter('classifier'), + CategoricalHyperparameter('classifier', ['libsvm_svc'])) + + cs = AutoSklearnClassifier.get_hyperparameter_search_space( + exclude_classifiers=['libsvm_svc']) + self.assertNotIn('libsvm_svc', str(cs)) + + cs = AutoSklearnClassifier.get_hyperparameter_search_space( + include_preprocessors=['pca']) + self.assertEqual(cs.get_hyperparameter('preprocessor'), + CategoricalHyperparameter('preprocessor', ["None", 'pca'])) + + cs = AutoSklearnClassifier.get_hyperparameter_search_space( + exclude_preprocessors=['pca']) + self.assertNotIn('pca', str(cs)) + + + @unittest.skip("test_check_random_state Not yet Implemented") def test_check_random_state(self): raise NotImplementedError() From 8d5a182c97853922c11ddee0e7ad179dc72cb8f5 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 16 Dec 2014 19:46:42 +0100 Subject: [PATCH 047/352] Add classifier restriction with flags --- AutoSklearn/autosklearn.py | 15 +++++++++- AutoSklearn/components/classification/sgd.py | 2 +- tests/test_autosklearn.py | 29 +++++++++++++++++++- 3 files changed, 43 insertions(+), 3 deletions(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index 62c47efede..0e1fbdce19 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -256,7 +256,10 @@ def add_model_class(self, model): def get_hyperparameter_search_space(include_classifiers=None, exclude_classifiers=None, include_preprocessors=None, - exclude_preprocessors=None): + exclude_preprocessors=None, + multiclass=False, + multilabel=False, + sparse=False): """Return the configuration space for the CASH problem. Parameters @@ -322,6 +325,16 @@ def get_hyperparameter_search_space(include_classifiers=None, elif exclude_classifiers is not None and \ name in exclude_classifiers: continue + + if multiclass is True and available_classifiers[name]. \ + get_properties()['handles_multiclass'] == False: + continue + if multilabel is True and available_classifiers[name]. \ + get_properties()['handles_multilabel'] == False: + continue + if sparse is True and available_classifiers[name]. \ + get_properties()['handles_sparse'] == False: + continue names.append(name) if len(names + names_) == 0: diff --git a/AutoSklearn/components/classification/sgd.py b/AutoSklearn/components/classification/sgd.py index 94387ebf85..fe3bc558ea 100644 --- a/AutoSklearn/components/classification/sgd.py +++ b/AutoSklearn/components/classification/sgd.py @@ -67,7 +67,7 @@ def scores(self, X): return self.estimator.decision_function(X) @staticmethod - def get_meta_information(): + def get_properties(): return {'shortname': 'SGD Classifier', 'name': 'Stochastic Gradient Descent Classifier', 'handles_missing_values': False, diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index 81f8ca56a7..2b59843111 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -80,7 +80,34 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): exclude_preprocessors=['pca']) self.assertNotIn('pca', str(cs)) - + def test_get_hyperparameter_search_space_dataset_properties(self): + full_cs = AutoSklearnClassifier.get_hyperparameter_search_space() + cs_mc = AutoSklearnClassifier.get_hyperparameter_search_space( + multiclass=True) + self.assertEqual(full_cs, cs_mc) + + cs_ml = AutoSklearnClassifier.get_hyperparameter_search_space( + multilabel=True) + self.assertNotIn('k_nearest_neighbors', str(cs_ml)) + self.assertNotIn('liblinear', str(cs_ml)) + self.assertNotIn('libsvm_svc', str(cs_ml)) + self.assertNotIn('sgd', str(cs_ml)) + + cs_sp = AutoSklearnClassifier.get_hyperparameter_search_space( + sparse=True) + self.assertNotIn('extra_trees', str(cs_sp)) + self.assertNotIn('gradient_boosting', str(cs_sp)) + self.assertNotIn('random_forest', str(cs_sp)) + + cs_mc_ml = AutoSklearnClassifier.get_hyperparameter_search_space( + multiclass=True, multilabel=True) + self.assertEqual(cs_ml, cs_mc_ml) + + self.assertRaisesRegexp(ValueError, + "No classifier to build a configuration space " + "for...", AutoSklearnClassifier. + get_hyperparameter_search_space, + multiclass=True, multilabel=True, sparse=True) @unittest.skip("test_check_random_state Not yet Implemented") def test_check_random_state(self): From b2be77d69e871fc9a8a0de71a6af539115f01ced Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 17 Dec 2014 19:22:21 +0100 Subject: [PATCH 048/352] Fix kNN and RF --- .../components/classification/k_nearest_neighbors.py | 6 +++--- AutoSklearn/components/classification/random_forest.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/AutoSklearn/components/classification/k_nearest_neighbors.py b/AutoSklearn/components/classification/k_nearest_neighbors.py index bd42dd81bc..a5ccc4ddef 100644 --- a/AutoSklearn/components/classification/k_nearest_neighbors.py +++ b/AutoSklearn/components/classification/k_nearest_neighbors.py @@ -40,7 +40,7 @@ def predict(self, X): def scores(self, X): if self.estimator is None: raise NotImplementedError() - return self.estimator.decision_function(X) + return self.estimator.predict_proba(X) @staticmethod def get_properties(): @@ -69,8 +69,8 @@ def get_hyperparameter_search_space(): metric = UnParametrizedHyperparameter(name="metric", value="minkowski") algorithm = Constant(name='algorithm', value="auto") p = CategoricalHyperparameter( - name="p", choices=["1", "2", "5"], default="2") - leaf_size = Constant(name="leaf_size", value="30") + name="p", choices=[1, 2, 5], default=2) + leaf_size = Constant(name="leaf_size", value=30) # Unparametrized # TODO: If we further parametrize 'metric' we need more metric params diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index e25503e992..d670953391 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -90,7 +90,7 @@ def get_hyperparameter_search_space(): criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") max_features = UniformFloatHyperparameter( - "max_features", 0.01, 1.0, default=1.0) + "max_features", 0.01, 1.0, default=0.1) max_depth = UnParametrizedHyperparameter("max_depth", "None") min_samples_split = UniformIntegerHyperparameter( "min_samples_split", 1, 20, default=2) From 2da01409ffd01e7436b8a7967caf289a7ab85e90 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 18 Dec 2014 13:22:29 +0100 Subject: [PATCH 049/352] Add random sampling of configurations --- misc/random_sampling.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 misc/random_sampling.py diff --git a/misc/random_sampling.py b/misc/random_sampling.py new file mode 100644 index 0000000000..24fd13c5f3 --- /dev/null +++ b/misc/random_sampling.py @@ -0,0 +1,24 @@ +from AutoSklearn.autosklearn import AutoSklearnClassifier +from HPOlibConfigSpace.random_sampler import RandomSampler +import sklearn.datasets +import sklearn.metrics +import numpy as np + +iris = sklearn.datasets.load_iris() +X = iris.data +Y = iris.target +indices = np.arange(X.shape[0]) +np.random.shuffle(indices) +configuration_space = AutoSklearnClassifier.get_hyperparameter_search_space() +sampler = RandomSampler(configuration_space, 1) +for i in range(10000): + configuration = sampler.sample_configuration() + auto = AutoSklearnClassifier(configuration) + try: + auto = auto.fit(X[indices[:100]], Y[indices[:100]]) + except Exception as e: + print configuration + print e + continue + predictions = auto.predict(X[indices[100:]]) + print sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) \ No newline at end of file From 4968047aa03ed8f5fb8e7c4d618ebda1244f769f Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 18 Dec 2014 16:29:08 +0100 Subject: [PATCH 050/352] Add warning to 1HotEncoder documentation; GradientBoost cannot handle multilabel data --- AutoSklearn/components/classification/gradient_boosting.py | 2 +- AutoSklearn/implementations/OneHotEncoder.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index b0ac71bfc4..5fe984143b 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -95,7 +95,7 @@ def get_properties(): # TODO find out if this is good because of sparcity... 'prefers_data_normalized': False, 'handles_multiclass': True, - 'handles_multilabel': True, + 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, # TODO find out what is best used here! diff --git a/AutoSklearn/implementations/OneHotEncoder.py b/AutoSklearn/implementations/OneHotEncoder.py index e4e2584750..f14fb77e17 100644 --- a/AutoSklearn/implementations/OneHotEncoder.py +++ b/AutoSklearn/implementations/OneHotEncoder.py @@ -1,5 +1,3 @@ -import numbers - import numpy as np from scipy import sparse @@ -67,7 +65,9 @@ def _transform_selected(X, transform, selected="all", copy=True): class OneHotEncoder(BaseEstimator, TransformerMixin): - """Encode categorical integer features using a one-hot aka one-of-K scheme. + """Don't trust the documentation of this module! + + Encode categorical integer features using a one-hot aka one-of-K scheme. The input to this transformer should be a matrix of integers, denoting the values taken on by categorical (discrete) features. The output will be From b00c42347d9fe657df67047389af22305987ad88 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 18 Dec 2014 16:33:39 +0100 Subject: [PATCH 051/352] Add StandardScaler which works on sparse data --- .../components/preprocessing/rescaling.py | 3 +- AutoSklearn/implementations/StandardScaler.py | 200 ++++++++++++++++++ tests/implementations/test_standard_scaler.py | 129 +++++++++++ 3 files changed, 331 insertions(+), 1 deletion(-) create mode 100644 AutoSklearn/implementations/StandardScaler.py create mode 100644 tests/implementations/test_standard_scaler.py diff --git a/AutoSklearn/components/preprocessing/rescaling.py b/AutoSklearn/components/preprocessing/rescaling.py index 1b87a33d5a..d43dc6940e 100644 --- a/AutoSklearn/components/preprocessing/rescaling.py +++ b/AutoSklearn/components/preprocessing/rescaling.py @@ -3,6 +3,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter +from ...implementations.StandardScaler import StandardScaler from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm @@ -15,7 +16,7 @@ def fit(self, X, Y): if self.strategy == "min/max": self.preprocessor = sklearn.preprocessing.MinMaxScaler(copy=False) elif self.strategy == "standard": - self.preprocessor = sklearn.preprocessing.StandardScaler(copy=False) + self.preprocessor = StandardScaler(copy=False) else: raise ValueError(self.strategy) self.preprocessor.fit(X, Y) diff --git a/AutoSklearn/implementations/StandardScaler.py b/AutoSklearn/implementations/StandardScaler.py new file mode 100644 index 0000000000..7e5c9a674d --- /dev/null +++ b/AutoSklearn/implementations/StandardScaler.py @@ -0,0 +1,200 @@ +import numpy as np +from scipy import sparse + +from sklearn.base import BaseEstimator, TransformerMixin +from sklearn.utils import check_arrays, warn_if_not_float +from sklearn.utils.sparsefuncs import inplace_column_scale, \ + mean_variance_axis0 + + +def _mean_and_std(X, axis=0, with_mean=True, with_std=True): + """Compute mean and std deviation for centering, scaling. + Zero valued std components are reset to 1.0 to avoid NaNs when scaling. + """ + X = np.asarray(X) + Xr = np.rollaxis(X, axis) + + if with_mean: + mean_ = Xr.mean(axis=0) + else: + mean_ = None + + if with_std: + std_ = Xr.std(axis=0) + if isinstance(std_, np.ndarray): + std_[std_ == 0.0] = 1.0 + elif std_ == 0.: + std_ = 1. + else: + std_ = None + + return mean_, std_ + + +class StandardScaler(BaseEstimator, TransformerMixin): + """Standardize features by removing the mean and scaling to unit variance + Centering and scaling happen independently on each feature by computing + the relevant statistics on the samples in the training set. Mean and + standard deviation are then stored to be used on later data using the + `transform` method. + Standardization of a dataset is a common requirement for many + machine learning estimators: they might behave badly if the + individual feature do not more or less look like standard normally + distributed data (e.g. Gaussian with 0 mean and unit variance). + For instance many elements used in the objective function of + a learning algorithm (such as the RBF kernel of Support Vector + Machines or the L1 and L2 regularizers of linear models) assume that + all features are centered around 0 and have variance in the same + order. If a feature has a variance that is orders of magnitude larger + that others, it might dominate the objective function and make the + estimator unable to learn from other features correctly as expected. + Parameters + ---------- + with_mean : boolean, True by default + If True, center the data before scaling. + This does not work (and will raise an exception) when attempted on + sparse matrices, because centering them entails building a dense + matrix which in common use cases is likely to be too large to fit in + memory. + + with_std : boolean, True by default + If True, scale the data to unit variance (or equivalently, + unit standard deviation). + + copy : boolean, optional, default is True + If False, try to avoid a copy and do inplace scaling instead. + This is not guaranteed to always work inplace; e.g. if the data is + not a NumPy array or scipy.sparse CSR matrix, a copy may still be + returned. + + Attributes + ---------- + `mean_` : array of floats with shape [n_features] + The mean value for each feature in the training set. + + `std_` : array of floats with shape [n_features] + The standard deviation for each feature in the training set. + + See also + -------- + :func:`sklearn.preprocessing.scale` to perform centering and + scaling without using the ``Transformer`` object oriented API + + :class:`sklearn.decomposition.RandomizedPCA` with `whiten=True` + to further remove the linear correlation across features. + """ + + + def __init__(self, copy=True, with_mean=True, with_std=True, + center_sparse=True): + self.with_mean = with_mean + self.with_std = with_std + self.copy = copy + self.center_sparse = center_sparse + + def fit(self, X, y=None): + """Don't trust the documentation of this module! + + Compute the mean and std to be used for later scaling. + + Parameters + ---------- + X : array-like or CSR matrix with shape [n_samples, n_features] + The data used to compute the mean and standard deviation + used for later scaling along the features axis. + """ + X = check_arrays(X, copy=self.copy, sparse_format="csr")[0] + if warn_if_not_float(X, estimator=self): + X = X.astype(np.float) + if sparse.issparse(X): + if self.center_sparse: + # This only works for csr matrices... + self.mean_ = [X.data[X.indices == i].mean() + for i in range(X.shape[1])] + var = np.array([X.data[X.indices == i].var() + for i in range(X.shape[1])]) + self.std_ = np.sqrt(var) + self.std_[var == 0.0] = 1.0 + return self + elif self.with_mean: + raise ValueError( + "Cannot center sparse matrices: pass `with_mean=False` " + "instead. See docstring for motivation and alternatives.") + else: + self.mean_ = None + + if self.with_std: + var = mean_variance_axis0(X)[1] + self.std_ = np.sqrt(var) + self.std_[var == 0.0] = 1.0 + else: + self.std_ = None + return self + else: + self.mean_, self.std_ = _mean_and_std( + X, axis=0, with_mean=self.with_mean, with_std=self.with_std) + return self + + def transform(self, X, y=None, copy=None): + """Perform standardization by centering and scaling + + Parameters + ---------- + X : array-like with shape [n_samples, n_features] + The data used to scale along the features axis. + """ + copy = copy if copy is not None else self.copy + X = check_arrays(X, copy=copy, sparse_format="csr")[0] + if warn_if_not_float(X, estimator=self): + X = X.astype(np.float) + if sparse.issparse(X): + if self.center_sparse: + for i in range(X.shape[1]): + X.data[X.indices == i] -= self.mean_[i] + elif self.with_mean: + raise ValueError( + "Cannot center sparse matrices: pass `with_mean=False` " + "instead. See docstring for motivation and alternatives.") + else: + pass + + if self.std_ is not None: + inplace_column_scale(X, 1 / self.std_) + else: + if self.with_mean: + X -= self.mean_ + if self.with_std: + X /= self.std_ + return X + + + def inverse_transform(self, X, copy=None): + """Scale back the data to the original representation + + Parameters + ---------- + X : array-like with shape [n_samples, n_features] + The data used to scale along the features axis. + """ + copy = copy if copy is not None else self.copy + if sparse.issparse(X): + if self.with_mean: + raise ValueError( + "Cannot uncenter sparse matrices: pass `with_mean=False` " + "instead See docstring for motivation and alternatives.") + if not sparse.isspmatrix_csr(X): + X = X.tocsr() + copy = False + if copy: + X = X.copy() + if self.std_ is not None: + inplace_column_scale(X, self.std_) + else: + X = np.asarray(X) + if copy: + X = X.copy() + if self.with_std: + X *= self.std_ + if self.with_mean: + X += self.mean_ + return X diff --git a/tests/implementations/test_standard_scaler.py b/tests/implementations/test_standard_scaler.py new file mode 100644 index 0000000000..21b1c2ee1e --- /dev/null +++ b/tests/implementations/test_standard_scaler.py @@ -0,0 +1,129 @@ +import unittest + +import numpy as np +import scipy.sparse +from sklearn.utils.testing import assert_array_almost_equal +from sklearn.preprocessing.data import scale +from sklearn.utils.sparsefuncs import inplace_column_scale, \ + mean_variance_axis0 + +from AutoSklearn.implementations.StandardScaler import StandardScaler + +matrix1 = [[0, 1, 2], + [0, 1, 2], + [0, 1, 2]] + + +class TestStandardScaler(unittest.TestCase): + def test_scaler_1d(self): + """Test scaling of dataset along single axis""" + rng = np.random.RandomState(0) + X = rng.randn(5) + X_orig_copy = X.copy() + + scaler = StandardScaler() + X_scaled = scaler.fit(X).transform(X, copy=False) + assert_array_almost_equal(X_scaled.mean(axis=0), 0.0) + assert_array_almost_equal(X_scaled.std(axis=0), 1.0) + + # check inverse transform + X_scaled_back = scaler.inverse_transform(X_scaled) + assert_array_almost_equal(X_scaled_back, X_orig_copy) + + # Test with 1D list + X = [0., 1., 2, 0.4, 1.] + scaler = StandardScaler() + X_scaled = scaler.fit(X).transform(X, copy=False) + assert_array_almost_equal(X_scaled.mean(axis=0), 0.0) + assert_array_almost_equal(X_scaled.std(axis=0), 1.0) + + X_scaled = scale(X) + assert_array_almost_equal(X_scaled.mean(axis=0), 0.0) + assert_array_almost_equal(X_scaled.std(axis=0), 1.0) + + # Test with sparse list + X = scipy.sparse.coo_matrix((np.random.random((10,)), + ([i**2 for i in range(10)], + [0 for i in range(10)]))) + X = X.tocsr() + scaler = StandardScaler() + X_scaled = scaler.fit(X).transform(X, copy=False) + + self.assertFalse(np.any(np.isnan(X_scaled.data))) + self.assertAlmostEqual(X_scaled.mean(axis=0), 0) + assert_array_almost_equal(np.sqrt([X.data[X.indices == i].var() + for i in range(X.shape[1])]), 1) + + # Check that X has not been copied + self.assertTrue(X_scaled is X) + # Check that the matrix is still sparse + self.assertEqual(len(X.indices), 10) + + def test_scaler_2d_arrays(self): + """Test scaling of 2d array along first axis""" + rng = np.random.RandomState(0) + X = rng.randn(4, 5) + X[:, 0] = 0.0 # first feature is always of zero + + scaler = StandardScaler() + X_scaled = scaler.fit(X).transform(X, copy=True) + self.assertFalse(np.any(np.isnan(X_scaled))) + + assert_array_almost_equal(X_scaled.mean(axis=0), 5 * [0.0]) + assert_array_almost_equal(X_scaled.std(axis=0), [0., 1., 1., 1., 1.]) + # Check that X has been copied + self.assertTrue(X_scaled is not X) + + # check inverse transform + X_scaled_back = scaler.inverse_transform(X_scaled) + self.assertTrue(X_scaled_back is not X) + self.assertTrue(X_scaled_back is not X_scaled) + assert_array_almost_equal(X_scaled_back, X) + + X_scaled = scale(X, axis=1, with_std=False) + self.assertFalse(np.any(np.isnan(X_scaled))) + assert_array_almost_equal(X_scaled.mean(axis=1), 4 * [0.0]) + X_scaled = scale(X, axis=1, with_std=True) + self.assertFalse(np.any(np.isnan(X_scaled))) + assert_array_almost_equal(X_scaled.mean(axis=1), 4 * [0.0]) + assert_array_almost_equal(X_scaled.std(axis=1), 4 * [1.0]) + # Check that the data hasn't been modified + self.assertTrue(X_scaled is not X) + + X_scaled = scaler.fit(X).transform(X, copy=False) + self.assertFalse(np.any(np.isnan(X_scaled))) + assert_array_almost_equal(X_scaled.mean(axis=0), 5 * [0.0]) + assert_array_almost_equal(X_scaled.std(axis=0), [0., 1., 1., 1., 1.]) + # Check that X has not been copied + self.assertTrue(X_scaled is X) + + X = rng.randn(4, 5) + X[:, 0] = 1.0 # first feature is a constant, non zero feature + scaler = StandardScaler() + X_scaled = scaler.fit(X).transform(X, copy=True) + self.assertFalse(np.any(np.isnan(X_scaled))) + assert_array_almost_equal(X_scaled.mean(axis=0), 5 * [0.0]) + assert_array_almost_equal(X_scaled.std(axis=0), [0., 1., 1., 1., 1.]) + # Check that X has not been copied + self.assertTrue(X_scaled is not X) + + # Same thing for sparse matrices... + X = scipy.sparse.coo_matrix((np.random.random((12,)), + ([i for i in range(12)], + [int(i / 3) for i in range(12)]))) + X = X.tocsr() + scaler = StandardScaler() + X_scaled = scaler.fit(X).transform(X, copy=False) + + self.assertFalse(np.any(np.isnan(X_scaled.data))) + assert_array_almost_equal(X_scaled.mean(axis=0), + np.zeros((1, 4), dtype=np.float64)) + assert_array_almost_equal(np.sqrt([X.data[X.indices == i].var() + for i in range(X.shape[1])]).reshape((1, 4)), + np.ones((1, 4), dtype=np.float64)) + + # Check that X has not been copied + self.assertTrue(X_scaled is X) + # Check that the matrix is still sparse + self.assertEqual(len(X.indices), 12) + From 9e0afc756fcf4311e19aac5168ae52a30ebc7a6e Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 19 Dec 2014 09:55:56 +0100 Subject: [PATCH 052/352] chenge '==False' to 'is False' --- AutoSklearn/autosklearn.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index 0e1fbdce19..f7320bc30e 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -327,13 +327,13 @@ def get_hyperparameter_search_space(include_classifiers=None, continue if multiclass is True and available_classifiers[name]. \ - get_properties()['handles_multiclass'] == False: + get_properties()['handles_multiclass'] is False: continue if multilabel is True and available_classifiers[name]. \ - get_properties()['handles_multilabel'] == False: + get_properties()['handles_multilabel'] is False: continue if sparse is True and available_classifiers[name]. \ - get_properties()['handles_sparse'] == False: + get_properties()['handles_sparse'] is False: continue names.append(name) From 812a030e7c11e862b53e8e4716e4613999737cdc Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 19 Dec 2014 09:56:22 +0100 Subject: [PATCH 053/352] make get_properties and get_hyperparameter_search_space static --- AutoSklearn/components/classification_base.py | 6 ++++-- AutoSklearn/components/preprocessor_base.py | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/AutoSklearn/components/classification_base.py b/AutoSklearn/components/classification_base.py index fec0299f30..3c4a950f72 100644 --- a/AutoSklearn/components/classification_base.py +++ b/AutoSklearn/components/classification_base.py @@ -8,7 +8,8 @@ def __init__(self): self.estimator = None self.properties = None - def get_properties(self): + @staticmethod + def get_properties(): """Get the properties of the underlying algorithm. These are: * Short name @@ -40,7 +41,8 @@ def get_properties(self): """ raise NotImplementedError() - def get_hyperparameter_search_space(self): + @staticmethod + def get_hyperparameter_search_space(): """Return the configuration space of this classification algorithm. Returns diff --git a/AutoSklearn/components/preprocessor_base.py b/AutoSklearn/components/preprocessor_base.py index 2dd73c5043..e2d398cec3 100644 --- a/AutoSklearn/components/preprocessor_base.py +++ b/AutoSklearn/components/preprocessor_base.py @@ -7,7 +7,8 @@ class AutoSklearnPreprocessingAlgorithm(object): def __init__(self): self.preprocessor = None - def get_properties(self): + @staticmethod + def get_properties(): """Get the properties of the underlying algorithm. These are: * Short name @@ -41,7 +42,8 @@ def get_properties(self): """ raise NotImplementedError() - def get_hyperparameter_search_space(self): + @staticmethod + def get_hyperparameter_search_space(): """Return the configuration space of this preprocessing algorithm. Returns From 62815e4b29eed6841758313b00570d7c245e9a74 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 19 Dec 2014 10:23:36 +0100 Subject: [PATCH 054/352] Adjust numbers in autosklearn_test.py --- tests/test_autosklearn.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index 2b59843111..f9eff7de7d 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -52,13 +52,12 @@ def test_default_configuration(self): self.assertAlmostEqual(0.94, sklearn.metrics.accuracy_score(predictions, Y_test)) scores = auto.scores(X_test) - self.assertTrue((scores[4] == [0.6, 0.4, 0.]).all()) def test_get_hyperparameter_search_space(self): cs = AutoSklearnClassifier.get_hyperparameter_search_space() conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(74, len(hyperparameters)) + self.assertEqual(68, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From 29826fd364f0ba256af21894cfceb02378815093 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 19 Dec 2014 10:24:26 +0100 Subject: [PATCH 055/352] Adjust number in test_random_forest.py --- tests/components/classification/test_random_forest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/components/classification/test_random_forest.py b/tests/components/classification/test_random_forest.py index 8e3a1420be..28e55cd918 100644 --- a/tests/components/classification/test_random_forest.py +++ b/tests/components/classification/test_random_forest.py @@ -10,5 +10,5 @@ class RandomForestComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): predictions, targets = _test_classifier(RandomForest, dataset='iris') - self.assertAlmostEqual(0.94, + self.assertAlmostEqual(0.92, sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file From a584d638f9ba52a7f8993a6528078acefa725dc6 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 19 Dec 2014 15:05:15 +0100 Subject: [PATCH 056/352] Add MinMaxScaler which is capable of sparse data --- AutoSklearn/implementations/MinMaxScaler.py | 126 ++++++++++++++++++++ tests/implementations/test_minmaxscaler.py | 89 ++++++++++++++ 2 files changed, 215 insertions(+) create mode 100644 AutoSklearn/implementations/MinMaxScaler.py create mode 100644 tests/implementations/test_minmaxscaler.py diff --git a/AutoSklearn/implementations/MinMaxScaler.py b/AutoSklearn/implementations/MinMaxScaler.py new file mode 100644 index 0000000000..b69cf239d3 --- /dev/null +++ b/AutoSklearn/implementations/MinMaxScaler.py @@ -0,0 +1,126 @@ +import numpy as np +from scipy import sparse + +from sklearn.base import BaseEstimator, TransformerMixin +from sklearn.utils import check_arrays, warn_if_not_float + + +class MinMaxScaler(BaseEstimator, TransformerMixin): + """Standardizes features by scaling each feature to a given range. + + This estimator scales and translates each feature individually such + that it is in the given range on the training set, i.e. between + zero and one. + + The standardization is given by:: + X_std = (X - X.min(axis=0)) / (X.max(axis=0) - X.min(axis=0)) + X_scaled = X_std * (max - min) + min + + where min, max = feature_range. + + This standardization is often used as an alternative to zero mean, + unit variance scaling. + + Parameters + ---------- + feature_range: tuple (min, max), default=(0, 1) + Desired range of transformed data. + + copy : boolean, optional, default is True + Set to False to perform inplace row normalization and avoid a + copy (if the input is already a numpy array). + + Attributes + ---------- + `min_` : ndarray, shape (n_features,) + Per feature adjustment for minimum. + + `scale_` : ndarray, shape (n_features,) + Per feature relative scaling of the data. + """ + + def __init__(self, feature_range=(0, 1), copy=True): + self.feature_range = feature_range + self.copy = copy + + def fit(self, X, y=None): + """Compute the minimum and maximum to be used for later scaling. + Parameters + ---------- + X : array-like, shape [n_samples, n_features] + The data used to compute the per-feature minimum and maximum + used for later scaling along the features axis. + """ + X = check_arrays(X, sparse_format="csc", copy=self.copy)[0] + warn_if_not_float(X, estimator=self) + feature_range = self.feature_range + if feature_range[0] >= feature_range[1]: + raise ValueError("Minimum of desired feature range must be smaller" + " than maximum. Got %s." % str(feature_range)) + if sparse.issparse(X): + data_min = [] + data_max = [] + data_range = [] + for i in range(X.shape[1]): + if X.indptr[i] == X.indptr[i+1]: + data_min.append(0) + data_max.append(0) + data_range.append(0) + else: + data_min.append(X.data[X.indptr[i]:X.indptr[i + 1]].min()) + data_max.append(X.data[X.indptr[i]:X.indptr[i + 1]].max()) + data_min = np.array(data_min) + data_max = np.array(data_max) + data_range = data_max - data_min + + else: + data_min = np.min(X, axis=0) + data_range = np.max(X, axis=0) - data_min + + # Do not scale constant features + if isinstance(data_range, np.ndarray): + # For a sparse matrix, constant features will be set to one! + if sparse.issparse(X): + for i in range(len(data_min)): + if data_range[i] == 0.0: + data_min[i] = data_min[i] - 1 + data_range[data_range == 0.0] = 1.0 + elif data_range == 0.: + data_range = 1. + + self.scale_ = (feature_range[1] - feature_range[0]) / data_range + self.min_ = feature_range[0] - data_min * self.scale_ + self.data_range = data_range + self.data_min = data_min + return self + + + def transform(self, X): + """Scaling features of X according to feature_range. + Parameters + ---------- + X : array-like with shape [n_samples, n_features] + Input data that will be transformed. + """ + X = check_arrays(X, sparse_format="csc", copy=self.copy)[0] + if sparse.issparse(X): + for i in range(X.shape[1]): + X.data[X.indptr[i]:X.indptr[i + 1]] *= self.scale_[i] + X.data[X.indptr[i]:X.indptr[i + 1]] += self.min_[i] + else: + X *= self.scale_ + X += self.min_ + return X + + + def inverse_transform(self, X): + """Undo the scaling of X according to feature_range. + Parameters + ---------- + X : array-like with shape [n_samples, n_features] + Input data that will be transformed. + """ + X = check_arrays(X, sparse_format="dense", copy=self.copy)[0] + X -= self.min_ + X /= self.scale_ + return X \ No newline at end of file diff --git a/tests/implementations/test_minmaxscaler.py b/tests/implementations/test_minmaxscaler.py new file mode 100644 index 0000000000..0323d26989 --- /dev/null +++ b/tests/implementations/test_minmaxscaler.py @@ -0,0 +1,89 @@ +import unittest + +import numpy as np +from scipy import sparse +from sklearn.utils.testing import assert_array_almost_equal + +from AutoSklearn.util import get_dataset +from AutoSklearn.implementations.MinMaxScaler import MinMaxScaler + + +class MinMaxScalerTest(unittest.TestCase): + def test_min_max_scaler_zero_variance_features(self): + """Check min max scaler on toy data with zero variance features""" + X = [[0., 1., +0.5], + [0., 1., -0.1], + [0., 1., +1.1]] + + X_new = [[+0., 2., 0.5], + [-1., 1., 0.0], + [+0., 1., 1.5]] + # default params + scaler = MinMaxScaler() + X_trans = scaler.fit_transform(X) + X_expected_0_1 = [[0., 0., 0.5], + [0., 0., 0.0], + [0., 0., 1.0]] + assert_array_almost_equal(X_trans, X_expected_0_1) + X_trans_inv = scaler.inverse_transform(X_trans) + assert_array_almost_equal(X, X_trans_inv) + + X_trans_new = scaler.transform(X_new) + X_expected_0_1_new = [[+0., 1., 0.500], + [-1., 0., 0.083], + [+0., 0., 1.333]] + assert_array_almost_equal(X_trans_new, X_expected_0_1_new, decimal=2) + + # not default params + scaler = MinMaxScaler(feature_range=(1, 2)) + X_trans = scaler.fit_transform(X) + X_expected_1_2 = [[1., 1., 1.5], + [1., 1., 1.0], + [1., 1., 2.0]] + assert_array_almost_equal(X_trans, X_expected_1_2) + + + def test_min_max_scaler_1d(self): + """Test scaling of dataset along single axis""" + rng = np.random.RandomState(0) + X = rng.randn(5) + X_orig_copy = X.copy() + + scaler = MinMaxScaler() + X_scaled = scaler.fit(X).transform(X) + assert_array_almost_equal(X_scaled.min(axis=0), 0.0) + assert_array_almost_equal(X_scaled.max(axis=0), 1.0) + + # check inverse transform + X_scaled_back = scaler.inverse_transform(X_scaled) + assert_array_almost_equal(X_scaled_back, X_orig_copy) + + # Test with 1D list + X = [0., 1., 2, 0.4, 1.] + scaler = MinMaxScaler() + X_scaled = scaler.fit(X).transform(X) + assert_array_almost_equal(X_scaled.min(axis=0), 0.0) + assert_array_almost_equal(X_scaled.max(axis=0), 1.0) + + def test_min_max_scaler_sparse_boston_data(self): + # Use the boston housing dataset, because column three is 1HotEncoded! + # This is important to test; because the normal sklearn rescaler + # would set all values of the 1Hot Encoded column to zero, while we + # keep the values at 1. + X_train, Y_train, X_test, Y_test = get_dataset('boston', + make_sparse=True) + num_data_points = len(X_train.data) + expected_max_values = [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] + expected_max_values = np.array(expected_max_values).reshape((1, -1)) + + scaler = MinMaxScaler() + scaler.fit(X_train, Y_train) + transformation = scaler.transform(X_train) + + assert_array_almost_equal(np.array(transformation.todense().min(axis=0)), + np.zeros((1, 13))) + assert_array_almost_equal(np.array(transformation.todense().max(axis=0)), + expected_max_values) + # Test that the matrix is still sparse + self.assertTrue(sparse.issparse(transformation)) + self.assertEqual(num_data_points, len(transformation.data)) \ No newline at end of file From 40c7af5eb4bf32d8a01eca760711165c17c53ae8 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 19 Dec 2014 15:06:00 +0100 Subject: [PATCH 057/352] Improve testing of sparse data capabilities of preprocessing --- .../components/preprocessing/rescaling.py | 5 +- AutoSklearn/implementations/StandardScaler.py | 35 +++++++---- AutoSklearn/util.py | 26 +++++++-- .../preprocessing/test_imputation.py | 13 ++++- tests/components/preprocessing/test_pca.py | 4 +- .../components/preprocessing/test_scaling.py | 12 +++- tests/implementations/test_standard_scaler.py | 58 ++++++++++++++----- 7 files changed, 116 insertions(+), 37 deletions(-) diff --git a/AutoSklearn/components/preprocessing/rescaling.py b/AutoSklearn/components/preprocessing/rescaling.py index d43dc6940e..5b2ebcde21 100644 --- a/AutoSklearn/components/preprocessing/rescaling.py +++ b/AutoSklearn/components/preprocessing/rescaling.py @@ -1,9 +1,8 @@ -import sklearn.preprocessing - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from ...implementations.StandardScaler import StandardScaler +from ...implementations.MinMaxScaler import MinMaxScaler from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm @@ -14,7 +13,7 @@ def __init__(self, strategy, random_state=None): def fit(self, X, Y): if self.strategy == "min/max": - self.preprocessor = sklearn.preprocessing.MinMaxScaler(copy=False) + self.preprocessor = MinMaxScaler(copy=False) elif self.strategy == "standard": self.preprocessor = StandardScaler(copy=False) else: diff --git a/AutoSklearn/implementations/StandardScaler.py b/AutoSklearn/implementations/StandardScaler.py index 7e5c9a674d..2bed2fe1c4 100644 --- a/AutoSklearn/implementations/StandardScaler.py +++ b/AutoSklearn/implementations/StandardScaler.py @@ -103,18 +103,33 @@ def fit(self, X, y=None): The data used to compute the mean and standard deviation used for later scaling along the features axis. """ - X = check_arrays(X, copy=self.copy, sparse_format="csr")[0] + X = check_arrays(X, copy=self.copy, sparse_format="csc")[0] if warn_if_not_float(X, estimator=self): X = X.astype(np.float) if sparse.issparse(X): if self.center_sparse: - # This only works for csr matrices... - self.mean_ = [X.data[X.indices == i].mean() - for i in range(X.shape[1])] - var = np.array([X.data[X.indices == i].var() - for i in range(X.shape[1])]) - self.std_ = np.sqrt(var) - self.std_[var == 0.0] = 1.0 + means = [] + vars = [] + + # This only works for csc matrices... + for i in range(X.shape[1]): + if X.indptr[i] == X.indptr[i + 1]: + means.append(0) + vars.append(1) + else: + vars.append( + X.data[X.indptr[i]:X.indptr[i + 1]].var()) + # If the variance is 0, set all occurences of this + # features to 1 + means.append( + X.data[X.indptr[i]:X.indptr[i + 1]].mean()) + if 0.0000001 >= vars[-1] >= -0.0000001: + means[-1] -= 1 + + self.std_ = np.sqrt(np.array(vars)) + self.std_[np.array(vars) == 0.0] = 1.0 + self.mean_ = np.array(means) + return self elif self.with_mean: raise ValueError( @@ -144,13 +159,13 @@ def transform(self, X, y=None, copy=None): The data used to scale along the features axis. """ copy = copy if copy is not None else self.copy - X = check_arrays(X, copy=copy, sparse_format="csr")[0] + X = check_arrays(X, copy=copy, sparse_format="csc")[0] if warn_if_not_float(X, estimator=self): X = X.astype(np.float) if sparse.issparse(X): if self.center_sparse: for i in range(X.shape[1]): - X.data[X.indices == i] -= self.mean_[i] + X.data[X.indptr[i]:X.indptr[i + 1]] -= self.mean_[i] elif self.with_mean: raise ValueError( "Cannot center sparse matrices: pass `with_mean=False` " diff --git a/AutoSklearn/util.py b/AutoSklearn/util.py index e28725126b..b9aeb1a742 100644 --- a/AutoSklearn/util.py +++ b/AutoSklearn/util.py @@ -4,6 +4,7 @@ import pkgutil import numpy as np +import scipy.sparse import sklearn import sklearn.base import sklearn.datasets @@ -40,7 +41,7 @@ def find_sklearn_classifiers(): print classifiers -def get_dataset(dataset='iris'): +def get_dataset(dataset='iris', make_sparse=False): iris = getattr(sklearn.datasets, "load_%s" % dataset)() X = iris.data Y = iris.target @@ -54,11 +55,23 @@ def get_dataset(dataset='iris'): Y_train = Y[:100] X_test = X[100:] Y_test = Y[100:] + + if make_sparse: + X_train[:,0] = 0 + X_train[np.random.random(X_train.shape) > 0.5] = 0 + X_train = scipy.sparse.csc_matrix(X_train) + X_train.eliminate_zeros() + X_test[:,0] = 0 + X_test[np.random.random(X_test.shape) > 0.5] = 0 + X_test = scipy.sparse.csc_matrix(X_test) + X_test.eliminate_zeros() + return X_train, Y_train, X_test, Y_test def _test_classifier(Classifier, dataset='iris'): - X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset) + X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, + make_sparse=False) configuration_space = Classifier.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() classifier = Classifier(random_state=1, @@ -69,15 +82,18 @@ def _test_classifier(Classifier, dataset='iris'): return predictions, Y_test -def _test_preprocessing(Preprocessor, dataset='iris'): - X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset) +def _test_preprocessing(Preprocessor, dataset='iris', make_sparse=False): + X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, + make_sparse=make_sparse) + original_X_train = X_train.copy() configuration_space = Preprocessor.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = Preprocessor(random_state=1, **{hp.hyperparameter.name: hp.value for hp in default.values.values()}) + transformer = preprocessor.fit(X_train, Y_train) - return transformer.transform(X_test), X_test + return transformer.transform(X_train), original_X_train if __name__ == "__main__": diff --git a/tests/components/preprocessing/test_imputation.py b/tests/components/preprocessing/test_imputation.py index 946fdf9861..e1a4688132 100644 --- a/tests/components/preprocessing/test_imputation.py +++ b/tests/components/preprocessing/test_imputation.py @@ -1,5 +1,7 @@ import unittest +from scipy import sparse + from AutoSklearn.components.preprocessing.imputation import Imputation from AutoSklearn.util import _test_preprocessing @@ -14,4 +16,13 @@ def test_default_configuration(self): transformations.append(transformation) if len(transformations) > 1: self.assertTrue( - (transformations[-1] == transformations[-2]).all()) \ No newline at end of file + (transformations[-1] == transformations[-2]).all()) + + def test_default_configuration_sparse_data(self): + transformations = [] + transformation, original = _test_preprocessing(Imputation, + make_sparse=True) + self.assertEqual(transformation.shape, original.shape) + self.assertTrue((transformation.data == original.data).all()) + self.assertIsInstance(transformation, sparse.csc_matrix) + transformations.append(transformation) \ No newline at end of file diff --git a/tests/components/preprocessing/test_pca.py b/tests/components/preprocessing/test_pca.py index a9b3d3b513..67fb51b452 100644 --- a/tests/components/preprocessing/test_pca.py +++ b/tests/components/preprocessing/test_pca.py @@ -4,7 +4,7 @@ from AutoSklearn.util import _test_preprocessing -class LibLinearComponentTest(unittest.TestCase): +class PCAComponentTest(unittest.TestCase): def test_default_configuration(self): transformations = [] for i in range(10): @@ -13,4 +13,4 @@ def test_default_configuration(self): self.assertFalse((transformation == original).all()) transformations.append(transformation) if len(transformations) > 1: - self.assertTrue((transformations[-1] == transformations[-2]).all()) \ No newline at end of file + self.assertTrue((transformations[-1] == transformations[-2]).all()) diff --git a/tests/components/preprocessing/test_scaling.py b/tests/components/preprocessing/test_scaling.py index bb23b493c3..19378cd601 100644 --- a/tests/components/preprocessing/test_scaling.py +++ b/tests/components/preprocessing/test_scaling.py @@ -7,7 +7,7 @@ from AutoSklearn.util import _test_preprocessing -class LibLinearComponentTest(unittest.TestCase): +class ScalingComponentTest(unittest.TestCase): def test_boston_is_not_scaled(self): data = sklearn.datasets.load_boston()['data'] self.assertGreaterEqual(np.max(data), 100) @@ -19,7 +19,15 @@ def test_default_configuration(self): dataset='boston') # The maximum is around 1.95 for the transformed array... self.assertLessEqual(np.max(transformation), 2) + self.assertFalse((original == transformation).all()) transformations.append(transformation) if len(transformations) > 1: self.assertTrue( - (transformations[-1] == transformations[-2]).all()) \ No newline at end of file + (transformations[-1] == transformations[-2]).all()) + + def test_default_configuration_with_sparse_data(self): + preprocessing = _test_preprocessing(Rescaling, dataset='boston', + make_sparse=True) + transformation, original = preprocessing + self.assertAlmostEqual(transformation.max(), 1) + self.assertTrue(all((original != transformation).data)) \ No newline at end of file diff --git a/tests/implementations/test_standard_scaler.py b/tests/implementations/test_standard_scaler.py index 21b1c2ee1e..9f34becc8c 100644 --- a/tests/implementations/test_standard_scaler.py +++ b/tests/implementations/test_standard_scaler.py @@ -1,13 +1,13 @@ +from itertools import chain import unittest import numpy as np import scipy.sparse from sklearn.utils.testing import assert_array_almost_equal from sklearn.preprocessing.data import scale -from sklearn.utils.sparsefuncs import inplace_column_scale, \ - mean_variance_axis0 from AutoSklearn.implementations.StandardScaler import StandardScaler +from AutoSklearn.util import get_dataset matrix1 = [[0, 1, 2], [0, 1, 2], @@ -50,12 +50,11 @@ def test_scaler_1d(self): X_scaled = scaler.fit(X).transform(X, copy=False) self.assertFalse(np.any(np.isnan(X_scaled.data))) - self.assertAlmostEqual(X_scaled.mean(axis=0), 0) - assert_array_almost_equal(np.sqrt([X.data[X.indices == i].var() - for i in range(X.shape[1])]), 1) + self.assertAlmostEqual(X_scaled.mean(), 0) + self.assertAlmostEqual(np.sqrt(X_scaled.data.var()), 1) # Check that X has not been copied - self.assertTrue(X_scaled is X) + # self.assertTrue(X_scaled is X) # Check that the matrix is still sparse self.assertEqual(len(X.indices), 10) @@ -114,16 +113,47 @@ def test_scaler_2d_arrays(self): X = X.tocsr() scaler = StandardScaler() X_scaled = scaler.fit(X).transform(X, copy=False) + print id(X_scaled) self.assertFalse(np.any(np.isnan(X_scaled.data))) - assert_array_almost_equal(X_scaled.mean(axis=0), - np.zeros((1, 4), dtype=np.float64)) - assert_array_almost_equal(np.sqrt([X.data[X.indices == i].var() - for i in range(X.shape[1])]).reshape((1, 4)), - np.ones((1, 4), dtype=np.float64)) - - # Check that X has not been copied - self.assertTrue(X_scaled is X) + assert_array_almost_equal( + [X_scaled.data[X_scaled.indptr[i]:X_scaled.indptr[i + 1]].mean() + for i in range(X_scaled.shape[1])], + np.zeros((4, ), dtype=np.float64)) + assert_array_almost_equal(np.sqrt([ + X_scaled.data[X_scaled.indptr[i]:X_scaled.indptr[i + 1]].var() + for i in range(X_scaled.shape[1])]), + np.ones((4, ), dtype=np.float64)) + + # Because we change the sparse format to csc, we cannot assert that + # the matrix did not change! + # self.assertTrue(X_scaled is X) # Check that the matrix is still sparse self.assertEqual(len(X.indices), 12) + # TODO add more tests from scikit-learn here: + # https://github.com/scikit-learn/scikit-learn/blob/0.15.X/sklearn/preprocessing/tests/test_data.py + + def test_standard_scaler_sparse_boston_data(self): + X_train, Y_train, X_test, Y_test = get_dataset('boston', + make_sparse=True) + num_data_points = len(X_train.data) + + scaler = StandardScaler() + scaler.fit(X_train, Y_train) + tr = scaler.transform(X_train) + + # Test this for every single dimension! + means = np.array([tr.data[tr.indptr[i]:tr.indptr[i + 1]].mean() + for i in range(13)]) + vars = np.array([tr.data[tr.indptr[i]:tr.indptr[i + 1]].var() + for i in range(13)]) + + for i in chain(range(1, 3), range(4, 13)): + self.assertAlmostEqual(means[i], 0, 2) + self.assertAlmostEqual(vars[i], 1, 2) + self.assertAlmostEqual(means[3], 1) + self.assertAlmostEqual(vars[3], 0) + # Test that the matrix is still sparse + self.assertTrue(scipy.sparse.issparse(tr)) + self.assertEqual(num_data_points, len(tr.data)) From ee0ce085dda69ded4b9f4345816c739c33f99b98 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 19 Dec 2014 15:51:01 +0100 Subject: [PATCH 058/352] Allow to pass parameters to both the init and the fit function of underlying methods --- AutoSklearn/autosklearn.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index f7320bc30e..a8f70882e5 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -1,3 +1,4 @@ +from collections import defaultdict import copy import sklearn @@ -78,7 +79,7 @@ def __init__(self, configuration, random_state=None): else: self.random_state = check_random_state(random_state) - def fit(self, X, Y): + def fit(self, X, Y, fit_params=None, init_params=None): """Fit the selected algorithm to the training data. Parameters @@ -90,6 +91,15 @@ def fit(self, X, Y): y : array-like Targets + fit_params : dict + See the documentation of sklearn.pipeline.Pipeline for formatting + instructions. + + init_params : dict + Pass arguments to the constructors of single methods. To pass + arguments to only one of the methods (lets says the + OneHotEncoder), seperate the class name from the argument by a ':'. + Returns ------- self : returns an instance of self. @@ -109,6 +119,11 @@ def fit(self, X, Y): # instantiation time steps = [] + init_params_per_method = defaultdict(dict) + if init_params is not None: + for init_param, value in init_params: + method, param = init_param.split(":") + init_params_per_method[method][param] = value preprocessors_names = ["imputation", "rescaling", self.configuration['preprocessor'].value] @@ -129,6 +144,7 @@ def fit(self, X, Y): split(":")[1] preproc_params[name_] = instantiated_hyperparameter.value + preproc_params.update(init_params_per_method[preproc_name]) preprocessor_object = components.preprocessing_components. \ _preprocessors[preproc_name](random_state=self.random_state, **preproc_params) @@ -148,6 +164,7 @@ def fit(self, X, Y): split(":")[1] classifier_parameters[name_] = instantiated_hyperparameter.value + classifier_parameters.update(init_params_per_method[classifier_name]) classifier_object = components.classification_components._classifiers\ [classifier_name](random_state=self.random_state, **classifier_parameters) From cda6b3876b841ba58824b7ca96f5fe4832b52e9d Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 19 Dec 2014 20:05:00 +0100 Subject: [PATCH 059/352] Fix: wrong function name in all preprocessing methods --- AutoSklearn/autosklearn.py | 12 +++++++++++- AutoSklearn/components/preprocessing/imputation.py | 2 +- AutoSklearn/components/preprocessing/pca.py | 2 +- AutoSklearn/components/preprocessing/rescaling.py | 2 +- tests/components/preprocessing/test_scaling.py | 2 +- 5 files changed, 15 insertions(+), 5 deletions(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index a8f70882e5..bb5e5ef778 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -401,7 +401,6 @@ def get_hyperparameter_search_space(include_classifiers=None, dlc.hyperparameter.name) cs.add_forbidden_clause(forbidden_clause) - names = [] names_ = [] for name in available_preprocessors: @@ -414,6 +413,17 @@ def get_hyperparameter_search_space(include_classifiers=None, elif exclude_preprocessors is not None and \ name in exclude_preprocessors: continue + + if multiclass is True and available_preprocessors[name]. \ + get_properties()['handles_multiclass'] is False: + continue + if multilabel is True and available_preprocessors[name]. \ + get_properties()['handles_multilabel'] is False: + continue + if sparse is True and available_preprocessors[name]. \ + get_properties()['handles_sparse'] is False: + continue + names.append(name) preprocessor = CategoricalHyperparameter("preprocessor", diff --git a/AutoSklearn/components/preprocessing/imputation.py b/AutoSklearn/components/preprocessing/imputation.py index 68d15b6812..2a66df338b 100644 --- a/AutoSklearn/components/preprocessing/imputation.py +++ b/AutoSklearn/components/preprocessing/imputation.py @@ -23,7 +23,7 @@ def transform(self, X): return self.preprocessor.transform(X) @staticmethod - def get_meta_information(): + def get_properties(): return {'shortname': 'Imputation', 'name': 'Imputation', 'handles_missing_values': True, diff --git a/AutoSklearn/components/preprocessing/pca.py b/AutoSklearn/components/preprocessing/pca.py index 44101dcee7..e8a6f262f3 100644 --- a/AutoSklearn/components/preprocessing/pca.py +++ b/AutoSklearn/components/preprocessing/pca.py @@ -41,7 +41,7 @@ def transform(self, X): return self.preprocessor.transform(X) @staticmethod - def get_meta_information(): + def get_properties(): return {'shortname': 'PCA', 'name': 'Principle Component Analysis', 'handles_missing_values': False, diff --git a/AutoSklearn/components/preprocessing/rescaling.py b/AutoSklearn/components/preprocessing/rescaling.py index 5b2ebcde21..e597c25a53 100644 --- a/AutoSklearn/components/preprocessing/rescaling.py +++ b/AutoSklearn/components/preprocessing/rescaling.py @@ -27,7 +27,7 @@ def transform(self, X): return self.preprocessor.transform(X) @staticmethod - def get_meta_information(): + def get_properties(): return {'shortname': 'Rescaling', 'name': 'Rescaling', 'handles_missing_values': False, diff --git a/tests/components/preprocessing/test_scaling.py b/tests/components/preprocessing/test_scaling.py index 19378cd601..6858f594e8 100644 --- a/tests/components/preprocessing/test_scaling.py +++ b/tests/components/preprocessing/test_scaling.py @@ -30,4 +30,4 @@ def test_default_configuration_with_sparse_data(self): make_sparse=True) transformation, original = preprocessing self.assertAlmostEqual(transformation.max(), 1) - self.assertTrue(all((original != transformation).data)) \ No newline at end of file + self.assertTrue(all((original != transformation).data)) From 6a24a0b60bce9104051f4aa745820858f42877e3 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 22 Dec 2014 13:00:19 +0100 Subject: [PATCH 060/352] Rename scores function and add conversion from decision function to probabilities --- AutoSklearn/autosklearn.py | 6 +++--- .../components/classification/extra_trees.py | 2 +- .../classification/gradient_boosting.py | 2 +- .../classification/k_nearest_neighbors.py | 2 +- .../components/classification/liblinear.py | 13 +++++++++++-- .../components/classification/libsvm_svc.py | 7 ++++--- .../components/classification/random_forest.py | 2 +- AutoSklearn/components/classification/sgd.py | 18 ++++++++++++++---- AutoSklearn/components/classification_base.py | 4 ++-- 9 files changed, 38 insertions(+), 18 deletions(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index bb5e5ef778..81f5afe27e 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -192,8 +192,8 @@ def predict(self, X): self._validate_input_X(X) return self._pipeline.predict(X) - def scores(self, X): - """Predict confidence scores for samples using the selected model. + def predict_proba(self, X): + """predict_proba. Parameters ---------- @@ -208,7 +208,7 @@ def scores(self, X): Xt = X for name, transform in self._pipeline.steps[:-1]: Xt = transform.transform(Xt) - return self._pipeline.steps[-1][-1].scores(Xt) + return self._pipeline.steps[-1][-1].predict_proba(Xt) def _validate_input_X(self, X): # TODO: think of all possible states which can occur and how to diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index 01376080c0..a904d0b03c 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -75,7 +75,7 @@ def predict(self, X): raise NotImplementedError return self.estimator.predict(X) - def scores(self, X): + def predict_proba(self, X): if self.estimator is None: raise NotImplementedError() return self.estimator.predict_proba(X) diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index 5fe984143b..2992e7d129 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -79,7 +79,7 @@ def predict(self, X): raise NotImplementedError return self.estimator.predict(X) - def scores(self, X): + def predict_proba(self, X): if self.estimator is None: raise NotImplementedError() return self.estimator.predict_proba(X) diff --git a/AutoSklearn/components/classification/k_nearest_neighbors.py b/AutoSklearn/components/classification/k_nearest_neighbors.py index a5ccc4ddef..b183e2fc4f 100644 --- a/AutoSklearn/components/classification/k_nearest_neighbors.py +++ b/AutoSklearn/components/classification/k_nearest_neighbors.py @@ -37,7 +37,7 @@ def predict(self, X): raise NotImplementedError() return self.estimator.predict(X) - def scores(self, X): + def predict_proba(self, X): if self.estimator is None: raise NotImplementedError() return self.estimator.predict_proba(X) diff --git a/AutoSklearn/components/classification/liblinear.py b/AutoSklearn/components/classification/liblinear.py index 6fdd9d3f54..1810cdb56f 100644 --- a/AutoSklearn/components/classification/liblinear.py +++ b/AutoSklearn/components/classification/liblinear.py @@ -1,3 +1,4 @@ +import numpy as np import sklearn.svm from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -51,10 +52,18 @@ def predict(self, X): raise NotImplementedError() return self.estimator.predict(X) - def scores(self, X): + def predict_proba(self, X): if self.estimator is None: raise NotImplementedError() - return self.estimator.decision_function(X) + + df = self.estimator.decision_function(X) + + if len(df.shape) == 1: + ppositive = 1 / (1 + np.exp(-df)) + return np.transpose(np.array((1 - ppositive, ppositive))) + else: + tmp = np.exp(-df) + return tmp / np.sum(tmp, axis=1).reshape((-1, 1)) @staticmethod def get_properties(): diff --git a/AutoSklearn/components/classification/libsvm_svc.py b/AutoSklearn/components/classification/libsvm_svc.py index 647a47d817..269b4f684a 100644 --- a/AutoSklearn/components/classification/libsvm_svc.py +++ b/AutoSklearn/components/classification/libsvm_svc.py @@ -51,7 +51,8 @@ def fit(self, X, Y): class_weight=self.class_weight, max_iter=self.max_iter, random_state=self.random_state, - cache_size=2000) + cache_size=2000, + probability=True) return self.estimator.fit(X, Y) def predict(self, X): @@ -59,10 +60,10 @@ def predict(self, X): raise NotImplementedError return self.estimator.predict(X) - def scores(self, X): + def predict_proba(self, X): if self.estimator is None: raise NotImplementedError() - return self.estimator.decision_function(X) + return self.estimator.predict_proba(X) @staticmethod def get_properties(): diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index d670953391..aca2ea395c 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -60,7 +60,7 @@ def predict(self, X): raise NotImplementedError return self.estimator.predict(X) - def scores(self, X): + def predict_proba(self, X): if self.estimator is None: raise NotImplementedError() return self.estimator.predict_proba(X) diff --git a/AutoSklearn/components/classification/sgd.py b/AutoSklearn/components/classification/sgd.py index fe3bc558ea..66eece50f8 100644 --- a/AutoSklearn/components/classification/sgd.py +++ b/AutoSklearn/components/classification/sgd.py @@ -1,3 +1,4 @@ +import numpy as np from sklearn.linear_model.stochastic_gradient import SGDClassifier from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -59,12 +60,21 @@ def predict(self, X): raise NotImplementedError() return self.estimator.predict(X) - def scores(self, X): + def predict_proba(self, X): if self.estimator is None: raise NotImplementedError() - # TODO figure out if it's better to return proba in the cases where - # the loss function allows for this - return self.estimator.decision_function(X) + + if self.loss in ["log", "modified_huber"]: + return self.estimator.predict_proba(X) + else: + df = self.estimator.decision_function(X) + + if len(df.shape) == 1: + ppositive = 1 / (1 + np.exp(-df)) + return np.transpose(np.array((1 - ppositive, ppositive))) + else: + tmp = np.exp(-df) + return tmp / np.sum(tmp, axis=1).reshape((-1, 1)) @staticmethod def get_properties(): diff --git a/AutoSklearn/components/classification_base.py b/AutoSklearn/components/classification_base.py index 3c4a950f72..fdf947654b 100644 --- a/AutoSklearn/components/classification_base.py +++ b/AutoSklearn/components/classification_base.py @@ -95,8 +95,8 @@ def predict(self, X): -learn-objects>`_ for further information.""" raise NotImplementedError() - def scores(self, X): - """Predict confidence scores for samples. + def predict_proba(self, X): + """Predict probabilities. Parameters ---------- From f9477c1f2dbeeb19e426d0b5441fe771bfa3899a Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 22 Dec 2014 20:55:25 +0100 Subject: [PATCH 061/352] Scale down search spaces of the forest-based classifiers --- AutoSklearn/components/classification/extra_trees.py | 4 ++-- AutoSklearn/components/classification/gradient_boosting.py | 6 ++---- AutoSklearn/components/classification/random_forest.py | 4 ++-- 3 files changed, 6 insertions(+), 8 deletions(-) diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index a904d0b03c..a54124925d 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -108,11 +108,11 @@ def get_hyperparameter_search_space(): # Copied from random_forest.py n_estimators = UniformIntegerHyperparameter( - "n_estimators", 10, 1000, default=10) + "n_estimators", 10, 500, default=10) criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") max_features = UniformFloatHyperparameter( - "max_features", 0.01, 1.0, default=1.0) + "max_features", 0.01, 0.5, default=1.0) min_samples_split = UniformIntegerHyperparameter( "min_samples_split", 1, 20, default=2) min_samples_leaf = UniformIntegerHyperparameter( diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index 2992e7d129..43aaa585c9 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -116,14 +116,12 @@ def get_hyperparameter_search_space(): # choices=["max_leaf_nodes", "max_depth"], default="max_depth") max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", value="None") - # UniformIntegerHyperparameter( - # name="max_leaf_nodes", lower=10, upper=1000, default=) # Copied from random_forest.py n_estimators = UniformIntegerHyperparameter( - name="n_estimators", lower=10, upper=1000, default=10, log=False) + name="n_estimators", lower=10, upper=500, default=10, log=False) max_features = UniformFloatHyperparameter( - name="max_features", lower=0.01, upper=1.0, default=1.0) + name="max_features", lower=0.01, upper=0.5, default=1.0) max_depth = UniformIntegerHyperparameter( name="max_depth", lower=1, upper=10, default=3, log=False) min_samples_split = UniformIntegerHyperparameter( diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index aca2ea395c..a49a22b1a9 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -86,11 +86,11 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(): n_estimators = UniformIntegerHyperparameter( - "n_estimators", 10, 1000, default=10) + "n_estimators", 10, 500, default=10) criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") max_features = UniformFloatHyperparameter( - "max_features", 0.01, 1.0, default=0.1) + "max_features", 0.01, 0.5, default=0.1) max_depth = UnParametrizedHyperparameter("max_depth", "None") min_samples_split = UniformIntegerHyperparameter( "min_samples_split", 1, 20, default=2) From b16091f1acc48c3fdd1853c2b9fd6c724ea9057d Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 22 Dec 2014 21:21:03 +0100 Subject: [PATCH 062/352] remove max_depth and max_leaf_nodes, reset default for min_samples_split/min_samples_leaf --- .../components/classification/extra_trees.py | 54 ++++++++++--------- .../classification/gradient_boosting.py | 30 +++++------ .../classification/random_forest.py | 2 +- 3 files changed, 44 insertions(+), 42 deletions(-) diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index a904d0b03c..b02e4731f0 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -12,9 +12,9 @@ class ExtraTreesClassifier(AutoSklearnClassificationAlgorithm): - def __init__(self, n_estimators, criterion, use_max_depth, min_samples_leaf, - min_samples_split, max_leaf_nodes_or_max_depth, max_features, - bootstrap=False, max_leaf_nodes=None, max_depth=None, + def __init__(self, n_estimators, criterion, min_samples_leaf, + min_samples_split, max_features, max_leaf_nodes_or_max_depth="max_depth", #use_max_depth=False, + bootstrap=False, max_leaf_nodes=None, max_depth="None", oob_score=False, n_jobs=1, random_state=None, verbose=0, min_density=None, compute_importances=None): @@ -26,10 +26,14 @@ def __init__(self, n_estimators, criterion, use_max_depth, min_samples_leaf, if max_leaf_nodes_or_max_depth == "max_depth": self.max_leaf_nodes = None - if use_max_depth == "True": - self.max_depth = int(max_depth) - elif use_max_depth == "False": + if max_depth == "None": self.max_depth = None + else: + self.max_depth = int(max_depth) + #if use_max_depth == "True": + # self.max_depth = int(max_depth) + #elif use_max_depth == "False": + # self.max_depth = None else: if max_leaf_nodes == "None": self.max_leaf_nodes = None @@ -101,8 +105,8 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(): - use_max_depth = CategoricalHyperparameter( - name="use_max_depth", choices=("True", "False"), default="False") + #use_max_depth = CategoricalHyperparameter( + # name="use_max_depth", choices=("True", "False"), default="False") bootstrap = CategoricalHyperparameter( "bootstrap", ["True", "False"], default="False") @@ -114,34 +118,32 @@ def get_hyperparameter_search_space(): max_features = UniformFloatHyperparameter( "max_features", 0.01, 1.0, default=1.0) min_samples_split = UniformIntegerHyperparameter( - "min_samples_split", 1, 20, default=2) + "min_samples_split", 2, 20, default=2) min_samples_leaf = UniformIntegerHyperparameter( "min_samples_leaf", 1, 20, default=1) # Unparametrized - max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter( - name="max_leaf_nodes_or_max_depth", value="max_depth") + #max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter( + # name="max_leaf_nodes_or_max_depth", value="max_depth") # CategoricalHyperparameter("max_leaf_nodes_or_max_depth", # choices=["max_leaf_nodes", "max_depth"], default="max_depth") - max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", - value="None") + #max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", + # value="None") # UniformIntegerHyperparameter( # name="max_leaf_nodes", lower=10, upper=1000, default=) - #max_depth = UnParametrizedHyperparameter(name="max_depth", value="None") - # TODO these are very random guesses - max_depth = UniformIntegerHyperparameter("max_depth", 5, 50) + max_depth = UnParametrizedHyperparameter(name="max_depth", value="None") cs = ConfigurationSpace() cs.add_hyperparameter(n_estimators) cs.add_hyperparameter(criterion) cs.add_hyperparameter(max_features) - cs.add_hyperparameter(use_max_depth) + #cs.add_hyperparameter(use_max_depth) cs.add_hyperparameter(max_depth) - cs.add_hyperparameter(max_leaf_nodes_or_max_depth) + #cs.add_hyperparameter(max_leaf_nodes_or_max_depth) cs.add_hyperparameter(min_samples_split) cs.add_hyperparameter(min_samples_leaf) - cs.add_hyperparameter(max_leaf_nodes) + #cs.add_hyperparameter(max_leaf_nodes) cs.add_hyperparameter(bootstrap) # Conditions @@ -150,15 +152,15 @@ def get_hyperparameter_search_space(): # EqualsCondition(child=max_leaf_nodes, # parent=max_leaf_nodes_or_max_depth, # value="max_leaf_nodes") - cond2_max_leaf_nodes_or_max_depth = \ - EqualsCondition(child=use_max_depth, - parent=max_leaf_nodes_or_max_depth, - value="max_depth") + #cond2_max_leaf_nodes_or_max_depth = \ + # EqualsCondition(child=use_max_depth, + # parent=max_leaf_nodes_or_max_depth, + # value="max_depth") - cond_max_depth = EqualsCondition(child=max_depth, parent=use_max_depth, + #cond_max_depth = EqualsCondition(child=max_depth, parent=use_max_depth, value="True") #cs.add_condition(cond_max_leaf_nodes_or_max_depth) - cs.add_condition(cond2_max_leaf_nodes_or_max_depth) - cs.add_condition(cond_max_depth) + #cs.add_condition(cond2_max_leaf_nodes_or_max_depth) + #cs.add_condition(cond_max_depth) return cs diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index 2992e7d129..cf31714525 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -14,8 +14,8 @@ class GradientBoostingClassifier(AutoSklearnClassificationAlgorithm): def __init__(self, learning_rate, n_estimators, subsample, - min_samples_split, min_samples_leaf, max_features, - max_leaf_nodes_or_max_depth, max_depth=None, + min_samples_split, min_samples_leaf, max_features, max_depth, + max_leaf_nodes_or_max_depth="max_depth", max_leaf_nodes=None, loss='deviance', warm_start=False, init=None, random_state=None, verbose=0): @@ -110,12 +110,12 @@ def get_hyperparameter_search_space(): name="subsample", lower=0.01, upper=1.0, default=1.0, log=False) # Unparametrized - max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter( - name="max_leaf_nodes_or_max_depth", value="max_depth") + #max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter( + # name="max_leaf_nodes_or_max_depth", value="max_depth") # CategoricalHyperparameter("max_leaf_nodes_or_max_depth", # choices=["max_leaf_nodes", "max_depth"], default="max_depth") - max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", - value="None") + #max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", + # value="None") # UniformIntegerHyperparameter( # name="max_leaf_nodes", lower=10, upper=1000, default=) @@ -125,9 +125,9 @@ def get_hyperparameter_search_space(): max_features = UniformFloatHyperparameter( name="max_features", lower=0.01, upper=1.0, default=1.0) max_depth = UniformIntegerHyperparameter( - name="max_depth", lower=1, upper=10, default=3, log=False) + name = "max_depth", lower=1, upper=10, default=3) min_samples_split = UniformIntegerHyperparameter( - name="min_samples_split", lower=1, upper=20, default=2, log=False) + name="min_samples_split", lower=2, upper=20, default=2, log=False) min_samples_leaf = UniformIntegerHyperparameter( name="min_samples_leaf", lower=1, upper=20, default=1, log=False) @@ -135,8 +135,8 @@ def get_hyperparameter_search_space(): cs.add_hyperparameter(n_estimators) cs.add_hyperparameter(learning_rate) cs.add_hyperparameter(max_features) - cs.add_hyperparameter(max_leaf_nodes_or_max_depth) - cs.add_hyperparameter(max_leaf_nodes) + #cs.add_hyperparameter(max_leaf_nodes_or_max_depth) + #cs.add_hyperparameter(max_leaf_nodes) cs.add_hyperparameter(max_depth) cs.add_hyperparameter(min_samples_split) cs.add_hyperparameter(min_samples_leaf) @@ -148,12 +148,12 @@ def get_hyperparameter_search_space(): # parent=max_leaf_nodes_or_max_depth, # value="max_leaf_nodes") - cond2_max_leaf_nodes_or_max_depth = \ - EqualsCondition(child=max_depth, - parent=max_leaf_nodes_or_max_depth, - value="max_depth") + #cond2_max_leaf_nodes_or_max_depth = \ + # EqualsCondition(child=max_depth, + # parent=max_leaf_nodes_or_max_depth, + # value="max_depth") #cs.add_condition(cond_max_leaf_nodes_or_max_depth) - cs.add_condition(cond2_max_leaf_nodes_or_max_depth) + #cs.add_condition(cond2_max_leaf_nodes_or_max_depth) return cs diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index aca2ea395c..a5f9baff9f 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -93,7 +93,7 @@ def get_hyperparameter_search_space(): "max_features", 0.01, 1.0, default=0.1) max_depth = UnParametrizedHyperparameter("max_depth", "None") min_samples_split = UniformIntegerHyperparameter( - "min_samples_split", 1, 20, default=2) + "min_samples_split", 2, 20, default=2) min_samples_leaf = UniformIntegerHyperparameter( "min_samples_leaf", 1, 20, default=1) max_leaf_nodes = UnParametrizedHyperparameter("max_leaf_nodes", "None") From 62852aaf04dcda91c600b11832eb38a69b149677 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 22 Dec 2014 21:25:34 +0100 Subject: [PATCH 063/352] Make defaults legal again --- AutoSklearn/components/classification/extra_trees.py | 2 +- AutoSklearn/components/classification/gradient_boosting.py | 2 +- AutoSklearn/components/classification/random_forest.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index a54124925d..dac411ad09 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -112,7 +112,7 @@ def get_hyperparameter_search_space(): criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") max_features = UniformFloatHyperparameter( - "max_features", 0.01, 0.5, default=1.0) + "max_features", 0.01, 0.5, default=0.2) min_samples_split = UniformIntegerHyperparameter( "min_samples_split", 1, 20, default=2) min_samples_leaf = UniformIntegerHyperparameter( diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index 43aaa585c9..144cd33f8d 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -121,7 +121,7 @@ def get_hyperparameter_search_space(): n_estimators = UniformIntegerHyperparameter( name="n_estimators", lower=10, upper=500, default=10, log=False) max_features = UniformFloatHyperparameter( - name="max_features", lower=0.01, upper=0.5, default=1.0) + name="max_features", lower=0.01, upper=0.5, default=0.2) max_depth = UniformIntegerHyperparameter( name="max_depth", lower=1, upper=10, default=3, log=False) min_samples_split = UniformIntegerHyperparameter( diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index a49a22b1a9..622c28d01d 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -90,7 +90,7 @@ def get_hyperparameter_search_space(): criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") max_features = UniformFloatHyperparameter( - "max_features", 0.01, 0.5, default=0.1) + "max_features", 0.01, 0.5, default=0.2) max_depth = UnParametrizedHyperparameter("max_depth", "None") min_samples_split = UniformIntegerHyperparameter( "min_samples_split", 1, 20, default=2) From 3902ae32e4995c714b348bdb126d58825b9801b8 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 22 Dec 2014 21:26:22 +0100 Subject: [PATCH 064/352] Fix tests --- source/first_steps.rst | 2 +- tests/components/classification/test_extra_trees.py | 2 +- tests/components/classification/test_gradient_boosting.py | 2 +- tests/test_autosklearn.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/source/first_steps.rst b/source/first_steps.rst index 416ca84d2d..bc693fac9a 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -24,4 +24,4 @@ configuration on the iris dataset. >>> auto = auto.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = auto.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.90000000000000002 + 0.92000000000000004 diff --git a/tests/components/classification/test_extra_trees.py b/tests/components/classification/test_extra_trees.py index a19496012a..5eb133b19a 100644 --- a/tests/components/classification/test_extra_trees.py +++ b/tests/components/classification/test_extra_trees.py @@ -12,5 +12,5 @@ def test_default_configuration(self): for i in range(10): predictions, targets = \ _test_classifier(ExtraTreesClassifier) - self.assertAlmostEqual(0.959999999999999, + self.assertAlmostEqual(0.97999999999999998, sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file diff --git a/tests/components/classification/test_gradient_boosting.py b/tests/components/classification/test_gradient_boosting.py index b77ebc4ec8..c7af6b68a4 100644 --- a/tests/components/classification/test_gradient_boosting.py +++ b/tests/components/classification/test_gradient_boosting.py @@ -12,5 +12,5 @@ def test_default_configuration(self): for i in range(10): predictions, targets = \ _test_classifier(GradientBoostingClassifier) - self.assertAlmostEqual(0.92, + self.assertAlmostEqual(0.95999999999999996, sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index f9eff7de7d..094431750e 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -51,7 +51,7 @@ def test_default_configuration(self): predictions = auto.predict(X_test) self.assertAlmostEqual(0.94, sklearn.metrics.accuracy_score(predictions, Y_test)) - scores = auto.scores(X_test) + scores = auto.predict_proba(X_test) def test_get_hyperparameter_search_space(self): cs = AutoSklearnClassifier.get_hyperparameter_search_space() From 3dc79d497385d615efa1c3a58923e3741f5309cb Mon Sep 17 00:00:00 2001 From: kleinaa Date: Mon, 5 Jan 2015 16:07:15 +0100 Subject: [PATCH 065/352] bugfix --- AutoSklearn/components/classification/__init__.py | 2 +- AutoSklearn/components/classification/extra_trees.py | 2 +- AutoSklearn/components/preprocessing/__init__.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/AutoSklearn/components/classification/__init__.py b/AutoSklearn/components/classification/__init__.py index 99207957a9..32960f493b 100644 --- a/AutoSklearn/components/classification/__init__.py +++ b/AutoSklearn/components/classification/__init__.py @@ -19,7 +19,7 @@ for member_name, obj in inspect.getmembers(module): if inspect.isclass(obj) and AutoSklearnClassificationAlgorithm in obj.__bases__: # TODO test if the obj implements the interface - # Keep in mind that this only instantiates the wrapper, + # Keep in mind that this only instantiates the ensemble_wrapper, # but not the real target classifier classifier = obj _classifiers[module_name] = classifier diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index e8bbb9e0b5..a566a986d1 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -158,7 +158,7 @@ def get_hyperparameter_search_space(): # value="max_depth") #cond_max_depth = EqualsCondition(child=max_depth, parent=use_max_depth, - value="True") + #value="True") #cs.add_condition(cond_max_leaf_nodes_or_max_depth) #cs.add_condition(cond2_max_leaf_nodes_or_max_depth) #cs.add_condition(cond_max_depth) diff --git a/AutoSklearn/components/preprocessing/__init__.py b/AutoSklearn/components/preprocessing/__init__.py index ce970e6709..ffdb628b8f 100644 --- a/AutoSklearn/components/preprocessing/__init__.py +++ b/AutoSklearn/components/preprocessing/__init__.py @@ -19,7 +19,7 @@ for member_name, obj in inspect.getmembers(module): if inspect.isclass(obj) and AutoSklearnPreprocessingAlgorithm in obj.__bases__: # TODO test if the obj implements the interface - # Keep in mind that this only instantiates the wrapper, + # Keep in mind that this only instantiates the ensemble_wrapper, # but not the real target classifier preprocessor = obj _preprocessors[module_name] = preprocessor From f78a2a0d858d2ec04672af546720b9194e569051 Mon Sep 17 00:00:00 2001 From: kleinaa Date: Mon, 5 Jan 2015 16:15:07 +0100 Subject: [PATCH 066/352] more bugfixes (default maxfeatures was out of bounds) --- AutoSklearn/components/classification/extra_trees.py | 2 +- AutoSklearn/components/classification/gradient_boosting.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index a566a986d1..b2e9be89c0 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -116,7 +116,7 @@ def get_hyperparameter_search_space(): criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") max_features = UniformFloatHyperparameter( - "max_features", 0.01, 0.5, default=1.0) + "max_features", 0.01, 0.5, default=0.1) min_samples_split = UniformIntegerHyperparameter( "min_samples_split", 2, 20, default=2) min_samples_leaf = UniformIntegerHyperparameter( diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index aa39b83b46..c952fc200f 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -123,7 +123,7 @@ def get_hyperparameter_search_space(): n_estimators = UniformIntegerHyperparameter( name="n_estimators", lower=10, upper=500, default=10, log=False) max_features = UniformFloatHyperparameter( - name="max_features", lower=0.01, upper=0.5, default=1.0) + name="max_features", lower=0.01, upper=0.5, default=0.1) max_depth = UniformIntegerHyperparameter( name = "max_depth", lower=1, upper=10, default=3) min_samples_split = UniformIntegerHyperparameter( From 286ad72c54bd5a577f20ed3a6037f29aafd9eb86 Mon Sep 17 00:00:00 2001 From: Jost Tobias Springenberg Date: Wed, 7 Jan 2015 11:28:31 +0100 Subject: [PATCH 067/352] first try adding sparse filtering as preprocessing --- .../preprocessing/sparse_filtering.py | 54 +++++++++++++++ .../implementations/SparseFiltering.py | 67 +++++++++++++++++++ 2 files changed, 121 insertions(+) create mode 100644 AutoSklearn/components/preprocessing/sparse_filtering.py create mode 100644 AutoSklearn/implementations/SparseFiltering.py diff --git a/AutoSklearn/components/preprocessing/sparse_filtering.py b/AutoSklearn/components/preprocessing/sparse_filtering.py new file mode 100644 index 0000000000..24cda0732b --- /dev/null +++ b/AutoSklearn/components/preprocessing/sparse_filtering.py @@ -0,0 +1,54 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace, \ + Configuration +from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter + +from ...implementations.SparseFiltering import SparseFiltering + +class SparseFiltering(AutoSklearnPreprocessingAlgorithm): + + def __init__(self, N, maxiter=200): + self.N = N + self.maxiter = maxiter + + def fit(self, X, Y): + self.preprocessor = SparseFiltering(self.N, self.maxiter) + self.preprocessor.fit(X, Y) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(): + return {'shortname': 'PCA', + 'name': 'Principle Component Analysis', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': False, + 'handles_sparse': False, + 'preferred_dtype': None} + + + + @staticmethod + def get_hyperparameter_search_space(): + N = UniformIntegerHyperparameter( + "N", 100, 1000, default=200) + maxiter = UniformIntegerHyperparameter( + "maxiter", 50, 500, default=200) + cs = ConfigurationSpace() + cs.add_hyperparameter(N) + cs.add_hyperparameter(maxiter) + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "AutoSklearn %" % name diff --git a/AutoSklearn/implementations/SparseFiltering.py b/AutoSklearn/implementations/SparseFiltering.py new file mode 100644 index 0000000000..ead8e6aa8f --- /dev/null +++ b/AutoSklearn/implementations/SparseFiltering.py @@ -0,0 +1,67 @@ +""" +This quickly adapted version of sparse filtering requires scipy and numpy +""" +import numpy as np +from scipy.optimize import minimize + +def l2row(X): + """ + L2 normalize X by rows. We also use this to normalize by column with l2row(X.T) + """ + N = np.sqrt((X**2).sum(axis=1)+1e-8) + Y = (X.T/N).T + return Y,N + + +def l2rowg(X,Y,N,D): + """ + Compute L2 normalized gradient. + """ + return (D.T/N - Y.T * (D*X).sum(axis=1) / N**2).T + + +class SparseFiltering(object): + def __init__(self, N, maxiter=200): + self.N = N + self.W = None + self.maxiter = 200 + + def step(self, X, W): + # returns current objective and gradient + W = W.reshape((X.shape[1], self.N)) + features = W.dot(X) + features_norm = np.sqrt(features**2 + 1e-8) + features_column, column_norm = l2row(features_norm.T) + features_row, row_norm = l2row(features_norm) + # compute objective function (l1 norm of features) + obj = features_row.sum() + # backprop through the whole process + deltaW = l2rowg(features_norm, feautres_row, row_norm, np.ones(features_row.shape)) + deltaW = l2rowg(features_norm.T, features_column, column_norm, deltaW.T) + deltaW = X.T.dot(deltaW*(features/features_norm)) + return obj, deltaW.flatten() + + + def fit(self, X, y=None): + """ fit sparse filtering to data + this completely ignores y + """ + # init random weights + W = np.random.randn(N,X.shape[1]) + # build a closure for the objective + obj_fun = lambda w: self.step(X, w) + # evaluate once for testing + obj, grad = obj_fun(W) + # and run optimization + opt = {'maxiter': self.maxiter} + res = minimize(obj_fun, W, method='L-BFGS-B', jac = True, options = opt) + self.W = res.x.reshape(X.shape[1], N) + + def transform(self, X): + # compute responses + features = X.dot(W) + # sparsify + features_norm = np.sqrt(features**2 + 1e-8) + features_column = l2row(features_norm.T)[0] + features_row = l2row(features_column)[0].T + return features_row From 43bfbc610f0736e072543d320dc9d69657153e25 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 7 Jan 2015 16:07:35 +0100 Subject: [PATCH 068/352] Update OneHotEncoder --- AutoSklearn/implementations/OneHotEncoder.py | 6 +++--- source/components.rst | 12 ++++++++++++ tests/implementations/test_OneHotEncoder.py | 18 ++++++++++++++++++ 3 files changed, 33 insertions(+), 3 deletions(-) diff --git a/AutoSklearn/implementations/OneHotEncoder.py b/AutoSklearn/implementations/OneHotEncoder.py index f14fb77e17..778c1b507c 100644 --- a/AutoSklearn/implementations/OneHotEncoder.py +++ b/AutoSklearn/implementations/OneHotEncoder.py @@ -182,7 +182,7 @@ def _fit_transform(self, X): else index + offset for index in inverse] data_idx = [0 if index >= n_uniques else 1 for index in inverse] - feature_indices_idx = {unique: index + offset + feature_indices_idx = {str(unique): index + offset for index, unique in enumerate(unique_elements) if np.isfinite(unique)} @@ -191,7 +191,7 @@ def _fit_transform(self, X): feature_indices.append(feature_indices_idx) row_indices = np.tile(np.arange(n_samples, dtype=np.int32), - n_features) + n_features) self.feature_indices_ = feature_indices self.n_values = n_values @@ -212,7 +212,7 @@ def fit_transform(self, X, y=None): def _transform(self, X): """Assumes X contains only categorical features.""" - X = check_arrays(X, sparse_format='dense', allow_nans=True)[0] + X = check_arrays(X, sparse_format='csc', allow_nans=True)[0] n_samples, n_features = X.shape indices = self.feature_indices_ diff --git a/source/components.rst b/source/components.rst index f8f485b1da..d3f234a0c8 100644 --- a/source/components.rst +++ b/source/components.rst @@ -10,6 +10,15 @@ Classification A list of all classification algorithms considered in the AutoSklearn search space. +.. autoclass:: AutoSklearn.components.classification.extra_trees.ExtraTreesClassifier + :members: + +.. autoclass:: AutoSklearn.components.classification.gradient_boosting.GradientBoostingClassifier + :members: + +.. autoclass:: AutoSklearn.components.classification.k_nearest_neighbors.KNearestNeighborsClassifier + :members: + .. autoclass:: AutoSklearn.components.classification.liblinear.LibLinear_SVC :members: @@ -19,6 +28,9 @@ A list of all classification algorithms considered in the AutoSklearn search spa .. autoclass:: AutoSklearn.components.classification.random_forest.RandomForest :members: +.. autoclass:: AutoSklearn.components.classification.sgd.SGD + :members: + Regression ========== diff --git a/tests/implementations/test_OneHotEncoder.py b/tests/implementations/test_OneHotEncoder.py index cfa688d26d..10fa7c49cb 100644 --- a/tests/implementations/test_OneHotEncoder.py +++ b/tests/implementations/test_OneHotEncoder.py @@ -20,6 +20,18 @@ [0., 0., 0., 1., 9.], [0., 1., 1., 0., 7.]] +with_string = [("Black", 5, 9), + ("Blue", 3, 7), + ("Red", 2, 5), + (np.NaN, 3, 1), + ("Black", 1, 1)] + +with_string_1h = [[1, 0, 0, 5, 9], + [0, 1, 0, 3, 7], + [0, 0, 1, 2, 5], + [0, 0, 0, 3, 1], + [1, 0, 0, 1, 1]] + class OneHotEncoderTest(unittest.TestCase): def test_dense1(self): @@ -36,6 +48,12 @@ def test_dense2_with_non_sparse_components(self): self.fit_then_transform_dense(dense2_partial_1h, dense2, categorical_features=[True, True, False]) + def test_with_string(self): + self.fit_then_transform(with_string_1h, with_string, + categorical_features=[True, False, False]) + self.fit_then_transform_dense(with_string_1h, with_string, + categorical_features=[True, False, False]) + def fit_then_transform(self, expected, input, categorical_features='all'): ohe = OneHotEncoder(categorical_features=categorical_features) ohe.fit(input) From 63297cf291a3544ebec05429f1e302aecb0d931f Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Wed, 7 Jan 2015 19:26:12 +0100 Subject: [PATCH 069/352] minor --- AutoSklearn/components/classification/gradient_boosting.py | 1 + 1 file changed, 1 insertion(+) diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index c952fc200f..1b29fbfa8e 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -55,6 +55,7 @@ def __init__(self, learning_rate, n_estimators, subsample, self.init = init self.random_state = random_state self.verbose = int(verbose) + self.estimator = None def fit(self, X, Y): self.estimator = sklearn.ensemble.GradientBoostingClassifier( From 227f1a64dde8bddb4afadfc5b4a307c12d76ccb4 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Wed, 7 Jan 2015 19:26:45 +0100 Subject: [PATCH 070/352] fix accuracy test --- tests/test_autosklearn.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index f9eff7de7d..15369033b9 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -1,5 +1,6 @@ __author__ = 'feurerm' +import copy import numpy as np import StringIO import unittest @@ -48,10 +49,11 @@ def test_default_configuration(self): X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris') auto = AutoSklearnClassifier(default) auto = auto.fit(X_train, Y_train) - predictions = auto.predict(X_test) - self.assertAlmostEqual(0.94, - sklearn.metrics.accuracy_score(predictions, Y_test)) - scores = auto.scores(X_test) + predictions = auto.predict(copy.deepcopy(X_test)) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(0.94, accuracy) + model_score = auto.score(copy.deepcopy(X_test), Y_test) + self.assertEqual(model_score, accuracy) def test_get_hyperparameter_search_space(self): cs = AutoSklearnClassifier.get_hyperparameter_search_space() From 3f6d2256588daa99d4ed638dbce66e4202864424 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Wed, 7 Jan 2015 19:27:44 +0100 Subject: [PATCH 071/352] add regression pipeline --- AutoSklearn/autosklearn_regression.py | 413 ++++++++++++++++++ AutoSklearn/components/__init__.py | 1 + AutoSklearn/components/regression/__init__.py | 25 ++ .../components/regression/random_forest.py | 123 ++++++ AutoSklearn/components/regression_base.py | 118 +++++ AutoSklearn/util.py | 30 ++ misc/regressors.csv | 47 ++ tests/components/regression/__init__.py | 0 tests/test_autosklearn_regression.py | 138 ++++++ 9 files changed, 895 insertions(+) create mode 100644 AutoSklearn/autosklearn_regression.py create mode 100644 AutoSklearn/components/regression/__init__.py create mode 100644 AutoSklearn/components/regression/random_forest.py create mode 100644 AutoSklearn/components/regression_base.py create mode 100644 misc/regressors.csv create mode 100644 tests/components/regression/__init__.py create mode 100644 tests/test_autosklearn_regression.py diff --git a/AutoSklearn/autosklearn_regression.py b/AutoSklearn/autosklearn_regression.py new file mode 100644 index 0000000000..da7785cb3d --- /dev/null +++ b/AutoSklearn/autosklearn_regression.py @@ -0,0 +1,413 @@ +from collections import defaultdict +import copy + +import sklearn +if sklearn.__version__ != "0.15.2": + raise ValueError("AutoSklearn supports only sklearn version 0.15.2, " + "you installed %s." % sklearn.__version__) + +from sklearn.base import BaseEstimator, RegressorMixin +from sklearn.pipeline import Pipeline +from sklearn.utils import check_random_state + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ + InactiveHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition + +from . import components as components + + +class AutoSklearnRegressor(BaseEstimator, RegressorMixin): + """This class implements the regression task. + + It implements a pipeline, which includes one preprocessing step and one + regression algorithm. It can render a search space including all known + regression and preprocessing algorithms. + + Contrary to the sklearn API it is not possible to enumerate the + possible parameters in the __init__ function because we only know the + available regressors at runtime. For this reason the user must + specifiy the parameters by passing an instance of + HPOlibConfigSpace.configuration_space.Configuration. + + Parameters + ---------- + configuration : HPOlibConfigSpace.configuration_space.Configuration + The configuration to evaluate. + + random_state : int, RandomState instance or None, optional (default=None) + If int, random_state is the seed used by the random number generator; + If RandomState instance, random_state is the random number generator; + If None, the random number generator is the RandomState instance + used by `np.random`. + + Attributes + ---------- + _estimator : The underlying scikit-learn regression model. This + variable is assigned after a call to the + :meth:`AutoSklearn.autosklearn.AutoSklearnRegressor.fit` method. + + _preprocessor : The underlying scikit-learn preprocessing algorithm. This + variable is only assigned if a preprocessor is specified and + after a call to the + :meth:`AutoSklearn.autosklearn.AutoSklearnRegressor.fit` method. + + See also + -------- + + References + ---------- + + Examples + -------- + + """ + def __init__(self, configuration, random_state=None): + + # TODO check sklearn version! + self.configuration = configuration + + cs = self.get_hyperparameter_search_space() + cs.check_configuration(configuration) + + self._pipeline = None + + if random_state is None: + self.random_state = check_random_state(1) + else: + self.random_state = check_random_state(random_state) + + def fit(self, X, Y, fit_params=None, init_params=None): + """Fit the selected algorithm to the training data. + + Parameters + ---------- + X : array-like or sparse, shape = (n_samples, n_features) + Training data. The preferred type of the matrix (dense or sparse) + depends on the regressor selected. + + y : array-like + Targets + + fit_params : dict + See the documentation of sklearn.pipeline.Pipeline for formatting + instructions. + + init_params : dict + Pass arguments to the constructors of single methods. To pass + arguments to only one of the methods (lets says the + OneHotEncoder), seperate the class name from the argument by a ':'. + + Returns + ------- + self : returns an instance of self. + + Raises + ------ + NoModelException + NoModelException is raised if fit() is called without specifying + a regression algorithm first. + """ + # TODO: perform input validation + # TODO: look if X.shape[0] == y.shape[0] + # TODO: check if the hyperparameters have been set... + # TODO: this is an example of the antipattern of not properly + # initializing a class in the init function! + # TODO: can this happen now that a configuration is specified at + # instantiation time + + steps = [] + init_params_per_method = defaultdict(dict) + if init_params is not None: + for init_param, value in init_params: + method, param = init_param.split(":") + init_params_per_method[method][param] = value + + preprocessors_names = ["imputation", "rescaling", + self.configuration['preprocessor'].value] + + for preproc_name in preprocessors_names: + if preproc_name != "None": + preproc_params = {} + + for instantiated_hyperparameter in self.configuration: + if not instantiated_hyperparameter.hyperparameter.name \ + .startswith(preproc_name): + continue + if isinstance(instantiated_hyperparameter, + InactiveHyperparameter): + continue + + name_ = instantiated_hyperparameter.hyperparameter.name. \ + split(":")[1] + preproc_params[name_] = instantiated_hyperparameter.value + + preproc_params.update(init_params_per_method[preproc_name]) + preprocessor_object = components.preprocessing_components. \ + _preprocessors[preproc_name](random_state=self.random_state, + **preproc_params) + steps.append((preproc_name, preprocessor_object)) + + # Extract Hyperparameters from the configuration object + regressor_name = self.configuration["regressor"].value + regressor_parameters = {} + for instantiated_hyperparameter in self.configuration: + if not instantiated_hyperparameter.hyperparameter.name.startswith( + regressor_name): + continue + if isinstance(instantiated_hyperparameter, InactiveHyperparameter): + continue + + name_ = instantiated_hyperparameter.hyperparameter.name.\ + split(":")[1] + regressor_parameters[name_] = instantiated_hyperparameter.value + + regressor_parameters.update(init_params_per_method[regressor_name]) + regressor_object = components.regression_components._regressors\ + [regressor_name](random_state=self.random_state, + **regressor_parameters) + steps.append((regressor_name, regressor_object)) + + self._validate_input_X(X) + self._validate_input_Y(Y) + + self._pipeline = Pipeline(steps) + self._pipeline.fit(X, Y) + return self + + def predict(self, X): + """Predict the classes using the selected model. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) + Returns the predicted values""" + # TODO check if fit() was called before... + self._validate_input_X(X) + return self._pipeline.predict(X) + + def predict_proba(self, X): + """predict_proba. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) + """ + self._validate_input_X(X) + + Xt = X + for name, transform in self._pipeline.steps[:-1]: + Xt = transform.transform(Xt) + return self._pipeline.steps[-1][-1].predict_proba(Xt) + + def _validate_input_X(self, X): + # TODO: think of all possible states which can occur and how to + # handle them + pass + + def _validate_input_Y(self, Y): + pass + + def add_model_class(self, model): + """ + Raises + ------ + NotImplementedError + """ + raise NotImplementedError() + + @staticmethod + def get_hyperparameter_search_space(include_regressors=None, + exclude_regressors=None, + include_preprocessors=None, + exclude_preprocessors=None, + sparse=False): + # TODO: We assume that there exists only a single regression task. which + # is different to classification where we have multiclass, + # multilabel, etc + """Return the configuration space for the CASH problem. + + Parameters + ---------- + include_regressors : list of str + If include_regressors is given, only the regressors specified + are used. Specify them by their module name; e.g., to include + only the SVM use :python:`include_regressors=['svr']`. + Cannot be used together with :python:`exclude_regressors`. + + exclude_regressors : list of str + If exclude_regressors is given, only the regressors specified + are used. Specify them by their module name; e.g., to include + all regressors except the SVM use + :python:`exclude_regressors=['svr']`. + Cannot be used together with :python:`include_regressors`. + + include_preprocessors : list of str + If include_preprocessors is given, only the preprocessors specified + are used. Specify them by their module name; e.g., to include + only the PCA use :python:`include_preprocessors=['pca']`. + Cannot be used together with :python:`exclude_preprocessors`. + + exclude_preprocessors : list of str + If include_preprocessors is given, only the preprocessors specified + are used. Specify them by their module name; e.g., to include + all preprocessors except the PCA use + :python:`exclude_preprocessors=['pca']`. + Cannot be used together with :python:`include_preprocessors`. + + Returns + ------- + cs : HPOlibConfigSpace.configuration_space.Configuration + The configuration space describing the AutoSklearnClassifier. + + """ + if include_regressors is not None and exclude_regressors is not None: + raise ValueError("The arguments include_regressors and " + "exclude_regressors cannot be used together.") + + if include_preprocessors is not None and exclude_preprocessors is not None: + raise ValueError("The arguments include_preprocessors and " + "exclude_preprocessors cannot be used together.") + + always_active = ["imputation", "rescaling"] + + cs = ConfigurationSpace() + + available_regressors = \ + components.regression_components._regressors + available_preprocessors = \ + components.preprocessing_components._preprocessors + + names = [] + names_ = [] + for name in available_regressors: + if name in always_active: + names_.append(name) + continue + elif include_regressors is not None and \ + name not in include_regressors: + continue + elif exclude_regressors is not None and \ + name in exclude_regressors: + continue + + if sparse is True and available_regressors[name]. \ + get_properties()['handles_sparse'] is False: + continue + names.append(name) + + if len(names + names_) == 0: + raise ValueError("No regressor to build a configuration space " + "for...") + + regressor = CategoricalHyperparameter("regressor", names, + default='random_forest' if 'random_forest' in names else names[0]) + cs.add_hyperparameter(regressor) + for name in names + names_: + + # We have to retrieve the configuration space every time because + # we change the objects it returns. If we reused it, we could not + # retrieve the conditions further down + # TODO implement copy for hyperparameters and forbidden and + # conditions! + + regressor_configuration_space = available_regressors[name]. \ + get_hyperparameter_search_space() + for parameter in regressor_configuration_space.get_hyperparameters(): + new_parameter = copy.deepcopy(parameter) + new_parameter.name = "%s:%s" % (name, new_parameter.name) + cs.add_hyperparameter(new_parameter) + # We must only add a condition if the hyperparameter is not + # conditional on something else + if len(regressor_configuration_space. + get_parents_of(parameter)) == 0: + condition = EqualsCondition(new_parameter, regressor, name) + cs.add_condition(condition) + + for condition in available_regressors[name]. \ + get_hyperparameter_search_space().get_conditions(): + dlcs = condition.get_descendant_literal_conditions() + for dlc in dlcs: + if not dlc.child.name.startswith(name): + dlc.child.name = "%s:%s" % (name, dlc.child.name) + if not dlc.parent.name.startswith(name): + dlc.parent.name = "%s:%s" % (name, dlc.parent.name) + cs.add_condition(condition) + + for forbidden_clause in available_regressors[name]. \ + get_hyperparameter_search_space().forbidden_clauses: + dlcs = forbidden_clause.get_descendant_literal_clauses() + for dlc in dlcs: + if not dlc.hyperparameter.name.startswith(name): + dlc.hyperparameter.name = "%s:%s" % (name, + dlc.hyperparameter.name) + cs.add_forbidden_clause(forbidden_clause) + + names = [] + names_ = [] + for name in available_preprocessors: + if name in always_active: + names_.append(name) + continue + elif include_preprocessors is not None and \ + name not in include_preprocessors: + continue + elif exclude_preprocessors is not None and \ + name in exclude_preprocessors: + continue + if sparse is True and available_preprocessors[name]. \ + get_properties()['handles_sparse'] is False: + continue + + names.append(name) + + preprocessor = CategoricalHyperparameter("preprocessor", + ["None"] + names, + default='None') + cs.add_hyperparameter(preprocessor) + for name in names + names_: + preprocessor_configuration_space = available_preprocessors[name]. \ + get_hyperparameter_search_space() + for parameter in preprocessor_configuration_space.get_hyperparameters(): + new_parameter = copy.deepcopy(parameter) + new_parameter.name = "%s:%s" % (name, new_parameter.name) + cs.add_hyperparameter(new_parameter) + # We must only add a condition if the hyperparameter is not + # conditional on something else + if len(preprocessor_configuration_space. + get_parents_of(parameter)) == 0 and name not in always_active: + condition = EqualsCondition(new_parameter, preprocessor, name) + cs.add_condition(condition) + + for condition in available_preprocessors[name]. \ + get_hyperparameter_search_space().get_conditions(): + dlcs = condition.get_descendent_literal_conditions() + for dlc in dlcs: + if not dlc.child.name.startswith(name): + dlc.child.name = "%s:%s" % (name, dlc.child.name) + if not dlc.parent.name.startswith(name): + dlc.parent.name = "%s:%s" % (name, dlc.parent.name) + cs.add_condition(condition) + + for forbidden_clause in available_preprocessors[name]. \ + get_hyperparameter_search_space().forbidden_clauses: + dlcs = forbidden_clause.get_descendant_literal_clauses() + for dlc in dlcs: + if not dlc.hyperparameter.startwith(name): + dlc.hyperparameter.name = "%s:%s" % (name, + dlc.hyperparameter.name) + cs.add_forbidden_clause(forbidden_clause) + + return cs + + # TODO: maybe provide an interface to the underlying predictor like + # decision_function or predict_proba \ No newline at end of file diff --git a/AutoSklearn/components/__init__.py b/AutoSklearn/components/__init__.py index 8ff6a8f39e..6d86e4f5d2 100644 --- a/AutoSklearn/components/__init__.py +++ b/AutoSklearn/components/__init__.py @@ -37,4 +37,5 @@ =============""" from . import classification as classification_components +from . import regression as regression_components from . import preprocessing as preprocessing_components diff --git a/AutoSklearn/components/regression/__init__.py b/AutoSklearn/components/regression/__init__.py new file mode 100644 index 0000000000..ea64ab5909 --- /dev/null +++ b/AutoSklearn/components/regression/__init__.py @@ -0,0 +1,25 @@ +__author__ = 'eggenspk' + +import inspect +import os +import pkgutil +import sys + +from ..regression_base import AutoSklearnRegressionAlgorithm + +regressor_directory = os.path.split(__file__)[0] +_regressors = {} + + +for module_loader, module_name, ispkg in pkgutil.iter_modules([regressor_directory]): + full_module_name = "%s.%s" % (__package__, module_name) + if full_module_name not in sys.modules and not ispkg: + module = module_loader.find_module(module_name).load_module(full_module_name) + + for member_name, obj in inspect.getmembers(module): + if inspect.isclass(obj) and AutoSklearnRegressionAlgorithm in obj.__bases__: + # TODO test if the obj implements the interface + # Keep in mind that this only instantiates the ensemble_wrapper, + # but not the real target classifier + classifier = obj + _regressors[module_name] = classifier diff --git a/AutoSklearn/components/regression/random_forest.py b/AutoSklearn/components/regression/random_forest.py new file mode 100644 index 0000000000..22bf3a2369 --- /dev/null +++ b/AutoSklearn/components/regression/random_forest.py @@ -0,0 +1,123 @@ +import numpy as np +import sklearn.ensemble + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from ..regression_base import AutoSklearnRegressionAlgorithm + + +class RandomForest(AutoSklearnRegressionAlgorithm): + def __init__(self, n_estimators, criterion, max_features, + max_depth, min_samples_split, min_samples_leaf, + bootstrap, + max_leaf_nodes_or_max_depth="max_depth", + max_leaf_nodes=None, random_state=None, + n_jobs=1): + self.n_estimators = int(n_estimators) + if criterion in ("mse",): + self.criterion = criterion + else: + raise ValueError("criterion should be in (mse,) but is: %s" % + str(criterion)) + + if max_features in ("sqrt", "log2", "auto"): + raise ValueError("'max_features' should be a float: %s" % + str(max_features)) + self.max_features = float(max_features) + if self.max_features > 1: + raise ValueError("'max_features' > 1: %s" % str(max_features)) + + self.max_leaf_nodes_or_max_depth = str(max_leaf_nodes_or_max_depth) + if self.max_leaf_nodes_or_max_depth == "max_depth": + if max_depth == 'None': + self.max_depth = None + else: + self.max_depth = int(max_depth) + self.max_leaf_nodes = None + elif self.max_leaf_nodes_or_max_depth == "max_leaf_nodes": + self.max_depth = None + if max_leaf_nodes == 'None': + self.max_leaf_nodes = None + else: + self.max_leaf_nodes = int(max_leaf_nodes) + else: + raise ValueError("max_leaf_nodes_or_max_depth sould be in " + "('max_leaf_nodes', 'max_depth'): %s" % + self.max_leaf_nodes_or_max_depth) + self.min_samples_split = int(min_samples_split) + self.min_samples_leaf = int(min_samples_leaf) + + if bootstrap == "True": + self.bootstrap = True + else: + self.bootstrap = False + + self.random_state = random_state + self.n_jobs = n_jobs + self.estimator = None + + def fit(self, X, Y): + self.estimator = sklearn.ensemble.RandomForestRegressor( + n_estimators=self.n_estimators, + criterion=self.criterion, + max_features=self.max_features, + max_depth=self.max_depth, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + bootstrap=self.bootstrap, + max_leaf_nodes=self.max_leaf_nodes, + random_state=self.random_state, + n_jobs=self.n_jobs) + return self.estimator.fit(X, Y) + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + @staticmethod + def get_properties(): + return {'shortname': 'RF', + 'name': 'Random Forest Regressor', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': False, + 'is_deterministic': True, + 'handles_sparse': False, + # TODO find out what is best used here! + # But rather fortran or C-contiguous? + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(): + criterion = Constant(name="criterion", value="mse") + # Copied from classification/random_forest.py + n_estimators = UniformIntegerHyperparameter( + name="n_estimators", lower=10, upper=500, default=10, log=False) + max_features = UniformFloatHyperparameter( + name="max_features", lower=0.01, upper=0.5, default=0.1) + max_depth = UniformIntegerHyperparameter( + name = "max_depth", lower=1, upper=10, default=3) + min_samples_split = UniformIntegerHyperparameter( + name="min_samples_split", lower=2, upper=20, default=2, log=False) + min_samples_leaf = UniformIntegerHyperparameter( + name="min_samples_leaf", lower=1, upper=20, default=1, log=False) + bootstrap = CategoricalHyperparameter( + name="bootstrap", choices=["True", "False"], default="True") + + cs = ConfigurationSpace() + cs.add_hyperparameter(n_estimators) + cs.add_hyperparameter(max_features) + cs.add_hyperparameter(max_depth) + cs.add_hyperparameter(min_samples_split) + cs.add_hyperparameter(min_samples_leaf) + cs.add_hyperparameter(bootstrap) + cs.add_hyperparameter(criterion) + + return cs diff --git a/AutoSklearn/components/regression_base.py b/AutoSklearn/components/regression_base.py new file mode 100644 index 0000000000..14c55a29e8 --- /dev/null +++ b/AutoSklearn/components/regression_base.py @@ -0,0 +1,118 @@ +class AutoSklearnRegressionAlgorithm(object): + """Provide an abstract interface for regression algorithms in + AutoSklearn. + + Make a subclass of this and put it into the directory + `AutoSklearn/components/regression` to make it available.""" + def __init__(self): + self.estimator = None + self.properties = None + + @staticmethod + def get_properties(): + """Get the properties of the underlying algorithm. These are: + + * Short name + * Full name + * Can the algorithm handle missing values? + (handles_missing_values : {True, False}) + * Can the algorithm handle nominal features? + (handles_nominal_features : {True, False}) + * Can the algorithm handle numerical features? + (handles_numerical_features : {True, False}) + * Does the algorithm prefer data scaled in [0,1]? + (prefers_data_scaled : {True, False} + * Does the algorithm prefer data normalized to 0-mean, 1std? + (prefers_data_normalized : {True, False} + * Is the algorithm deterministic for a given seed? + (is_deterministic : {True, False) + * Can the algorithm handle sparse data? + (handles_sparse : {True, False} + * What are the preferred types of the data array? + (preferred_dtype : list of tuples) + + Returns + ------- + dict + """ + raise NotImplementedError() + + @staticmethod + def get_hyperparameter_search_space(): + """Return the configuration space of this regression algorithm. + + Returns + ------- + HPOlibConfigspace.configuration_space.ConfigurationSpace + The configuration space of this regression algorithm. + """ + raise NotImplementedError() + + def fit(self, X, y): + """The fit function calls the fit function of the underlying + scikit-learn model and returns `self`. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + Training data + + y : array-like, shape = [n_samples] + + Returns + ------- + self : returns an instance of self. + Targets + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" + raise NotImplementedError() + + def predict(self, X): + """The predict function calls the predict function of the + underlying scikit-learn model and returns an array with the predictions. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape = (n_samples,) + Returns the predicted values + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" + raise NotImplementedError() + + def predict_proba(self, X): + """Predict probabilities. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) + """ + raise NotImplementedError() + + def get_estimator(self): + """Return the underlying estimator object. + + Returns + ------- + estimator : the underlying estimator object + """ + return self.estimator + + def __str__(self): + name = self.get_properties()['name'] + return "AutoSklearn %" % name diff --git a/AutoSklearn/util.py b/AutoSklearn/util.py index b9aeb1a742..bf587e1f18 100644 --- a/AutoSklearn/util.py +++ b/AutoSklearn/util.py @@ -41,6 +41,36 @@ def find_sklearn_classifiers(): print classifiers +def find_sklearn_regressor(): + classifiers = set() + all_subdirectories = [] + sklearn_path = sklearn.__path__[0] + for root, dirs, files in os.walk(sklearn_path): + all_subdirectories.append(root) + + for module_loader, module_name, ispkg in \ + pkgutil.iter_modules(all_subdirectories): + + # Work around some issues... + if module_name in ["hmm", "mixture"]: + print "Skipping %s" % module_name + continue + + module_file = module_loader.__dict__["path"] + sklearn_module = module_file.replace(sklearn_path, "").replace("/", ".") + full_module_name = "sklearn" + sklearn_module + "." + module_name + + pkg = importlib.import_module(full_module_name) + + for member_name, obj in inspect.getmembers(pkg): + if inspect.isclass(obj) and \ + issubclass(obj, sklearn.base.RegressorMixin): + classifier = obj + print member_name, obj + classifiers.add(classifier) + + print classifiers + def get_dataset(dataset='iris', make_sparse=False): iris = getattr(sklearn.datasets, "load_%s" % dataset)() X = iris.data diff --git a/misc/regressors.csv b/misc/regressors.csv new file mode 100644 index 0000000000..d7d891a4a1 --- /dev/null +++ b/misc/regressors.csv @@ -0,0 +1,47 @@ +Name,class,added,comment +,,, +,,, +,,, +,,, +WeDoNotAddThis,,False,we already have this method +WeDoNotAddThis,,False,See module name +,,,Crashes when predicting a training input and weighted distances +Preprocessing,,False,Preprocessing +,,, +Multitask,,False,MultiTask +Preprocessing,,False,Preprocessing +,,, +,,, +WeDoNotAddThis,,False,We already have this method +,,, +,,, +,, +,,,Can crash when there is no neighbour within the radius +WeDoNotAddThis,,False,BaseClass +,,, +WeDoNotAddThis,,False,We alreday have this method +MultiTask,,False,MultiTask +WeDoNotAddThis,,False,We already have this method +MultiTask,,False,MultiTask +,,, +,,, +WeDoNotAddThis,,False, We already have this method +,,, +,,, +Preprocessing,,False,Preprocessing +,,, +,,, +WeDoNotAddThis,,False,We already have this method +WeDoNotAddThis,,False,We alreday have this method +,,, +,,, +,,, +,,, +,,, +,,, +WeDoNotAddThis,,False,We already have this method +,,,Crashes when getting two similar inputs +,,, +,,, +Preprocessing,,False,Preprocessing + diff --git a/tests/components/regression/__init__.py b/tests/components/regression/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/test_autosklearn_regression.py b/tests/test_autosklearn_regression.py new file mode 100644 index 0000000000..fcb81a4010 --- /dev/null +++ b/tests/test_autosklearn_regression.py @@ -0,0 +1,138 @@ +__author__ = 'eggenspk' + +import copy +import numpy as np +import StringIO +import unittest + +import sklearn.datasets +import sklearn.decomposition +import sklearn.ensemble +import sklearn.svm + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter + +from AutoSklearn.autosklearn_regression import AutoSklearnRegressor +from AutoSklearn.components.regression_base import AutoSklearnRegressionAlgorithm +from AutoSklearn.components.preprocessor_base import AutoSklearnPreprocessingAlgorithm +import AutoSklearn.components.regression as regression_components +import AutoSklearn.components.preprocessing as preprocessing_components +from AutoSklearn.util import get_dataset + + +class TestAutoSKlearnRegressor(unittest.TestCase): + # TODO: test for both possible ways to initialize AutoSklearn + # parameters and other... + + def test_find_classifiers(self): + regressors = regression_components._regressors + self.assertGreaterEqual(len(regressors), 1) + for key in regressors: + self.assertIn(AutoSklearnRegressionAlgorithm, + regressors[key].__bases__) + + def test_find_preprocessors(self): + preprocessors = preprocessing_components._preprocessors + self.assertGreaterEqual(len(preprocessors), 1) + for key in preprocessors: + self.assertIn(AutoSklearnPreprocessingAlgorithm, + preprocessors[key].__bases__) + + def test_get_hyperparameter_search_space(self): + config = AutoSklearnRegressor.get_hyperparameter_search_space() + self.assertIsInstance(config, ConfigurationSpace) + + def test_default_configuration(self): + for i in range(2): + cs = AutoSklearnRegressor.get_hyperparameter_search_space() + default = cs.get_default_configuration() + X_train, Y_train, X_test, Y_test = get_dataset(dataset='diabetes') + auto = AutoSklearnRegressor(default) + auto = auto.fit(X_train, Y_train) + predictions = auto.predict(copy.deepcopy(X_test)) + # The lower the worse + r2_score = sklearn.metrics.r2_score(Y_test, predictions) + self.assertAlmostEqual(0.30805962106685625, r2_score) + model_score = auto.score(copy.deepcopy(X_test), Y_test) + self.assertEqual(model_score, r2_score) + + def test_get_hyperparameter_search_space(self): + cs = AutoSklearnRegressor.get_hyperparameter_search_space() + conditions = cs.get_conditions() + hyperparameters = cs.get_hyperparameters() + self.assertEqual(13, len(hyperparameters)) + self.assertEqual(len(hyperparameters) - 4, len(conditions)) + + def test_get_hyperparameter_search_space_include_exclude_models(self): + cs = AutoSklearnRegressor.get_hyperparameter_search_space( + include_regressors=['random_forest']) + self.assertEqual(cs.get_hyperparameter('regressor'), + CategoricalHyperparameter('regressor', ['random_forest'])) + + # TODO add this test when more than one regressor is present + """ + cs = AutoSklearnRegressor.get_hyperparameter_search_space( + exclude_regressors=['random_forest']) + self.assertNotIn('random_forest', str(cs)) + """ + + cs = AutoSklearnRegressor.get_hyperparameter_search_space( + include_preprocessors=['pca']) + self.assertEqual(cs.get_hyperparameter('preprocessor'), + CategoricalHyperparameter('preprocessor', ["None", 'pca'])) + + cs = AutoSklearnRegressor.get_hyperparameter_search_space( + exclude_preprocessors=['pca']) + self.assertNotIn('pca', str(cs)) + + @unittest.skip("test_get_hyperparameter_search_space_dataset_properties" + + " Not yet Implemented") + def test_get_hyperparameter_search_space_dataset_properties(self): + # TODO: We do not have any dataset properties for regression, so this + # test is somewhat stupid + pass + """ + full_cs = AutoSklearnRegressor.get_hyperparameter_search_space() + cs_mc = AutoSklearnRegressor.get_hyperparameter_search_space() + self.assertEqual(full_cs, cs_mc) + + cs_ml = AutoSklearnRegressor.get_hyperparameter_search_space() + self.assertNotIn('k_nearest_neighbors', str(cs_ml)) + self.assertNotIn('liblinear', str(cs_ml)) + self.assertNotIn('libsvm_svc', str(cs_ml)) + self.assertNotIn('sgd', str(cs_ml)) + + cs_sp = AutoSklearnRegressor.get_hyperparameter_search_space( + sparse=True) + self.assertNotIn('extra_trees', str(cs_sp)) + self.assertNotIn('gradient_boosting', str(cs_sp)) + self.assertNotIn('random_forest', str(cs_sp)) + + cs_mc_ml = AutoSklearnRegressor.get_hyperparameter_search_space() + self.assertEqual(cs_ml, cs_mc_ml) + + self.assertRaisesRegexp(ValueError, + "No regressor to build a configuration space " + "for...", AutoSklearnRegressor. + get_hyperparameter_search_space, + multiclass=True, multilabel=True, sparse=True) + """ + + @unittest.skip("test_check_random_state Not yet Implemented") + def test_check_random_state(self): + raise NotImplementedError() + + @unittest.skip("test_validate_input_X Not yet Implemented") + def test_validate_input_X(self): + raise NotImplementedError() + + @unittest.skip("test_validate_input_Y Not yet Implemented") + def test_validate_input_Y(self): + raise NotImplementedError() + + def test_set_params(self): + pass + + def test_get_params(self): + pass \ No newline at end of file From 478433bd0f9c29b528c212404bf0e9ac0c093705 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Wed, 7 Jan 2015 19:28:38 +0100 Subject: [PATCH 072/352] add random forest as a first regression model --- misc/regressors.csv | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/misc/regressors.csv b/misc/regressors.csv index d7d891a4a1..5f4f887939 100644 --- a/misc/regressors.csv +++ b/misc/regressors.csv @@ -15,7 +15,7 @@ Preprocessing,,False,Preprocessin WeDoNotAddThis,,False,We already have this method ,,, ,,, -,, +,,, ,,,Can crash when there is no neighbour within the radius WeDoNotAddThis,,False,BaseClass ,,, @@ -42,6 +42,5 @@ WeDoNotAddThis,,False,We a WeDoNotAddThis,,False,We already have this method ,,,Crashes when getting two similar inputs ,,, -,,, +RandomForest,,, Preprocessing,,False,Preprocessing - From fb2a61a9eaafa7bcc92b00db380aec413e649b3f Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Wed, 7 Jan 2015 19:32:43 +0100 Subject: [PATCH 073/352] minor --- misc/regressors.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/misc/regressors.csv b/misc/regressors.csv index 5f4f887939..f01fe19e61 100644 --- a/misc/regressors.csv +++ b/misc/regressors.csv @@ -42,5 +42,5 @@ WeDoNotAddThis,,False,We a WeDoNotAddThis,,False,We already have this method ,,,Crashes when getting two similar inputs ,,, -RandomForest,,, +RandomForest,,True, Preprocessing,,False,Preprocessing From cee534f61442e829f26cdf388c5e9357381e993a Mon Sep 17 00:00:00 2001 From: Jost Tobias Springenberg Date: Wed, 7 Jan 2015 20:28:55 +0100 Subject: [PATCH 074/352] remove my random kitchen sink implementation and wrap random kitchen sinks as implemented in scikit --- .../components/preprocessing/kitchen_sinks.py | 66 +++++++++++++++++++ .../preprocessing/test_kitchen_sinks.py | 12 ++++ 2 files changed, 78 insertions(+) create mode 100644 AutoSklearn/components/preprocessing/kitchen_sinks.py create mode 100644 tests/components/preprocessing/test_kitchen_sinks.py diff --git a/AutoSklearn/components/preprocessing/kitchen_sinks.py b/AutoSklearn/components/preprocessing/kitchen_sinks.py new file mode 100644 index 0000000000..c8ce389dd1 --- /dev/null +++ b/AutoSklearn/components/preprocessing/kitchen_sinks.py @@ -0,0 +1,66 @@ +import sklearn.kernel_approximation + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace, \ + Configuration +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter + +from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm + +class RandomKitchenSinks(AutoSklearnPreprocessingAlgorithm): + + def __init__(self, gamma, n_components, random_state = None): + """ Parameters: + gamma: float + Parameter of the rbf kernel to be approximated exp(-gamma * x^2) + + n_components: int + Number of components (output dimensionality) used to approximate the kernel + """ + self.gamma = gamma + self.n_components = n_components + self.random_state = random_state + + + def fit(self, X, Y): + self.preprocessor = sklearn.kernel_approximation.RBFSampler(self.gamma, self.n_components, self.random_state) + self.preprocessor.fit(X, Y) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(): + return {'shortname': 'KitchenSink', + 'name': 'Random Kitchen Sinks', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': False, + # JTS TODO: it should handle sparse data but I have not tested it :) + 'handles_sparse': False, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(): + gamma = UniformFloatHyperparameter( + "gamma", 0.3, 2., default=1.0) + n_components = UniformFloatHyperparameter( + "n_components", 50, 10000, default=100, log=True) + cs = ConfigurationSpace() + cs.add_hyperparameter(gamma) + cs.add_hyperparameter(n_components) + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "AutoSklearn %" % name + diff --git a/tests/components/preprocessing/test_kitchen_sinks.py b/tests/components/preprocessing/test_kitchen_sinks.py new file mode 100644 index 0000000000..abb30fd052 --- /dev/null +++ b/tests/components/preprocessing/test_kitchen_sinks.py @@ -0,0 +1,12 @@ +import unittest + +from AutoSklearn.components.preprocessing.kitchen_sinks import RandomKitchenSinks +from AutoSklearn.util import _test_preprocessing + + +class PCAComponentTest(unittest.TestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(RandomKitchenSinks) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], 100) + self.assertFalse((transformation == 0).all()) From ed7f7dbf3cc29d993e70e402c2f1f2df9ceac1a4 Mon Sep 17 00:00:00 2001 From: Jost Tobias Springenberg Date: Wed, 7 Jan 2015 20:43:26 +0100 Subject: [PATCH 075/352] correct sparse filtering implementation --- .../preprocessing/sparse_filtering.py | 12 +-- .../implementations/SparseFiltering.py | 22 ++++-- .../preprocessing/test_sparse_filtering.py | 11 +++ .../implementations/test_sparse_filtering.py | 74 +++++++++++++++++++ 4 files changed, 106 insertions(+), 13 deletions(-) create mode 100644 tests/components/preprocessing/test_sparse_filtering.py create mode 100644 tests/implementations/test_sparse_filtering.py diff --git a/AutoSklearn/components/preprocessing/sparse_filtering.py b/AutoSklearn/components/preprocessing/sparse_filtering.py index 24cda0732b..1da188fba6 100644 --- a/AutoSklearn/components/preprocessing/sparse_filtering.py +++ b/AutoSklearn/components/preprocessing/sparse_filtering.py @@ -2,16 +2,18 @@ Configuration from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter -from ...implementations.SparseFiltering import SparseFiltering +from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm +from ...implementations.SparseFiltering import SparseFiltering as SparseFilteringImpl class SparseFiltering(AutoSklearnPreprocessingAlgorithm): - def __init__(self, N, maxiter=200): + def __init__(self, N, maxiter=100, random_state=None): self.N = N self.maxiter = maxiter + self.random_state = random_state def fit(self, X, Y): - self.preprocessor = SparseFiltering(self.N, self.maxiter) + self.preprocessor = SparseFilteringImpl(self.N, self.maxiter, random_state = self.random_state) self.preprocessor.fit(X, Y) return self @@ -41,9 +43,9 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(): N = UniformIntegerHyperparameter( - "N", 100, 1000, default=200) + "N", 50, 2000, default=100) maxiter = UniformIntegerHyperparameter( - "maxiter", 50, 500, default=200) + "maxiter", 50, 500, default=100) cs = ConfigurationSpace() cs.add_hyperparameter(N) cs.add_hyperparameter(maxiter) diff --git a/AutoSklearn/implementations/SparseFiltering.py b/AutoSklearn/implementations/SparseFiltering.py index ead8e6aa8f..d9a15e1079 100644 --- a/AutoSklearn/implementations/SparseFiltering.py +++ b/AutoSklearn/implementations/SparseFiltering.py @@ -21,23 +21,29 @@ def l2rowg(X,Y,N,D): class SparseFiltering(object): - def __init__(self, N, maxiter=200): + def __init__(self, N, maxiter=100, random_state=None): self.N = N self.W = None - self.maxiter = 200 + self.maxiter = maxiter + if random_state is None: + self.rng = np.random + elif isinstance(random_state, int): + self.rng = np.random.RandomState(random_state) + else: + self.rng = random_state def step(self, X, W): # returns current objective and gradient W = W.reshape((X.shape[1], self.N)) - features = W.dot(X) + features = X.dot(W) #W.dot(X) features_norm = np.sqrt(features**2 + 1e-8) features_column, column_norm = l2row(features_norm.T) features_row, row_norm = l2row(features_norm) # compute objective function (l1 norm of features) obj = features_row.sum() # backprop through the whole process - deltaW = l2rowg(features_norm, feautres_row, row_norm, np.ones(features_row.shape)) - deltaW = l2rowg(features_norm.T, features_column, column_norm, deltaW.T) + deltaW = l2rowg(features_norm, features_row, row_norm, np.ones(features_row.shape)) + deltaW = l2rowg(features_norm.T, features_column, column_norm, deltaW.T).T deltaW = X.T.dot(deltaW*(features/features_norm)) return obj, deltaW.flatten() @@ -47,7 +53,7 @@ def fit(self, X, y=None): this completely ignores y """ # init random weights - W = np.random.randn(N,X.shape[1]) + W = self.rng.randn(self.N,X.shape[1]) # build a closure for the objective obj_fun = lambda w: self.step(X, w) # evaluate once for testing @@ -55,11 +61,11 @@ def fit(self, X, y=None): # and run optimization opt = {'maxiter': self.maxiter} res = minimize(obj_fun, W, method='L-BFGS-B', jac = True, options = opt) - self.W = res.x.reshape(X.shape[1], N) + self.W = res.x.reshape(X.shape[1], self.N) def transform(self, X): # compute responses - features = X.dot(W) + features = X.dot(self.W) # sparsify features_norm = np.sqrt(features**2 + 1e-8) features_column = l2row(features_norm.T)[0] diff --git a/tests/components/preprocessing/test_sparse_filtering.py b/tests/components/preprocessing/test_sparse_filtering.py new file mode 100644 index 0000000000..d461321205 --- /dev/null +++ b/tests/components/preprocessing/test_sparse_filtering.py @@ -0,0 +1,11 @@ +import unittest + +from AutoSklearn.components.preprocessing.sparse_filtering import SparseFiltering +from AutoSklearn.util import _test_preprocessing + + +class SparseFilteringComponentTest(unittest.TestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(SparseFiltering) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) diff --git a/tests/implementations/test_sparse_filtering.py b/tests/implementations/test_sparse_filtering.py new file mode 100644 index 0000000000..47bc91bc64 --- /dev/null +++ b/tests/implementations/test_sparse_filtering.py @@ -0,0 +1,74 @@ +import unittest +import os +import numpy as np + +from AutoSklearn.implementations.SparseFiltering import SparseFiltering + + +class TestSparseFiltering(unittest.TestCase): + def test_sparse_filtering(self): + """Test sparse filtering on a simple dataset""" + # load a few patches of image data from a file which is currently hard coded :) + # JTS TODO: remove this hard coding + dataset = "/home/springj/data/image_patches.npz" + # try not to break testing if data is not available + if (not os.path.isfile(dataset)): + return + patches = np.load(dataset) + data = patches['data'] + preprocess = SparseFiltering(256, random_state = 123456) + print("BEFORE") + preprocess.fit(data) + # JTS TODO: figure out a better test than this nonsense here ;) + self.assertFalse((preprocess.W == 0).all()) + """ + # JTS: the following is only useful for visualization purposes + # turn it on if you want to see sparse filtering in action on image data ;) + import pylab + # method for eyeballing the features + # assumes features in ROWS not columns! + def displayData(X, example_width = False, display_cols = False): + # compute rows, cols + m,n = X.shape + if not example_width: + example_width = int(np.round(np.sqrt(n))) + example_height = (n/example_width) + # Compute number of items to display + if not display_cols: + display_cols = int(np.sqrt(m)) + display_rows = int(np.ceil(m/display_cols)) + pad = 1 + # Setup blank display + display_array = -np.ones((pad+display_rows * (example_height+pad), + pad+display_cols * (example_width+pad))) + # Copy each example into a patch on the display array + curr_ex = 0 + for j in range(display_rows): + for i in range(display_cols): + if curr_ex>=m: + break + # Copy the patch + # Get the max value of the patch + max_val = abs(X[curr_ex,:]).max() + i_inds = example_width*[pad+j * (example_height+pad)+q for q in range(example_height)] + j_inds = [pad+i * (example_width+pad)+q + for q in range(example_width) + for nn in range(example_height)] + try: + newData = (X[curr_ex,:].reshape((example_height,example_width)))/max_val + except: + print X[curr_ex,:].shape + print (example_height,example_width) + raise + display_array[i_inds,j_inds] = newData.flatten() + curr_ex+=1 + if curr_ex>=m: + break + # Display the image + pylab.imshow(display_array,vmin=-1,vmax=1,interpolation='nearest',cmap=pylab.cm.gray) + pylab.xticks([]) + pylab.yticks([]) + displayData(preprocess.W.T) + pylab.show() + #""" + From a0c9f6257be9be6547bbf76448c91797d5ab674b Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Thu, 8 Jan 2015 08:36:34 +0100 Subject: [PATCH 076/352] fix #hyperparameter --- tests/test_autosklearn_regression.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_autosklearn_regression.py b/tests/test_autosklearn_regression.py index fcb81a4010..2b13e540e7 100644 --- a/tests/test_autosklearn_regression.py +++ b/tests/test_autosklearn_regression.py @@ -61,7 +61,7 @@ def test_get_hyperparameter_search_space(self): cs = AutoSklearnRegressor.get_hyperparameter_search_space() conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(13, len(hyperparameters)) + self.assertEqual(17, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From 0ec3a814145f8e7fc05415d5cca87806ff7994e2 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Thu, 8 Jan 2015 08:41:20 +0100 Subject: [PATCH 077/352] merge two test_get_hyperparameter_search_space methods --- tests/test_autosklearn.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index a763b7bb2a..f64715d7ee 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -38,10 +38,6 @@ def test_find_preprocessors(self): self.assertIn(AutoSklearnPreprocessingAlgorithm, preprocessors[key].__bases__) - def test_get_hyperparameter_search_space(self): - config = AutoSklearnClassifier.get_hyperparameter_search_space() - self.assertIsInstance(config, ConfigurationSpace) - def test_default_configuration(self): for i in range(2): cs = AutoSklearnClassifier.get_hyperparameter_search_space() @@ -56,6 +52,7 @@ def test_default_configuration(self): def test_get_hyperparameter_search_space(self): cs = AutoSklearnClassifier.get_hyperparameter_search_space() + self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() self.assertEqual(67, len(hyperparameters)) From d19933b22b24d131b5824a1641b28f93e7f30e2b Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Thu, 8 Jan 2015 08:42:12 +0100 Subject: [PATCH 078/352] merge two test_get_hyperparameter_search_space methods --- tests/test_autosklearn_regression.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/test_autosklearn_regression.py b/tests/test_autosklearn_regression.py index 2b13e540e7..e78f8fdd86 100644 --- a/tests/test_autosklearn_regression.py +++ b/tests/test_autosklearn_regression.py @@ -39,10 +39,6 @@ def test_find_preprocessors(self): self.assertIn(AutoSklearnPreprocessingAlgorithm, preprocessors[key].__bases__) - def test_get_hyperparameter_search_space(self): - config = AutoSklearnRegressor.get_hyperparameter_search_space() - self.assertIsInstance(config, ConfigurationSpace) - def test_default_configuration(self): for i in range(2): cs = AutoSklearnRegressor.get_hyperparameter_search_space() @@ -59,6 +55,7 @@ def test_default_configuration(self): def test_get_hyperparameter_search_space(self): cs = AutoSklearnRegressor.get_hyperparameter_search_space() + self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() self.assertEqual(17, len(hyperparameters)) From f33525fef200a78c60544d52bb968b67660260c3 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Thu, 8 Jan 2015 09:21:34 +0100 Subject: [PATCH 079/352] add Ridge regression --- .../components/regression/ridge_regression.py | 67 +++++++++++++++++++ AutoSklearn/util.py | 17 ++++- misc/regressors.csv | 2 +- .../regression/test_ridge_regression.py | 42 ++++++++++++ tests/test_autosklearn_regression.py | 2 +- 5 files changed, 127 insertions(+), 3 deletions(-) create mode 100644 AutoSklearn/components/regression/ridge_regression.py create mode 100644 tests/components/regression/test_ridge_regression.py diff --git a/AutoSklearn/components/regression/ridge_regression.py b/AutoSklearn/components/regression/ridge_regression.py new file mode 100644 index 0000000000..bf27b4891e --- /dev/null +++ b/AutoSklearn/components/regression/ridge_regression.py @@ -0,0 +1,67 @@ +import numpy as np +import sklearn.linear_model + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from ..regression_base import AutoSklearnRegressionAlgorithm + + +class RidgeRegression(AutoSklearnRegressionAlgorithm): + def __init__(self, alpha, fit_intercept=False, normalize=False, + copy_X=False, max_iter=None, tol=0.001, solver='auto', + random_state=None): + self.alpha = float(alpha) + self.fit_intercept = fit_intercept + self.normalize = normalize + self.copy_X = copy_X + self.max_iter = max_iter + self.tol = tol + self.solver = solver + # We ignore it + self.random_state = random_state + self.estimator = None + + def fit(self, X, Y): + self.estimator = sklearn.linear_model.Ridge( + alpha=self.alpha, + fit_intercept=self.fit_intercept, + normalize=self.normalize, + copy_X=self.copy_X, + max_iter=self.max_iter, + tol=self.tol, + solver=self.solver) + + return self.estimator.fit(X, Y) + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + @staticmethod + def get_properties(): + return {'shortname': 'RR', + 'name': 'Ridge Regression', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': True, + 'is_deterministic': True, + 'handles_sparse': True, + # TODO find out what is best used here! + # But rather fortran or C-contiguous? + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(): + alpha = UniformFloatHyperparameter( + name="alpha", lower=0.0001, upper=10, default=1.0) + + cs = ConfigurationSpace() + cs.add_hyperparameter(alpha) + return cs diff --git a/AutoSklearn/util.py b/AutoSklearn/util.py index bf587e1f18..c899373598 100644 --- a/AutoSklearn/util.py +++ b/AutoSklearn/util.py @@ -71,6 +71,7 @@ def find_sklearn_regressor(): print classifiers + def get_dataset(dataset='iris', make_sparse=False): iris = getattr(sklearn.datasets, "load_%s" % dataset)() X = iris.data @@ -126,5 +127,19 @@ def _test_preprocessing(Preprocessor, dataset='iris', make_sparse=False): return transformer.transform(X_train), original_X_train +def _test_regressor(Regressor, dataset='diabetes'): + X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, + make_sparse=False) + configuration_space = Regressor.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + regressor = Regressor(random_state=1, + **{hp.hyperparameter.name: hp.value for hp in + default.values.values()}) + predictor = regressor.fit(X_train, Y_train) + predictions = predictor.predict(X_test) + return predictions, Y_test + + if __name__ == "__main__": - find_sklearn_classifiers() \ No newline at end of file + find_sklearn_classifiers() + find_sklearn_regressor() \ No newline at end of file diff --git a/misc/regressors.csv b/misc/regressors.csv index f01fe19e61..df7dbe7afb 100644 --- a/misc/regressors.csv +++ b/misc/regressors.csv @@ -1,7 +1,7 @@ Name,class,added,comment ,,, ,,, -,,, +RidgeRegression,,True,Check range for alpha ,,, WeDoNotAddThis,,False,we already have this method WeDoNotAddThis,,False,See module name diff --git a/tests/components/regression/test_ridge_regression.py b/tests/components/regression/test_ridge_regression.py new file mode 100644 index 0000000000..b2a85e19d8 --- /dev/null +++ b/tests/components/regression/test_ridge_regression.py @@ -0,0 +1,42 @@ +import unittest + +from AutoSklearn.components.regression.ridge_regression import RidgeRegression +from AutoSklearn.components.preprocessing.kitchen_sinks import RandomKitchenSinks +from AutoSklearn.util import _test_regressor, get_dataset + +import sklearn.metrics + + +class RandomForestComponentTest(unittest.TestCase): + def test_default_configuration(self): + configuration_space = RidgeRegression.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + configuration_space_preproc = RandomKitchenSinks.get_hyperparameter_search_space() + default_preproc = configuration_space_preproc.get_default_configuration() + + for i in range(10): + # This should be a bad results + predictions, targets = _test_regressor(RidgeRegression, + dataset='diabetes') + self.assertAlmostEqual(-3.726787582018825, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) + + # This should be much more better + X_train, Y_train, X_test, Y_test = get_dataset(dataset='diabetes', + make_sparse=False) + preprocessor = RandomKitchenSinks( + random_state=1, + **{hp.hyperparameter.name: hp.value for hp in default_preproc.values.values()}) + + transformer = preprocessor.fit(X_train, Y_train) + X_train_transformed = transformer.transform(X_train) + X_test_transformed = transformer.transform(X_test) + + regressor = RidgeRegression( + random_state=1, + **{hp.hyperparameter.name: hp.value for hp in default.values.values()}) + predictor = regressor.fit(X_train_transformed, Y_train) + predictions = predictor.predict(X_test_transformed) + + self.assertAlmostEqual(0.24658871483206091, + sklearn.metrics.r2_score(y_true=Y_test, y_pred=predictions)) \ No newline at end of file diff --git a/tests/test_autosklearn_regression.py b/tests/test_autosklearn_regression.py index e78f8fdd86..cdacd9c202 100644 --- a/tests/test_autosklearn_regression.py +++ b/tests/test_autosklearn_regression.py @@ -58,7 +58,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(17, len(hyperparameters)) + self.assertEqual(18, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From d4dc18efeb2f258d17239986b518b237c788013d Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Thu, 8 Jan 2015 09:22:05 +0100 Subject: [PATCH 080/352] add test for random forest regression --- .../components/regression/test_random_forests.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 tests/components/regression/test_random_forests.py diff --git a/tests/components/regression/test_random_forests.py b/tests/components/regression/test_random_forests.py new file mode 100644 index 0000000000..9932f9213c --- /dev/null +++ b/tests/components/regression/test_random_forests.py @@ -0,0 +1,16 @@ +import unittest + +from AutoSklearn.components.regression.random_forest import RandomForest +from AutoSklearn.util import _test_regressor + +import sklearn.metrics + + +class RandomForestComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + + predictions, targets = _test_regressor(RandomForest, + dataset='diabetes') + self.assertAlmostEqual(0.30805962106685625, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) \ No newline at end of file From 2a0e6bca217fb6855ebaf572a4bdb99b3095bf67 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 8 Jan 2015 09:22:33 +0100 Subject: [PATCH 081/352] Fix OneHotEncoder test --- AutoSklearn/implementations/OneHotEncoder.py | 4 ++-- tests/implementations/test_OneHotEncoder.py | 12 ------------ tests/implementations/test_standard_scaler.py | 1 - 3 files changed, 2 insertions(+), 15 deletions(-) diff --git a/AutoSklearn/implementations/OneHotEncoder.py b/AutoSklearn/implementations/OneHotEncoder.py index 778c1b507c..3c11346bd3 100644 --- a/AutoSklearn/implementations/OneHotEncoder.py +++ b/AutoSklearn/implementations/OneHotEncoder.py @@ -175,6 +175,7 @@ def _fit_transform(self, X): # Number of unique elements in that column (without np.NaN) n_uniques = np.sum(np.isfinite(unique_elements)) + n_values.append(n_uniques) offset = np.sum(n_values[:-1]) @@ -182,7 +183,7 @@ def _fit_transform(self, X): else index + offset for index in inverse] data_idx = [0 if index >= n_uniques else 1 for index in inverse] - feature_indices_idx = {str(unique): index + offset + feature_indices_idx = {unique: index + offset for index, unique in enumerate(unique_elements) if np.isfinite(unique)} @@ -221,7 +222,6 @@ def _transform(self, X): " Expected %d, got %d." % (len(indices), n_features)) - #column_indices = (X + indices[:-1]).ravel() row_indices = np.tile(np.arange(n_samples, dtype=np.int32), n_features) diff --git a/tests/implementations/test_OneHotEncoder.py b/tests/implementations/test_OneHotEncoder.py index 10fa7c49cb..cdcec7e937 100644 --- a/tests/implementations/test_OneHotEncoder.py +++ b/tests/implementations/test_OneHotEncoder.py @@ -20,12 +20,6 @@ [0., 0., 0., 1., 9.], [0., 1., 1., 0., 7.]] -with_string = [("Black", 5, 9), - ("Blue", 3, 7), - ("Red", 2, 5), - (np.NaN, 3, 1), - ("Black", 1, 1)] - with_string_1h = [[1, 0, 0, 5, 9], [0, 1, 0, 3, 7], [0, 0, 1, 2, 5], @@ -48,12 +42,6 @@ def test_dense2_with_non_sparse_components(self): self.fit_then_transform_dense(dense2_partial_1h, dense2, categorical_features=[True, True, False]) - def test_with_string(self): - self.fit_then_transform(with_string_1h, with_string, - categorical_features=[True, False, False]) - self.fit_then_transform_dense(with_string_1h, with_string, - categorical_features=[True, False, False]) - def fit_then_transform(self, expected, input, categorical_features='all'): ohe = OneHotEncoder(categorical_features=categorical_features) ohe.fit(input) diff --git a/tests/implementations/test_standard_scaler.py b/tests/implementations/test_standard_scaler.py index 9f34becc8c..6d963f2aba 100644 --- a/tests/implementations/test_standard_scaler.py +++ b/tests/implementations/test_standard_scaler.py @@ -113,7 +113,6 @@ def test_scaler_2d_arrays(self): X = X.tocsr() scaler = StandardScaler() X_scaled = scaler.fit(X).transform(X, copy=False) - print id(X_scaled) self.assertFalse(np.any(np.isnan(X_scaled.data))) assert_array_almost_equal( From e0e1528a6787e53f31a783cafff2a9743b71d07b Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 8 Jan 2015 11:03:46 +0100 Subject: [PATCH 082/352] RKS allows sparse data, fix number of kitchen sinks to be an Integer --- .../components/preprocessing/kitchen_sinks.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/AutoSklearn/components/preprocessing/kitchen_sinks.py b/AutoSklearn/components/preprocessing/kitchen_sinks.py index c8ce389dd1..6b12d25f98 100644 --- a/AutoSklearn/components/preprocessing/kitchen_sinks.py +++ b/AutoSklearn/components/preprocessing/kitchen_sinks.py @@ -1,9 +1,8 @@ import sklearn.kernel_approximation -from HPOlibConfigSpace.configuration_space import ConfigurationSpace, \ - Configuration +from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - CategoricalHyperparameter + UniformIntegerHyperparameter from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm @@ -20,7 +19,6 @@ def __init__(self, gamma, n_components, random_state = None): self.gamma = gamma self.n_components = n_components self.random_state = random_state - def fit(self, X, Y): self.preprocessor = sklearn.kernel_approximation.RBFSampler(self.gamma, self.n_components, self.random_state) @@ -44,16 +42,15 @@ def get_properties(): 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': True, - 'is_deterministic': False, - # JTS TODO: it should handle sparse data but I have not tested it :) - 'handles_sparse': False, + 'is_deterministic': True, + 'handles_sparse': True, 'preferred_dtype': None} @staticmethod def get_hyperparameter_search_space(): gamma = UniformFloatHyperparameter( "gamma", 0.3, 2., default=1.0) - n_components = UniformFloatHyperparameter( + n_components = UniformIntegerHyperparameter( "n_components", 50, 10000, default=100, log=True) cs = ConfigurationSpace() cs.add_hyperparameter(gamma) From 8763f5e82b0e0fcc96e416dd5f64ba4b29e661a1 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 8 Jan 2015 11:06:15 +0100 Subject: [PATCH 083/352] Pass fit_params to the respective components --- AutoSklearn/autosklearn.py | 7 ++++++- AutoSklearn/autosklearn_regression.py | 7 ++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index 81f5afe27e..263e5039f2 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -174,7 +174,12 @@ def fit(self, X, Y, fit_params=None, init_params=None): self._validate_input_Y(Y) self._pipeline = Pipeline(steps) - self._pipeline.fit(X, Y) + if fit_params is None or not isinstance(fit_params, dict): + fit_params = dict() + else: + fit_params = {key.replace(":", "__"): value for key, value in + fit_params.items()} + self._pipeline.fit(X, Y, **fit_params) return self def predict(self, X): diff --git a/AutoSklearn/autosklearn_regression.py b/AutoSklearn/autosklearn_regression.py index da7785cb3d..581c8f6662 100644 --- a/AutoSklearn/autosklearn_regression.py +++ b/AutoSklearn/autosklearn_regression.py @@ -173,7 +173,12 @@ def fit(self, X, Y, fit_params=None, init_params=None): self._validate_input_Y(Y) self._pipeline = Pipeline(steps) - self._pipeline.fit(X, Y) + if fit_params is None or not isinstance(fit_params, dict): + fit_params = dict() + else: + fit_params = {key.replace(":", "__"): value for key, value in + fit_params.items()} + self._pipeline.fit(X, Y, **fit_params) return self def predict(self, X): From 92d18c695eeebd51b534752a90595835fc58e51f Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 9 Jan 2015 09:00:48 +0100 Subject: [PATCH 084/352] reduce n_estimators of all tree-based models to 100 --- AutoSklearn/components/classification/extra_trees.py | 2 +- AutoSklearn/components/classification/gradient_boosting.py | 2 +- AutoSklearn/components/classification/random_forest.py | 2 +- AutoSklearn/components/regression/random_forest.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index b2e9be89c0..d60369fcd1 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -112,7 +112,7 @@ def get_hyperparameter_search_space(): # Copied from random_forest.py n_estimators = UniformIntegerHyperparameter( - "n_estimators", 10, 500, default=10) + "n_estimators", 10, 100, default=10) criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") max_features = UniformFloatHyperparameter( diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index 08f43a8492..cf70ac4272 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -122,7 +122,7 @@ def get_hyperparameter_search_space(): # Copied from random_forest.py n_estimators = UniformIntegerHyperparameter( - name="n_estimators", lower=10, upper=500, default=10, log=False) + name="n_estimators", lower=10, upper=100, default=10, log=False) max_features = UniformFloatHyperparameter( name="max_features", lower=0.01, upper=0.5, default=0.1) max_depth = UniformIntegerHyperparameter( diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index b3040134c7..7594ba8365 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -86,7 +86,7 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(): n_estimators = UniformIntegerHyperparameter( - "n_estimators", 10, 500, default=10) + "n_estimators", 10, 100, default=10) criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") max_features = UniformFloatHyperparameter( diff --git a/AutoSklearn/components/regression/random_forest.py b/AutoSklearn/components/regression/random_forest.py index 22bf3a2369..56124480f8 100644 --- a/AutoSklearn/components/regression/random_forest.py +++ b/AutoSklearn/components/regression/random_forest.py @@ -99,7 +99,7 @@ def get_hyperparameter_search_space(): criterion = Constant(name="criterion", value="mse") # Copied from classification/random_forest.py n_estimators = UniformIntegerHyperparameter( - name="n_estimators", lower=10, upper=500, default=10, log=False) + name="n_estimators", lower=10, upper=100, default=10, log=False) max_features = UniformFloatHyperparameter( name="max_features", lower=0.01, upper=0.5, default=0.1) max_depth = UniformIntegerHyperparameter( From e918b3495981cf41af9c2cfc3cd3363eaa49bd45 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 9 Jan 2015 11:04:57 +0100 Subject: [PATCH 085/352] Regression Random Forest remove max_depth --- AutoSklearn/components/regression/random_forest.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/AutoSklearn/components/regression/random_forest.py b/AutoSklearn/components/regression/random_forest.py index 22bf3a2369..c09669425a 100644 --- a/AutoSklearn/components/regression/random_forest.py +++ b/AutoSklearn/components/regression/random_forest.py @@ -102,8 +102,7 @@ def get_hyperparameter_search_space(): name="n_estimators", lower=10, upper=500, default=10, log=False) max_features = UniformFloatHyperparameter( name="max_features", lower=0.01, upper=0.5, default=0.1) - max_depth = UniformIntegerHyperparameter( - name = "max_depth", lower=1, upper=10, default=3) + max_depth = UnParametrizedHyperparameter("max_depth", "None") min_samples_split = UniformIntegerHyperparameter( name="min_samples_split", lower=2, upper=20, default=2, log=False) min_samples_leaf = UniformIntegerHyperparameter( From e82d40c46ea4fbd3535d2f4ce705cf5861daec42 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 9 Jan 2015 11:05:22 +0100 Subject: [PATCH 086/352] Regression Ridge Regression put alpha on a log-scale --- AutoSklearn/components/regression/ridge_regression.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AutoSklearn/components/regression/ridge_regression.py b/AutoSklearn/components/regression/ridge_regression.py index bf27b4891e..71bff71849 100644 --- a/AutoSklearn/components/regression/ridge_regression.py +++ b/AutoSklearn/components/regression/ridge_regression.py @@ -60,7 +60,7 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(): alpha = UniformFloatHyperparameter( - name="alpha", lower=0.0001, upper=10, default=1.0) + name="alpha", lower=0.0001, upper=10, default=1.0, log=True) cs = ConfigurationSpace() cs.add_hyperparameter(alpha) From 1cbf2b6a959f90c9161175403dbbb8de84b85f36 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 9 Jan 2015 11:06:07 +0100 Subject: [PATCH 087/352] Only use feature learning with linear models --- AutoSklearn/autosklearn.py | 21 ++++++++++++++++++++- AutoSklearn/autosklearn_regression.py | 14 ++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index 263e5039f2..fde6f77f68 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -1,5 +1,6 @@ from collections import defaultdict import copy +from itertools import product import sklearn if sklearn.__version__ != "0.15.2": @@ -9,12 +10,12 @@ from sklearn.base import BaseEstimator, ClassifierMixin from sklearn.pipeline import Pipeline from sklearn.utils import check_random_state -from sklearn.utils.validation import safe_asarray, assert_all_finite from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ InactiveHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction from . import components as components @@ -468,6 +469,24 @@ def get_hyperparameter_search_space(include_classifiers=None, dlc.hyperparameter.name) cs.add_forbidden_clause(forbidden_clause) + # And now add forbidden parameter configurations which would take too + # long + + # Combinations of tree-based models with feature learning: + classifiers_ = ["extra_trees", "gradient_boosting", + "k_nearest_neighbors", "libsvm_svc", "random_forest"] + feature_learning_ = ["kitchen_sinks", "sparse_filtering"] + + for c, f in product(classifiers_, feature_learning_): + try: + cs.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(cs.get_hyperparameter( + "classifier"), c), + ForbiddenEqualsClause(cs.get_hyperparameter( + "preprocessor"), f))) + except: + pass + return cs # TODO: maybe provide an interface to the underlying predictor like diff --git a/AutoSklearn/autosklearn_regression.py b/AutoSklearn/autosklearn_regression.py index 581c8f6662..dac5ee92ec 100644 --- a/AutoSklearn/autosklearn_regression.py +++ b/AutoSklearn/autosklearn_regression.py @@ -14,6 +14,7 @@ from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ InactiveHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction from . import components as components @@ -412,6 +413,19 @@ def get_hyperparameter_search_space(include_regressors=None, dlc.hyperparameter.name) cs.add_forbidden_clause(forbidden_clause) + # And now add forbidden parameter configurations which would take too + # long + cs.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(cs.get_hyperparameter("regressor"), + "random_forest"), + ForbiddenEqualsClause(cs.get_hyperparameter("preprocessor"), + "kitchen_sinks"))) + cs.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(cs.get_hyperparameter("regressor"), + "random_forest"), + ForbiddenEqualsClause(cs.get_hyperparameter("preprocessor"), + "sparse_filtering"))) + return cs # TODO: maybe provide an interface to the underlying predictor like From 7cb6c2272c853f53739661d40238d63d84422c35 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 9 Jan 2015 11:18:51 +0100 Subject: [PATCH 088/352] Improve classification defaults --- AutoSklearn/autosklearn.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index fde6f77f68..77f4a2ab28 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -364,8 +364,19 @@ def get_hyperparameter_search_space(include_classifiers=None, raise ValueError("No classifier to build a configuration space " "for...") + # Hardcode the defaults based on some educated guesses + classifier_defaults = ['random_forest', 'liblinear', 'sgd', + 'libsvm_svc'] + classifier_default = None + for cd_ in classifier_defaults: + if cd_ in names: + classifier_default = cd_ + break + if classifier_default is None: + classifier_default = names[0] + classifier = CategoricalHyperparameter("classifier", names, - default='random_forest' if 'random_forest' in names else names[0]) + default=classifier_default) cs.add_hyperparameter(classifier) for name in names + names_: From 77cf9e260deab90b75b7cc24b489a389437266c3 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 9 Jan 2015 19:36:10 +0100 Subject: [PATCH 089/352] Remove dependencies to scikit-learn version 0.15.2 --- AutoSklearn/autosklearn.py | 8 +- AutoSklearn/autosklearn_regression.py | 6 +- .../components/classification/extra_trees.py | 9 +- AutoSklearn/implementations/MinMaxScaler.py | 3 +- AutoSklearn/implementations/OneHotEncoder.py | 6 +- AutoSklearn/implementations/StandardScaler.py | 8 +- AutoSklearn/sklearn_backward_fixes.py | 316 ++++++++++++++++ AutoSklearn/sklearn_backward_validation.py | 353 ++++++++++++++++++ setup.py | 2 +- .../classification/test_extra_trees.py | 2 +- .../regression/test_ridge_regression.py | 2 +- 11 files changed, 693 insertions(+), 22 deletions(-) create mode 100644 AutoSklearn/sklearn_backward_fixes.py create mode 100644 AutoSklearn/sklearn_backward_validation.py diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index 263e5039f2..e983db838d 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -2,14 +2,14 @@ import copy import sklearn -if sklearn.__version__ != "0.15.2": - raise ValueError("AutoSklearn supports only sklearn version 0.15.2, " - "you installed %s." % sklearn.__version__) +#if sklearn.__version__ != "0.15.2": +# raise ValueError("AutoSklearn supports only sklearn version 0.15.2, " +# "you installed %s." % sklearn.__version__) from sklearn.base import BaseEstimator, ClassifierMixin from sklearn.pipeline import Pipeline from sklearn.utils import check_random_state -from sklearn.utils.validation import safe_asarray, assert_all_finite +# from sklearn.utils.validation import safe_asarray, assert_all_finite from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ diff --git a/AutoSklearn/autosklearn_regression.py b/AutoSklearn/autosklearn_regression.py index 581c8f6662..00130d7f3a 100644 --- a/AutoSklearn/autosklearn_regression.py +++ b/AutoSklearn/autosklearn_regression.py @@ -2,9 +2,9 @@ import copy import sklearn -if sklearn.__version__ != "0.15.2": - raise ValueError("AutoSklearn supports only sklearn version 0.15.2, " - "you installed %s." % sklearn.__version__) +#if sklearn.__version__ != "0.15.2": +# raise ValueError("AutoSklearn supports only sklearn version 0.15.2, " +# "you installed %s." % sklearn.__version__) from sklearn.base import BaseEstimator, RegressorMixin from sklearn.pipeline import Pipeline diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index d60369fcd1..89270d199e 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -15,8 +15,7 @@ class ExtraTreesClassifier(AutoSklearnClassificationAlgorithm): def __init__(self, n_estimators, criterion, min_samples_leaf, min_samples_split, max_features, max_leaf_nodes_or_max_depth="max_depth", #use_max_depth=False, bootstrap=False, max_leaf_nodes=None, max_depth="None", - oob_score=False, n_jobs=1, random_state=None, verbose=0, - min_density=None, compute_importances=None): + oob_score=False, n_jobs=1, random_state=None, verbose=0): self.n_estimators = int(n_estimators) if criterion not in ("gini", "entropy"): @@ -58,8 +57,7 @@ def __init__(self, n_estimators, criterion, min_samples_leaf, self.n_jobs = int(n_jobs) self.random_state = random_state self.verbose = int(verbose) - self.min_density = min_density - self.compute_importances = compute_importances + self.estimator = None def fit(self, X, Y): @@ -69,8 +67,7 @@ def fit(self, X, Y): min_samples_leaf=self.min_samples_leaf, bootstrap=self.bootstrap, max_features=self.max_features, max_leaf_nodes=self.max_leaf_nodes, oob_score=self.oob_score, n_jobs=self.n_jobs, verbose=self.verbose, - random_state=self.random_state, min_density=self.min_density, - compute_importances=self.compute_importances + random_state=self.random_state ) return self.estimator.fit(X, Y) diff --git a/AutoSklearn/implementations/MinMaxScaler.py b/AutoSklearn/implementations/MinMaxScaler.py index b69cf239d3..5b2e554e26 100644 --- a/AutoSklearn/implementations/MinMaxScaler.py +++ b/AutoSklearn/implementations/MinMaxScaler.py @@ -2,7 +2,8 @@ from scipy import sparse from sklearn.base import BaseEstimator, TransformerMixin -from sklearn.utils import check_arrays, warn_if_not_float +#from sklearn.utils import check_arrays, warn_if_not_float +from ..sklearn_backward_validation import check_arrays, warn_if_not_float class MinMaxScaler(BaseEstimator, TransformerMixin): diff --git a/AutoSklearn/implementations/OneHotEncoder.py b/AutoSklearn/implementations/OneHotEncoder.py index 3c11346bd3..83ad4daa06 100644 --- a/AutoSklearn/implementations/OneHotEncoder.py +++ b/AutoSklearn/implementations/OneHotEncoder.py @@ -3,8 +3,10 @@ from sklearn.base import BaseEstimator, TransformerMixin from sklearn.externals import six -from sklearn.utils import check_arrays -from sklearn.utils import atleast2d_or_csc, safe_asarray +#from sklearn.utils import check_arrays +#from sklearn.utils import atleast2d_or_csc, safe_asarray +from ..sklearn_backward_validation import check_arrays +from ..sklearn_backward_validation import atleast2d_or_csc, safe_asarray zip = six.moves.zip map = six.moves.map diff --git a/AutoSklearn/implementations/StandardScaler.py b/AutoSklearn/implementations/StandardScaler.py index 2bed2fe1c4..cb869ea80f 100644 --- a/AutoSklearn/implementations/StandardScaler.py +++ b/AutoSklearn/implementations/StandardScaler.py @@ -2,9 +2,11 @@ from scipy import sparse from sklearn.base import BaseEstimator, TransformerMixin -from sklearn.utils import check_arrays, warn_if_not_float +# from sklearn.utils import check_arrays, warn_if_not_float +from ..sklearn_backward_validation import check_arrays, warn_if_not_float + from sklearn.utils.sparsefuncs import inplace_column_scale, \ - mean_variance_axis0 + mean_variance_axis def _mean_and_std(X, axis=0, with_mean=True, with_std=True): @@ -139,7 +141,7 @@ def fit(self, X, y=None): self.mean_ = None if self.with_std: - var = mean_variance_axis0(X)[1] + var = mean_variance_axis(X=X, axis=0)[1] self.std_ = np.sqrt(var) self.std_[var == 0.0] = 1.0 else: diff --git a/AutoSklearn/sklearn_backward_fixes.py b/AutoSklearn/sklearn_backward_fixes.py new file mode 100644 index 0000000000..416f4f5c1b --- /dev/null +++ b/AutoSklearn/sklearn_backward_fixes.py @@ -0,0 +1,316 @@ +"""Compatibility fixes for older version of python, numpy and scipy + +If you add content to this file, please give the version of the package +at which the fixe is no longer needed. +""" +# Authors: Emmanuelle Gouillart +# Gael Varoquaux +# Fabian Pedregosa +# Lars Buitinck +# +# License: BSD 3 clause + +import inspect +import warnings + +import numpy as np +import scipy.sparse as sp +import scipy + + +def _parse_version(version_string): + version = [] + for x in version_string.split('.'): + try: + version.append(int(x)) + except ValueError: + # x may be of the form dev-1ea1592 + version.append(x) + return tuple(version) + + +np_version = _parse_version(np.__version__) +sp_version = _parse_version(scipy.__version__) + +# +# try: +# from scipy.special import expit # SciPy >= 0.10 +# with np.errstate(invalid='ignore', over='ignore'): +# if np.isnan(expit(1000)): # SciPy < 0.14 +# raise ImportError("no stable expit in scipy.special") +# except ImportError: +# def expit(x, out=None): +# """Logistic sigmoid function, ``1 / (1 + exp(-x))``. +# +# See sklearn.utils.extmath.log_logistic for the log of this function. +# """ +# if out is None: +# out = np.empty(np.atleast_1d(x).shape, dtype=np.float64) +# out[:] = x +# +# # 1 / (1 + exp(-x)) = (1 + tanh(x / 2)) / 2 +# # This way of computing the logistic is both fast and stable. +# out *= .5 +# np.tanh(out, out) +# out += 1 +# out *= .5 +# +# return out.reshape(np.shape(x)) + + +# little danse to see if np.copy has an 'order' keyword argument +if 'order' in inspect.getargspec(np.copy)[0]: + def safe_copy(X): + # Copy, but keep the order + return np.copy(X, order='K') +else: + # Before an 'order' argument was introduced, numpy wouldn't muck with + # the ordering + safe_copy = np.copy +# +# try: +# if (not np.allclose(np.divide(.4, 1, casting="unsafe"), +# np.divide(.4, 1, casting="unsafe", dtype=np.float)) +# or not np.allclose(np.divide(.4, 1), .4)): +# raise TypeError('Divide not working with dtype: ' +# 'https://github.com/numpy/numpy/issues/3484') +# divide = np.divide +# +# except TypeError: +# # Compat for old versions of np.divide that do not provide support for +# # the dtype args +# def divide(x1, x2, out=None, dtype=None): +# out_orig = out +# if out is None: +# out = np.asarray(x1, dtype=dtype) +# if out is x1: +# out = x1.copy() +# else: +# if out is not x1: +# out[:] = x1 +# if dtype is not None and out.dtype != dtype: +# out = out.astype(dtype) +# out /= x2 +# if out_orig is None and np.isscalar(x1): +# out = np.asscalar(out) +# return out +# +# +# try: +# np.array(5).astype(float, copy=False) +# except TypeError: +# # Compat where astype accepted no copy argument +# def astype(array, dtype, copy=True): +# if array.dtype == dtype: +# return array +# return array.astype(dtype) +# else: +# astype = np.ndarray.astype +# +# +# try: +# with warnings.catch_warnings(record=True): +# # Don't raise the numpy deprecation warnings that appear in +# # 1.9, but avoid Python bug due to simplefilter('ignore') +# warnings.simplefilter('always') +# sp.csr_matrix([1.0, 2.0, 3.0]).max(axis=0) +# except (TypeError, AttributeError): +# # in scipy < 14.0, sparse matrix min/max doesn't accept an `axis` argument +# # the following code is taken from the scipy 0.14 codebase +# +# def _minor_reduce(X, ufunc): +# major_index = np.flatnonzero(np.diff(X.indptr)) +# if X.data.size == 0 and major_index.size == 0: +# # Numpy < 1.8.0 don't handle empty arrays in reduceat +# value = np.zeros_like(X.data) +# else: +# value = ufunc.reduceat(X.data, X.indptr[major_index]) +# return major_index, value +# +# def _min_or_max_axis(X, axis, min_or_max): +# N = X.shape[axis] +# if N == 0: +# raise ValueError("zero-size array to reduction operation") +# M = X.shape[1 - axis] +# mat = X.tocsc() if axis == 0 else X.tocsr() +# mat.sum_duplicates() +# major_index, value = _minor_reduce(mat, min_or_max) +# not_full = np.diff(mat.indptr)[major_index] < N +# value[not_full] = min_or_max(value[not_full], 0) +# mask = value != 0 +# major_index = np.compress(mask, major_index) +# value = np.compress(mask, value) +# +# from scipy.sparse import coo_matrix +# if axis == 0: +# res = coo_matrix((value, (np.zeros(len(value)), major_index)), +# dtype=X.dtype, shape=(1, M)) +# else: +# res = coo_matrix((value, (major_index, np.zeros(len(value)))), +# dtype=X.dtype, shape=(M, 1)) +# return res.A.ravel() +# +# def _sparse_min_or_max(X, axis, min_or_max): +# if axis is None: +# if 0 in X.shape: +# raise ValueError("zero-size array to reduction operation") +# zero = X.dtype.type(0) +# if X.nnz == 0: +# return zero +# m = min_or_max.reduce(X.data.ravel()) +# if X.nnz != np.product(X.shape): +# m = min_or_max(zero, m) +# return m +# if axis < 0: +# axis += 2 +# if (axis == 0) or (axis == 1): +# return _min_or_max_axis(X, axis, min_or_max) +# else: +# raise ValueError("invalid axis, use 0 for rows, or 1 for columns") +# +# def sparse_min_max(X, axis): +# return (_sparse_min_or_max(X, axis, np.minimum), +# _sparse_min_or_max(X, axis, np.maximum)) +# +# else: +# def sparse_min_max(X, axis): +# return (X.min(axis=axis).toarray().ravel(), +# X.max(axis=axis).toarray().ravel()) +# +# +# try: +# from numpy import argpartition +# except ImportError: +# # numpy.argpartition was introduced in v 1.8.0 +# def argpartition(a, kth, axis=-1, kind='introselect', order=None): +# return np.argsort(a, axis=axis, order=order) +# +# +# try: +# from itertools import combinations_with_replacement +# except ImportError: +# # Backport of itertools.combinations_with_replacement for Python 2.6, +# # from Python 3.4 documentation (http://tinyurl.com/comb-w-r), copyright +# # Python Software Foundation (https://docs.python.org/3/license.html) +# def combinations_with_replacement(iterable, r): +# # combinations_with_replacement('ABC', 2) --> AA AB AC BB BC CC +# pool = tuple(iterable) +# n = len(pool) +# if not n and r: +# return +# indices = [0] * r +# yield tuple(pool[i] for i in indices) +# while True: +# for i in reversed(range(r)): +# if indices[i] != n - 1: +# break +# else: +# return +# indices[i:] = [indices[i] + 1] * (r - i) +# yield tuple(pool[i] for i in indices) +# +# +# try: +# from numpy import isclose +# except ImportError: +# def isclose(a, b, rtol=1.e-5, atol=1.e-8, equal_nan=False): +# """ +# Returns a boolean array where two arrays are element-wise equal within +# a tolerance. +# +# This function was added to numpy v1.7.0, and the version you are +# running has been backported from numpy v1.8.1. See its documentation +# for more details. +# """ +# def within_tol(x, y, atol, rtol): +# with np.errstate(invalid='ignore'): +# result = np.less_equal(abs(x-y), atol + rtol * abs(y)) +# if np.isscalar(a) and np.isscalar(b): +# result = bool(result) +# return result +# +# x = np.array(a, copy=False, subok=True, ndmin=1) +# y = np.array(b, copy=False, subok=True, ndmin=1) +# xfin = np.isfinite(x) +# yfin = np.isfinite(y) +# if all(xfin) and all(yfin): +# return within_tol(x, y, atol, rtol) +# else: +# finite = xfin & yfin +# cond = np.zeros_like(finite, subok=True) +# # Since we're using boolean indexing, x & y must be the same shape. +# # Ideally, we'd just do x, y = broadcast_arrays(x, y). It's in +# # lib.stride_tricks, though, so we can't import it here. +# x = x * np.ones_like(cond) +# y = y * np.ones_like(cond) +# # Avoid subtraction with infinite/nan values... +# cond[finite] = within_tol(x[finite], y[finite], atol, rtol) +# # Check for equality of infinite values... +# cond[~finite] = (x[~finite] == y[~finite]) +# if equal_nan: +# # Make NaN == NaN +# cond[np.isnan(x) & np.isnan(y)] = True +# return cond +# +# +# if np_version < (1, 7): +# # Prior to 1.7.0, np.frombuffer wouldn't work for empty first arg. +# def frombuffer_empty(buf, dtype): +# if len(buf) == 0: +# return np.empty(0, dtype=dtype) +# else: +# return np.frombuffer(buf, dtype=dtype) +# else: +# frombuffer_empty = np.frombuffer +# +# +# if np_version < (1, 8): +# def in1d(ar1, ar2, assume_unique=False, invert=False): +# # Backport of numpy function in1d 1.8.1 to support numpy 1.6.2 +# # Ravel both arrays, behavior for the first array could be different +# ar1 = np.asarray(ar1).ravel() +# ar2 = np.asarray(ar2).ravel() +# +# # This code is significantly faster when the condition is satisfied. +# if len(ar2) < 10 * len(ar1) ** 0.145: +# if invert: +# mask = np.ones(len(ar1), dtype=np.bool) +# for a in ar2: +# mask &= (ar1 != a) +# else: +# mask = np.zeros(len(ar1), dtype=np.bool) +# for a in ar2: +# mask |= (ar1 == a) +# return mask +# +# # Otherwise use sorting +# if not assume_unique: +# ar1, rev_idx = np.unique(ar1, return_inverse=True) +# ar2 = np.unique(ar2) +# +# ar = np.concatenate((ar1, ar2)) +# # We need this to be a stable sort, so always use 'mergesort' +# # here. The values from the first array should always come before +# # the values from the second array. +# order = ar.argsort(kind='mergesort') +# sar = ar[order] +# if invert: +# bool_ar = (sar[1:] != sar[:-1]) +# else: +# bool_ar = (sar[1:] == sar[:-1]) +# flag = np.concatenate((bool_ar, [invert])) +# indx = order.argsort(kind='mergesort')[:len(ar1)] +# +# if assume_unique: +# return flag[indx] +# else: +# return flag[indx][rev_idx] +# else: +# from numpy import in1d +# +# +# if sp_version < (0, 15): +# # Backport fix for scikit-learn/scikit-learn#2986 / scipy/scipy#4142 +# from ._scipy_sparse_lsqr_backport import lsqr as sparse_lsqr +# else: +# from scipy.sparse.linalg import lsqr as sparse_lsqr diff --git a/AutoSklearn/sklearn_backward_validation.py b/AutoSklearn/sklearn_backward_validation.py new file mode 100644 index 0000000000..f27c456f24 --- /dev/null +++ b/AutoSklearn/sklearn_backward_validation.py @@ -0,0 +1,353 @@ +"""Utilities for input validation""" +# Authors: Olivier Grisel +# Gael Varoquaux +# Andreas Mueller +# Lars Buitinck +# Alexandre Gramfort +# Nicolas Tresegnie +# License: BSD 3 clause + +import warnings +import numbers + +import numpy as np +import scipy.sparse as sp + +from sklearn.externals import six +#from sklearn.fixes import safe_copy +from sklearn_backward_fixes import safe_copy + + +class DataConversionWarning(UserWarning): + "A warning on implicit data conversions happening in the code" + pass + +warnings.simplefilter("always", DataConversionWarning) + + +class NonBLASDotWarning(UserWarning): + "A warning on implicit dispatch to numpy.dot" + pass + + +# Silenced by default to reduce verbosity. Turn on at runtime for +# performance profiling. +warnings.simplefilter('ignore', NonBLASDotWarning) + + +def _assert_all_finite(X): + """Like assert_all_finite, but only for ndarray.""" + X = np.asanyarray(X) + if (X.dtype.char in np.typecodes['AllFloat'] and not np.isfinite(X.sum()) + and not np.isfinite(X).all()): + raise ValueError("Input contains NaN, infinity" + " or a value too large for %r." % X.dtype) + + +def assert_all_finite(X): + """Throw a ValueError if X contains NaN or infinity. + + Input MUST be an np.ndarray instance or a scipy.sparse matrix.""" + + # First try an O(n) time, O(1) space solution for the common case that + # there everything is finite; fall back to O(n) space np.isfinite to + # prevent false positives from overflow in sum method. + _assert_all_finite(X.data if sp.issparse(X) else X) + + +def safe_asarray(X, dtype=None, order=None, copy=False, force_all_finite=True): + """Convert X to an array or CSC/CSR/COO sparse matrix. + + Prevents copying X when possible. Sparse matrices in CSR, CSC and COO + formats are passed through. Other sparse formats are converted to CSR + (somewhat arbitrarily). + + If a specific compressed sparse format is required, use atleast2d_or_cs{c,r} + instead. + """ + if sp.issparse(X): + if not isinstance(X, (sp.coo_matrix, sp.csc_matrix, sp.csr_matrix)): + X = X.tocsr() + elif copy: + X = X.copy() + if force_all_finite: + _assert_all_finite(X.data) + # enforces dtype on data array (order should be kept the same). + X.data = np.asarray(X.data, dtype=dtype) + else: + X = np.array(X, dtype=dtype, order=order, copy=copy) + if force_all_finite: + _assert_all_finite(X) + return X + + +def as_float_array(X, copy=True, force_all_finite=True): + """Converts an array-like to an array of floats + + The new dtype will be np.float32 or np.float64, depending on the original + type. The function can create a copy or modify the argument depending + on the argument copy. + + Parameters + ---------- + X : {array-like, sparse matrix} + + copy : bool, optional + If True, a copy of X will be created. If False, a copy may still be + returned if X's dtype is not a floating point type. + + Returns + ------- + XT : {array, sparse matrix} + An array of type np.float + """ + if isinstance(X, np.matrix) or (not isinstance(X, np.ndarray) + and not sp.issparse(X)): + return safe_asarray(X, dtype=np.float64, copy=copy, + force_all_finite=force_all_finite) + elif sp.issparse(X) and X.dtype in [np.float32, np.float64]: + return X.copy() if copy else X + elif X.dtype in [np.float32, np.float64]: # is numpy array + return X.copy('F' if X.flags['F_CONTIGUOUS'] else 'C') if copy else X + else: + return X.astype(np.float32 if X.dtype == np.int32 else np.float64) + + +def array2d(X, dtype=None, order=None, copy=False, force_all_finite=True): + """Returns at least 2-d array with data from X""" + if sp.issparse(X): + raise TypeError('A sparse matrix was passed, but dense data ' + 'is required. Use X.toarray() to convert to dense.') + X_2d = np.asarray(np.atleast_2d(X), dtype=dtype, order=order) + if force_all_finite: + _assert_all_finite(X_2d) + if X is X_2d and copy: + X_2d = safe_copy(X_2d) + return X_2d + + +def _atleast2d_or_sparse(X, dtype, order, copy, sparse_class, convmethod, + check_same_type, force_all_finite): + if sp.issparse(X): + if check_same_type(X) and X.dtype == dtype: + X = getattr(X, convmethod)(copy=copy) + elif dtype is None or X.dtype == dtype: + X = getattr(X, convmethod)() + else: + X = sparse_class(X, dtype=dtype) + if force_all_finite: + _assert_all_finite(X.data) + X.data = np.array(X.data, copy=False, order=order) + else: + X = array2d(X, dtype=dtype, order=order, copy=copy, + force_all_finite=force_all_finite) + return X + + +def atleast2d_or_csc(X, dtype=None, order=None, copy=False, + force_all_finite=True): + """Like numpy.atleast_2d, but converts sparse matrices to CSC format. + + Also, converts np.matrix to np.ndarray. + """ + return _atleast2d_or_sparse(X, dtype, order, copy, sp.csc_matrix, + "tocsc", sp.isspmatrix_csc, + force_all_finite) + + +def atleast2d_or_csr(X, dtype=None, order=None, copy=False, + force_all_finite=True): + """Like numpy.atleast_2d, but converts sparse matrices to CSR format + + Also, converts np.matrix to np.ndarray. + """ + return _atleast2d_or_sparse(X, dtype, order, copy, sp.csr_matrix, + "tocsr", sp.isspmatrix_csr, + force_all_finite) + + +def _num_samples(x): + """Return number of samples in array-like x.""" + if not hasattr(x, '__len__') and not hasattr(x, 'shape'): + if hasattr(x, '__array__'): + x = np.asarray(x) + else: + raise TypeError("Expected sequence or array-like, got %r" % x) + return x.shape[0] if hasattr(x, 'shape') else len(x) + + +def check_arrays(*arrays, **options): + """Check that all arrays have consistent first dimensions. + + Checks whether all objects in arrays have the same shape or length. + By default lists and tuples are converted to numpy arrays. + + It is possible to enforce certain properties, such as dtype, continguity + and sparse matrix format (if a sparse matrix is passed). + + Converting lists to arrays can be disabled by setting ``allow_lists=True``. + Lists can then contain arbitrary objects and are not checked for dtype, + finiteness or anything else but length. Arrays are still checked + and possibly converted. + + + Parameters + ---------- + *arrays : sequence of arrays or scipy.sparse matrices with same shape[0] + Python lists or tuples occurring in arrays are converted to 1D numpy + arrays, unless allow_lists is specified. + + sparse_format : 'csr', 'csc' or 'dense', None by default + If not None, any scipy.sparse matrix is converted to + Compressed Sparse Rows or Compressed Sparse Columns representations. + If 'dense', an error is raised when a sparse array is + passed. + + copy : boolean, False by default + If copy is True, ensure that returned arrays are copies of the original + (if not already converted to another format earlier in the process). + + check_ccontiguous : boolean, False by default + Check that the arrays are C contiguous + + dtype : a numpy dtype instance, None by default + Enforce a specific dtype. + + allow_lists : bool + Allow lists of arbitrary objects as input, just check their length. + Disables + + allow_nans : boolean, False by default + Allows nans in the arrays + + allow_nd : boolean, False by default + Allows arrays of more than 2 dimensions. + """ + sparse_format = options.pop('sparse_format', None) + if sparse_format not in (None, 'csr', 'csc', 'dense'): + raise ValueError('Unexpected sparse format: %r' % sparse_format) + copy = options.pop('copy', False) + check_ccontiguous = options.pop('check_ccontiguous', False) + dtype = options.pop('dtype', None) + allow_lists = options.pop('allow_lists', False) + allow_nans = options.pop('allow_nans', False) + allow_nd = options.pop('allow_nd', False) + + if options: + raise TypeError("Unexpected keyword arguments: %r" % options.keys()) + + if len(arrays) == 0: + return None + + n_samples = _num_samples(arrays[0]) + + checked_arrays = [] + for array in arrays: + array_orig = array + if array is None: + # special case: ignore optional y=None kwarg pattern + checked_arrays.append(array) + continue + size = _num_samples(array) + + if size != n_samples: + raise ValueError("Found array with dim %d. Expected %d" + % (size, n_samples)) + + if not allow_lists or hasattr(array, "shape"): + if sp.issparse(array): + if sparse_format == 'csr': + array = array.tocsr() + elif sparse_format == 'csc': + array = array.tocsc() + elif sparse_format == 'dense': + raise TypeError('A sparse matrix was passed, but dense ' + 'data is required. Use X.toarray() to ' + 'convert to a dense numpy array.') + if check_ccontiguous: + array.data = np.ascontiguousarray(array.data, dtype=dtype) + elif hasattr(array, 'data'): + array.data = np.asarray(array.data, dtype=dtype) + elif array.dtype != dtype: + array = array.astype(dtype) + if not allow_nans: + if hasattr(array, 'data'): + _assert_all_finite(array.data) + else: + _assert_all_finite(array.values()) + else: + if check_ccontiguous: + array = np.ascontiguousarray(array, dtype=dtype) + else: + array = np.asarray(array, dtype=dtype) + if not allow_nans: + _assert_all_finite(array) + + if not allow_nd and array.ndim >= 3: + raise ValueError("Found array with dim %d. Expected <= 2" % + array.ndim) + + if copy and array is array_orig: + array = array.copy() + checked_arrays.append(array) + + return checked_arrays + + +def column_or_1d(y, warn=False): + """ Ravel column or 1d numpy array, else raises an error + + Parameters + ---------- + y : array-like + + Returns + ------- + y : array + + """ + shape = np.shape(y) + if len(shape) == 1: + return np.ravel(y) + if len(shape) == 2 and shape[1] == 1: + if warn: + warnings.warn("A column-vector y was passed when a 1d array was" + " expected. Please change the shape of y to " + "(n_samples, ), for example using ravel().", + DataConversionWarning, stacklevel=2) + return np.ravel(y) + + raise ValueError("bad input shape {0}".format(shape)) + + +def warn_if_not_float(X, estimator='This algorithm'): + """Warning utility function to check that data type is floating point. + + Returns True if a warning was raised (i.e. the input is not float) and + False otherwise, for easier input validation. + """ + if not isinstance(estimator, six.string_types): + estimator = estimator.__class__.__name__ + if X.dtype.kind != 'f': + warnings.warn("%s assumes floating point values as input, " + "got %s" % (estimator, X.dtype)) + return True + return False + + +def check_random_state(seed): + """Turn seed into a np.random.RandomState instance + + If seed is None, return the RandomState singleton used by np.random. + If seed is an int, return a new RandomState instance seeded with seed. + If seed is already a RandomState instance, return it. + Otherwise raise ValueError. + """ + if seed is None or seed is np.random: + return np.random.mtrand._rand + if isinstance(seed, (numbers.Integral, np.integer)): + return np.random.RandomState(seed) + if isinstance(seed, np.random.RandomState): + return seed + raise ValueError('%r cannot be used to seed a numpy.random.RandomState' + ' instance' % seed) diff --git a/setup.py b/setup.py index e63b26f633..6eba483c2c 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ packages=setuptools.find_packages(), install_requires=["numpy", "scipy", - "scikit-learn==0.15.2", + #"scikit-learn==0.15.2", "nose", "HPOlibConfigSpace"], test_suite="nose.collector", diff --git a/tests/components/classification/test_extra_trees.py b/tests/components/classification/test_extra_trees.py index 5eb133b19a..f8b898e5aa 100644 --- a/tests/components/classification/test_extra_trees.py +++ b/tests/components/classification/test_extra_trees.py @@ -7,7 +7,7 @@ import sklearn.metrics -class GradientBoostingComponentTest(unittest.TestCase): +class ExtraTreesComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): predictions, targets = \ diff --git a/tests/components/regression/test_ridge_regression.py b/tests/components/regression/test_ridge_regression.py index b2a85e19d8..06eafb9208 100644 --- a/tests/components/regression/test_ridge_regression.py +++ b/tests/components/regression/test_ridge_regression.py @@ -38,5 +38,5 @@ def test_default_configuration(self): predictor = regressor.fit(X_train_transformed, Y_train) predictions = predictor.predict(X_test_transformed) - self.assertAlmostEqual(0.24658871483206091, + self.assertAlmostEqual(0.32731125809612438, #0.24658871483206091 sklearn.metrics.r2_score(y_true=Y_test, y_pred=predictions)) \ No newline at end of file From 0e3f34a2f517987f8efb53987f3ca1ee1cbe61cb Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Sat, 10 Jan 2015 11:53:15 +0100 Subject: [PATCH 090/352] manually undo changes made to support sklearn 0.16dev --- AutoSklearn/autosklearn.py | 7 +- AutoSklearn/autosklearn_regression.py | 6 +- AutoSklearn/implementations/MinMaxScaler.py | 3 +- AutoSklearn/implementations/OneHotEncoder.py | 6 +- AutoSklearn/implementations/StandardScaler.py | 8 +- AutoSklearn/sklearn_backward_fixes.py | 316 ---------------- AutoSklearn/sklearn_backward_validation.py | 353 ------------------ setup.py | 6 +- .../regression/test_ridge_regression.py | 2 +- 9 files changed, 16 insertions(+), 691 deletions(-) delete mode 100644 AutoSklearn/sklearn_backward_fixes.py delete mode 100644 AutoSklearn/sklearn_backward_validation.py diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/autosklearn.py index b04f6b282c..0227b507b4 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/autosklearn.py @@ -3,14 +3,13 @@ from itertools import product import sklearn -#if sklearn.__version__ != "0.15.2": -# raise ValueError("AutoSklearn supports only sklearn version 0.15.2, " -# "you installed %s." % sklearn.__version__) +if sklearn.__version__ != "0.15.2": + raise ValueError("AutoSklearn supports only sklearn version 0.15.2, " + "you installed %s." % sklearn.__version__) from sklearn.base import BaseEstimator, ClassifierMixin from sklearn.pipeline import Pipeline from sklearn.utils import check_random_state -# from sklearn.utils.validation import safe_asarray, assert_all_finite from HPOlibConfigSpace.configuration_space import ConfigurationSpace diff --git a/AutoSklearn/autosklearn_regression.py b/AutoSklearn/autosklearn_regression.py index ad8bfec0af..3137fe4c35 100644 --- a/AutoSklearn/autosklearn_regression.py +++ b/AutoSklearn/autosklearn_regression.py @@ -3,9 +3,9 @@ from itertools import product import sklearn -#if sklearn.__version__ != "0.15.2": -# raise ValueError("AutoSklearn supports only sklearn version 0.15.2, " -# "you installed %s." % sklearn.__version__) +if sklearn.__version__ != "0.15.2": + raise ValueError("AutoSklearn supports only sklearn version 0.15.2, " + "you installed %s." % sklearn.__version__) from sklearn.base import BaseEstimator, RegressorMixin from sklearn.pipeline import Pipeline diff --git a/AutoSklearn/implementations/MinMaxScaler.py b/AutoSklearn/implementations/MinMaxScaler.py index 5b2e554e26..b69cf239d3 100644 --- a/AutoSklearn/implementations/MinMaxScaler.py +++ b/AutoSklearn/implementations/MinMaxScaler.py @@ -2,8 +2,7 @@ from scipy import sparse from sklearn.base import BaseEstimator, TransformerMixin -#from sklearn.utils import check_arrays, warn_if_not_float -from ..sklearn_backward_validation import check_arrays, warn_if_not_float +from sklearn.utils import check_arrays, warn_if_not_float class MinMaxScaler(BaseEstimator, TransformerMixin): diff --git a/AutoSklearn/implementations/OneHotEncoder.py b/AutoSklearn/implementations/OneHotEncoder.py index 83ad4daa06..3c11346bd3 100644 --- a/AutoSklearn/implementations/OneHotEncoder.py +++ b/AutoSklearn/implementations/OneHotEncoder.py @@ -3,10 +3,8 @@ from sklearn.base import BaseEstimator, TransformerMixin from sklearn.externals import six -#from sklearn.utils import check_arrays -#from sklearn.utils import atleast2d_or_csc, safe_asarray -from ..sklearn_backward_validation import check_arrays -from ..sklearn_backward_validation import atleast2d_or_csc, safe_asarray +from sklearn.utils import check_arrays +from sklearn.utils import atleast2d_or_csc, safe_asarray zip = six.moves.zip map = six.moves.map diff --git a/AutoSklearn/implementations/StandardScaler.py b/AutoSklearn/implementations/StandardScaler.py index cb869ea80f..2bed2fe1c4 100644 --- a/AutoSklearn/implementations/StandardScaler.py +++ b/AutoSklearn/implementations/StandardScaler.py @@ -2,11 +2,9 @@ from scipy import sparse from sklearn.base import BaseEstimator, TransformerMixin -# from sklearn.utils import check_arrays, warn_if_not_float -from ..sklearn_backward_validation import check_arrays, warn_if_not_float - +from sklearn.utils import check_arrays, warn_if_not_float from sklearn.utils.sparsefuncs import inplace_column_scale, \ - mean_variance_axis + mean_variance_axis0 def _mean_and_std(X, axis=0, with_mean=True, with_std=True): @@ -141,7 +139,7 @@ def fit(self, X, y=None): self.mean_ = None if self.with_std: - var = mean_variance_axis(X=X, axis=0)[1] + var = mean_variance_axis0(X)[1] self.std_ = np.sqrt(var) self.std_[var == 0.0] = 1.0 else: diff --git a/AutoSklearn/sklearn_backward_fixes.py b/AutoSklearn/sklearn_backward_fixes.py deleted file mode 100644 index 416f4f5c1b..0000000000 --- a/AutoSklearn/sklearn_backward_fixes.py +++ /dev/null @@ -1,316 +0,0 @@ -"""Compatibility fixes for older version of python, numpy and scipy - -If you add content to this file, please give the version of the package -at which the fixe is no longer needed. -""" -# Authors: Emmanuelle Gouillart -# Gael Varoquaux -# Fabian Pedregosa -# Lars Buitinck -# -# License: BSD 3 clause - -import inspect -import warnings - -import numpy as np -import scipy.sparse as sp -import scipy - - -def _parse_version(version_string): - version = [] - for x in version_string.split('.'): - try: - version.append(int(x)) - except ValueError: - # x may be of the form dev-1ea1592 - version.append(x) - return tuple(version) - - -np_version = _parse_version(np.__version__) -sp_version = _parse_version(scipy.__version__) - -# -# try: -# from scipy.special import expit # SciPy >= 0.10 -# with np.errstate(invalid='ignore', over='ignore'): -# if np.isnan(expit(1000)): # SciPy < 0.14 -# raise ImportError("no stable expit in scipy.special") -# except ImportError: -# def expit(x, out=None): -# """Logistic sigmoid function, ``1 / (1 + exp(-x))``. -# -# See sklearn.utils.extmath.log_logistic for the log of this function. -# """ -# if out is None: -# out = np.empty(np.atleast_1d(x).shape, dtype=np.float64) -# out[:] = x -# -# # 1 / (1 + exp(-x)) = (1 + tanh(x / 2)) / 2 -# # This way of computing the logistic is both fast and stable. -# out *= .5 -# np.tanh(out, out) -# out += 1 -# out *= .5 -# -# return out.reshape(np.shape(x)) - - -# little danse to see if np.copy has an 'order' keyword argument -if 'order' in inspect.getargspec(np.copy)[0]: - def safe_copy(X): - # Copy, but keep the order - return np.copy(X, order='K') -else: - # Before an 'order' argument was introduced, numpy wouldn't muck with - # the ordering - safe_copy = np.copy -# -# try: -# if (not np.allclose(np.divide(.4, 1, casting="unsafe"), -# np.divide(.4, 1, casting="unsafe", dtype=np.float)) -# or not np.allclose(np.divide(.4, 1), .4)): -# raise TypeError('Divide not working with dtype: ' -# 'https://github.com/numpy/numpy/issues/3484') -# divide = np.divide -# -# except TypeError: -# # Compat for old versions of np.divide that do not provide support for -# # the dtype args -# def divide(x1, x2, out=None, dtype=None): -# out_orig = out -# if out is None: -# out = np.asarray(x1, dtype=dtype) -# if out is x1: -# out = x1.copy() -# else: -# if out is not x1: -# out[:] = x1 -# if dtype is not None and out.dtype != dtype: -# out = out.astype(dtype) -# out /= x2 -# if out_orig is None and np.isscalar(x1): -# out = np.asscalar(out) -# return out -# -# -# try: -# np.array(5).astype(float, copy=False) -# except TypeError: -# # Compat where astype accepted no copy argument -# def astype(array, dtype, copy=True): -# if array.dtype == dtype: -# return array -# return array.astype(dtype) -# else: -# astype = np.ndarray.astype -# -# -# try: -# with warnings.catch_warnings(record=True): -# # Don't raise the numpy deprecation warnings that appear in -# # 1.9, but avoid Python bug due to simplefilter('ignore') -# warnings.simplefilter('always') -# sp.csr_matrix([1.0, 2.0, 3.0]).max(axis=0) -# except (TypeError, AttributeError): -# # in scipy < 14.0, sparse matrix min/max doesn't accept an `axis` argument -# # the following code is taken from the scipy 0.14 codebase -# -# def _minor_reduce(X, ufunc): -# major_index = np.flatnonzero(np.diff(X.indptr)) -# if X.data.size == 0 and major_index.size == 0: -# # Numpy < 1.8.0 don't handle empty arrays in reduceat -# value = np.zeros_like(X.data) -# else: -# value = ufunc.reduceat(X.data, X.indptr[major_index]) -# return major_index, value -# -# def _min_or_max_axis(X, axis, min_or_max): -# N = X.shape[axis] -# if N == 0: -# raise ValueError("zero-size array to reduction operation") -# M = X.shape[1 - axis] -# mat = X.tocsc() if axis == 0 else X.tocsr() -# mat.sum_duplicates() -# major_index, value = _minor_reduce(mat, min_or_max) -# not_full = np.diff(mat.indptr)[major_index] < N -# value[not_full] = min_or_max(value[not_full], 0) -# mask = value != 0 -# major_index = np.compress(mask, major_index) -# value = np.compress(mask, value) -# -# from scipy.sparse import coo_matrix -# if axis == 0: -# res = coo_matrix((value, (np.zeros(len(value)), major_index)), -# dtype=X.dtype, shape=(1, M)) -# else: -# res = coo_matrix((value, (major_index, np.zeros(len(value)))), -# dtype=X.dtype, shape=(M, 1)) -# return res.A.ravel() -# -# def _sparse_min_or_max(X, axis, min_or_max): -# if axis is None: -# if 0 in X.shape: -# raise ValueError("zero-size array to reduction operation") -# zero = X.dtype.type(0) -# if X.nnz == 0: -# return zero -# m = min_or_max.reduce(X.data.ravel()) -# if X.nnz != np.product(X.shape): -# m = min_or_max(zero, m) -# return m -# if axis < 0: -# axis += 2 -# if (axis == 0) or (axis == 1): -# return _min_or_max_axis(X, axis, min_or_max) -# else: -# raise ValueError("invalid axis, use 0 for rows, or 1 for columns") -# -# def sparse_min_max(X, axis): -# return (_sparse_min_or_max(X, axis, np.minimum), -# _sparse_min_or_max(X, axis, np.maximum)) -# -# else: -# def sparse_min_max(X, axis): -# return (X.min(axis=axis).toarray().ravel(), -# X.max(axis=axis).toarray().ravel()) -# -# -# try: -# from numpy import argpartition -# except ImportError: -# # numpy.argpartition was introduced in v 1.8.0 -# def argpartition(a, kth, axis=-1, kind='introselect', order=None): -# return np.argsort(a, axis=axis, order=order) -# -# -# try: -# from itertools import combinations_with_replacement -# except ImportError: -# # Backport of itertools.combinations_with_replacement for Python 2.6, -# # from Python 3.4 documentation (http://tinyurl.com/comb-w-r), copyright -# # Python Software Foundation (https://docs.python.org/3/license.html) -# def combinations_with_replacement(iterable, r): -# # combinations_with_replacement('ABC', 2) --> AA AB AC BB BC CC -# pool = tuple(iterable) -# n = len(pool) -# if not n and r: -# return -# indices = [0] * r -# yield tuple(pool[i] for i in indices) -# while True: -# for i in reversed(range(r)): -# if indices[i] != n - 1: -# break -# else: -# return -# indices[i:] = [indices[i] + 1] * (r - i) -# yield tuple(pool[i] for i in indices) -# -# -# try: -# from numpy import isclose -# except ImportError: -# def isclose(a, b, rtol=1.e-5, atol=1.e-8, equal_nan=False): -# """ -# Returns a boolean array where two arrays are element-wise equal within -# a tolerance. -# -# This function was added to numpy v1.7.0, and the version you are -# running has been backported from numpy v1.8.1. See its documentation -# for more details. -# """ -# def within_tol(x, y, atol, rtol): -# with np.errstate(invalid='ignore'): -# result = np.less_equal(abs(x-y), atol + rtol * abs(y)) -# if np.isscalar(a) and np.isscalar(b): -# result = bool(result) -# return result -# -# x = np.array(a, copy=False, subok=True, ndmin=1) -# y = np.array(b, copy=False, subok=True, ndmin=1) -# xfin = np.isfinite(x) -# yfin = np.isfinite(y) -# if all(xfin) and all(yfin): -# return within_tol(x, y, atol, rtol) -# else: -# finite = xfin & yfin -# cond = np.zeros_like(finite, subok=True) -# # Since we're using boolean indexing, x & y must be the same shape. -# # Ideally, we'd just do x, y = broadcast_arrays(x, y). It's in -# # lib.stride_tricks, though, so we can't import it here. -# x = x * np.ones_like(cond) -# y = y * np.ones_like(cond) -# # Avoid subtraction with infinite/nan values... -# cond[finite] = within_tol(x[finite], y[finite], atol, rtol) -# # Check for equality of infinite values... -# cond[~finite] = (x[~finite] == y[~finite]) -# if equal_nan: -# # Make NaN == NaN -# cond[np.isnan(x) & np.isnan(y)] = True -# return cond -# -# -# if np_version < (1, 7): -# # Prior to 1.7.0, np.frombuffer wouldn't work for empty first arg. -# def frombuffer_empty(buf, dtype): -# if len(buf) == 0: -# return np.empty(0, dtype=dtype) -# else: -# return np.frombuffer(buf, dtype=dtype) -# else: -# frombuffer_empty = np.frombuffer -# -# -# if np_version < (1, 8): -# def in1d(ar1, ar2, assume_unique=False, invert=False): -# # Backport of numpy function in1d 1.8.1 to support numpy 1.6.2 -# # Ravel both arrays, behavior for the first array could be different -# ar1 = np.asarray(ar1).ravel() -# ar2 = np.asarray(ar2).ravel() -# -# # This code is significantly faster when the condition is satisfied. -# if len(ar2) < 10 * len(ar1) ** 0.145: -# if invert: -# mask = np.ones(len(ar1), dtype=np.bool) -# for a in ar2: -# mask &= (ar1 != a) -# else: -# mask = np.zeros(len(ar1), dtype=np.bool) -# for a in ar2: -# mask |= (ar1 == a) -# return mask -# -# # Otherwise use sorting -# if not assume_unique: -# ar1, rev_idx = np.unique(ar1, return_inverse=True) -# ar2 = np.unique(ar2) -# -# ar = np.concatenate((ar1, ar2)) -# # We need this to be a stable sort, so always use 'mergesort' -# # here. The values from the first array should always come before -# # the values from the second array. -# order = ar.argsort(kind='mergesort') -# sar = ar[order] -# if invert: -# bool_ar = (sar[1:] != sar[:-1]) -# else: -# bool_ar = (sar[1:] == sar[:-1]) -# flag = np.concatenate((bool_ar, [invert])) -# indx = order.argsort(kind='mergesort')[:len(ar1)] -# -# if assume_unique: -# return flag[indx] -# else: -# return flag[indx][rev_idx] -# else: -# from numpy import in1d -# -# -# if sp_version < (0, 15): -# # Backport fix for scikit-learn/scikit-learn#2986 / scipy/scipy#4142 -# from ._scipy_sparse_lsqr_backport import lsqr as sparse_lsqr -# else: -# from scipy.sparse.linalg import lsqr as sparse_lsqr diff --git a/AutoSklearn/sklearn_backward_validation.py b/AutoSklearn/sklearn_backward_validation.py deleted file mode 100644 index f27c456f24..0000000000 --- a/AutoSklearn/sklearn_backward_validation.py +++ /dev/null @@ -1,353 +0,0 @@ -"""Utilities for input validation""" -# Authors: Olivier Grisel -# Gael Varoquaux -# Andreas Mueller -# Lars Buitinck -# Alexandre Gramfort -# Nicolas Tresegnie -# License: BSD 3 clause - -import warnings -import numbers - -import numpy as np -import scipy.sparse as sp - -from sklearn.externals import six -#from sklearn.fixes import safe_copy -from sklearn_backward_fixes import safe_copy - - -class DataConversionWarning(UserWarning): - "A warning on implicit data conversions happening in the code" - pass - -warnings.simplefilter("always", DataConversionWarning) - - -class NonBLASDotWarning(UserWarning): - "A warning on implicit dispatch to numpy.dot" - pass - - -# Silenced by default to reduce verbosity. Turn on at runtime for -# performance profiling. -warnings.simplefilter('ignore', NonBLASDotWarning) - - -def _assert_all_finite(X): - """Like assert_all_finite, but only for ndarray.""" - X = np.asanyarray(X) - if (X.dtype.char in np.typecodes['AllFloat'] and not np.isfinite(X.sum()) - and not np.isfinite(X).all()): - raise ValueError("Input contains NaN, infinity" - " or a value too large for %r." % X.dtype) - - -def assert_all_finite(X): - """Throw a ValueError if X contains NaN or infinity. - - Input MUST be an np.ndarray instance or a scipy.sparse matrix.""" - - # First try an O(n) time, O(1) space solution for the common case that - # there everything is finite; fall back to O(n) space np.isfinite to - # prevent false positives from overflow in sum method. - _assert_all_finite(X.data if sp.issparse(X) else X) - - -def safe_asarray(X, dtype=None, order=None, copy=False, force_all_finite=True): - """Convert X to an array or CSC/CSR/COO sparse matrix. - - Prevents copying X when possible. Sparse matrices in CSR, CSC and COO - formats are passed through. Other sparse formats are converted to CSR - (somewhat arbitrarily). - - If a specific compressed sparse format is required, use atleast2d_or_cs{c,r} - instead. - """ - if sp.issparse(X): - if not isinstance(X, (sp.coo_matrix, sp.csc_matrix, sp.csr_matrix)): - X = X.tocsr() - elif copy: - X = X.copy() - if force_all_finite: - _assert_all_finite(X.data) - # enforces dtype on data array (order should be kept the same). - X.data = np.asarray(X.data, dtype=dtype) - else: - X = np.array(X, dtype=dtype, order=order, copy=copy) - if force_all_finite: - _assert_all_finite(X) - return X - - -def as_float_array(X, copy=True, force_all_finite=True): - """Converts an array-like to an array of floats - - The new dtype will be np.float32 or np.float64, depending on the original - type. The function can create a copy or modify the argument depending - on the argument copy. - - Parameters - ---------- - X : {array-like, sparse matrix} - - copy : bool, optional - If True, a copy of X will be created. If False, a copy may still be - returned if X's dtype is not a floating point type. - - Returns - ------- - XT : {array, sparse matrix} - An array of type np.float - """ - if isinstance(X, np.matrix) or (not isinstance(X, np.ndarray) - and not sp.issparse(X)): - return safe_asarray(X, dtype=np.float64, copy=copy, - force_all_finite=force_all_finite) - elif sp.issparse(X) and X.dtype in [np.float32, np.float64]: - return X.copy() if copy else X - elif X.dtype in [np.float32, np.float64]: # is numpy array - return X.copy('F' if X.flags['F_CONTIGUOUS'] else 'C') if copy else X - else: - return X.astype(np.float32 if X.dtype == np.int32 else np.float64) - - -def array2d(X, dtype=None, order=None, copy=False, force_all_finite=True): - """Returns at least 2-d array with data from X""" - if sp.issparse(X): - raise TypeError('A sparse matrix was passed, but dense data ' - 'is required. Use X.toarray() to convert to dense.') - X_2d = np.asarray(np.atleast_2d(X), dtype=dtype, order=order) - if force_all_finite: - _assert_all_finite(X_2d) - if X is X_2d and copy: - X_2d = safe_copy(X_2d) - return X_2d - - -def _atleast2d_or_sparse(X, dtype, order, copy, sparse_class, convmethod, - check_same_type, force_all_finite): - if sp.issparse(X): - if check_same_type(X) and X.dtype == dtype: - X = getattr(X, convmethod)(copy=copy) - elif dtype is None or X.dtype == dtype: - X = getattr(X, convmethod)() - else: - X = sparse_class(X, dtype=dtype) - if force_all_finite: - _assert_all_finite(X.data) - X.data = np.array(X.data, copy=False, order=order) - else: - X = array2d(X, dtype=dtype, order=order, copy=copy, - force_all_finite=force_all_finite) - return X - - -def atleast2d_or_csc(X, dtype=None, order=None, copy=False, - force_all_finite=True): - """Like numpy.atleast_2d, but converts sparse matrices to CSC format. - - Also, converts np.matrix to np.ndarray. - """ - return _atleast2d_or_sparse(X, dtype, order, copy, sp.csc_matrix, - "tocsc", sp.isspmatrix_csc, - force_all_finite) - - -def atleast2d_or_csr(X, dtype=None, order=None, copy=False, - force_all_finite=True): - """Like numpy.atleast_2d, but converts sparse matrices to CSR format - - Also, converts np.matrix to np.ndarray. - """ - return _atleast2d_or_sparse(X, dtype, order, copy, sp.csr_matrix, - "tocsr", sp.isspmatrix_csr, - force_all_finite) - - -def _num_samples(x): - """Return number of samples in array-like x.""" - if not hasattr(x, '__len__') and not hasattr(x, 'shape'): - if hasattr(x, '__array__'): - x = np.asarray(x) - else: - raise TypeError("Expected sequence or array-like, got %r" % x) - return x.shape[0] if hasattr(x, 'shape') else len(x) - - -def check_arrays(*arrays, **options): - """Check that all arrays have consistent first dimensions. - - Checks whether all objects in arrays have the same shape or length. - By default lists and tuples are converted to numpy arrays. - - It is possible to enforce certain properties, such as dtype, continguity - and sparse matrix format (if a sparse matrix is passed). - - Converting lists to arrays can be disabled by setting ``allow_lists=True``. - Lists can then contain arbitrary objects and are not checked for dtype, - finiteness or anything else but length. Arrays are still checked - and possibly converted. - - - Parameters - ---------- - *arrays : sequence of arrays or scipy.sparse matrices with same shape[0] - Python lists or tuples occurring in arrays are converted to 1D numpy - arrays, unless allow_lists is specified. - - sparse_format : 'csr', 'csc' or 'dense', None by default - If not None, any scipy.sparse matrix is converted to - Compressed Sparse Rows or Compressed Sparse Columns representations. - If 'dense', an error is raised when a sparse array is - passed. - - copy : boolean, False by default - If copy is True, ensure that returned arrays are copies of the original - (if not already converted to another format earlier in the process). - - check_ccontiguous : boolean, False by default - Check that the arrays are C contiguous - - dtype : a numpy dtype instance, None by default - Enforce a specific dtype. - - allow_lists : bool - Allow lists of arbitrary objects as input, just check their length. - Disables - - allow_nans : boolean, False by default - Allows nans in the arrays - - allow_nd : boolean, False by default - Allows arrays of more than 2 dimensions. - """ - sparse_format = options.pop('sparse_format', None) - if sparse_format not in (None, 'csr', 'csc', 'dense'): - raise ValueError('Unexpected sparse format: %r' % sparse_format) - copy = options.pop('copy', False) - check_ccontiguous = options.pop('check_ccontiguous', False) - dtype = options.pop('dtype', None) - allow_lists = options.pop('allow_lists', False) - allow_nans = options.pop('allow_nans', False) - allow_nd = options.pop('allow_nd', False) - - if options: - raise TypeError("Unexpected keyword arguments: %r" % options.keys()) - - if len(arrays) == 0: - return None - - n_samples = _num_samples(arrays[0]) - - checked_arrays = [] - for array in arrays: - array_orig = array - if array is None: - # special case: ignore optional y=None kwarg pattern - checked_arrays.append(array) - continue - size = _num_samples(array) - - if size != n_samples: - raise ValueError("Found array with dim %d. Expected %d" - % (size, n_samples)) - - if not allow_lists or hasattr(array, "shape"): - if sp.issparse(array): - if sparse_format == 'csr': - array = array.tocsr() - elif sparse_format == 'csc': - array = array.tocsc() - elif sparse_format == 'dense': - raise TypeError('A sparse matrix was passed, but dense ' - 'data is required. Use X.toarray() to ' - 'convert to a dense numpy array.') - if check_ccontiguous: - array.data = np.ascontiguousarray(array.data, dtype=dtype) - elif hasattr(array, 'data'): - array.data = np.asarray(array.data, dtype=dtype) - elif array.dtype != dtype: - array = array.astype(dtype) - if not allow_nans: - if hasattr(array, 'data'): - _assert_all_finite(array.data) - else: - _assert_all_finite(array.values()) - else: - if check_ccontiguous: - array = np.ascontiguousarray(array, dtype=dtype) - else: - array = np.asarray(array, dtype=dtype) - if not allow_nans: - _assert_all_finite(array) - - if not allow_nd and array.ndim >= 3: - raise ValueError("Found array with dim %d. Expected <= 2" % - array.ndim) - - if copy and array is array_orig: - array = array.copy() - checked_arrays.append(array) - - return checked_arrays - - -def column_or_1d(y, warn=False): - """ Ravel column or 1d numpy array, else raises an error - - Parameters - ---------- - y : array-like - - Returns - ------- - y : array - - """ - shape = np.shape(y) - if len(shape) == 1: - return np.ravel(y) - if len(shape) == 2 and shape[1] == 1: - if warn: - warnings.warn("A column-vector y was passed when a 1d array was" - " expected. Please change the shape of y to " - "(n_samples, ), for example using ravel().", - DataConversionWarning, stacklevel=2) - return np.ravel(y) - - raise ValueError("bad input shape {0}".format(shape)) - - -def warn_if_not_float(X, estimator='This algorithm'): - """Warning utility function to check that data type is floating point. - - Returns True if a warning was raised (i.e. the input is not float) and - False otherwise, for easier input validation. - """ - if not isinstance(estimator, six.string_types): - estimator = estimator.__class__.__name__ - if X.dtype.kind != 'f': - warnings.warn("%s assumes floating point values as input, " - "got %s" % (estimator, X.dtype)) - return True - return False - - -def check_random_state(seed): - """Turn seed into a np.random.RandomState instance - - If seed is None, return the RandomState singleton used by np.random. - If seed is an int, return a new RandomState instance seeded with seed. - If seed is already a RandomState instance, return it. - Otherwise raise ValueError. - """ - if seed is None or seed is np.random: - return np.random.mtrand._rand - if isinstance(seed, (numbers.Integral, np.integer)): - return np.random.RandomState(seed) - if isinstance(seed, np.random.RandomState): - return seed - raise ValueError('%r cannot be used to seed a numpy.random.RandomState' - ' instance' % seed) diff --git a/setup.py b/setup.py index 6eba483c2c..f7834db653 100644 --- a/setup.py +++ b/setup.py @@ -5,9 +5,9 @@ "hyperparameter configuration.", version="0.1dev", packages=setuptools.find_packages(), - install_requires=["numpy", - "scipy", - #"scikit-learn==0.15.2", + install_requires=["numpy==1.9.1", + "scipy==0.14.0", + "scikit-learn==0.15.2", "nose", "HPOlibConfigSpace"], test_suite="nose.collector", diff --git a/tests/components/regression/test_ridge_regression.py b/tests/components/regression/test_ridge_regression.py index 06eafb9208..b2a85e19d8 100644 --- a/tests/components/regression/test_ridge_regression.py +++ b/tests/components/regression/test_ridge_regression.py @@ -38,5 +38,5 @@ def test_default_configuration(self): predictor = regressor.fit(X_train_transformed, Y_train) predictions = predictor.predict(X_test_transformed) - self.assertAlmostEqual(0.32731125809612438, #0.24658871483206091 + self.assertAlmostEqual(0.24658871483206091, sklearn.metrics.r2_score(y_true=Y_test, y_pred=predictions)) \ No newline at end of file From 14218af4db87b7272ed6dde489750e006c3667cf Mon Sep 17 00:00:00 2001 From: Jost Tobias Springenberg Date: Sat, 10 Jan 2015 12:11:15 +0100 Subject: [PATCH 091/352] add our own hacked together wrapper around scikit learn trees --- AutoSklearn/implementations/forest.py | 1114 +++++++++++++++++++++++++ 1 file changed, 1114 insertions(+) create mode 100644 AutoSklearn/implementations/forest.py diff --git a/AutoSklearn/implementations/forest.py b/AutoSklearn/implementations/forest.py new file mode 100644 index 0000000000..a89fb88f34 --- /dev/null +++ b/AutoSklearn/implementations/forest.py @@ -0,0 +1,1114 @@ +"""Forest of trees-based ensemble methods + Jost Tobias Springenberg (JTS) + This is a quick and dirty hack to get some of the functionality (like warm-starting) + from sklearn 0.16 into 0.15.2, we should aim to remove this once we can safely use + the newer version! + Most of this is shamelessly copied from sklearn directly :) +""" + + +from __future__ import division + +import numpy as np + +from warnings import warn +from abc import ABCMeta, abstractmethod + +import numpy as np +from scipy.sparse import issparse + +from sklearn.base import ClassifierMixin, RegressorMixin +from sklearn.externals.joblib import Parallel, delayed +from sklearn.externals import six +from sklearn.feature_selection.from_model import _LearntSelectorMixin +from sklearn.metrics import r2_score +from sklearn.tree import (DecisionTreeClassifier, DecisionTreeRegressor, + ExtraTreeClassifier, ExtraTreeRegressor) +from sklearn.tree._tree import DTYPE, DOUBLE +from sklearn.utils import array2d, check_random_state, check_arrays, safe_asarray +from sklearn.utils.validation import DataConversionWarning +from sklearn.ensemble.base import BaseEnsemble, _partition_estimators + +# bring in all the stuff from forests that we can reuse +from sklearn.ensemble.forest import _parallel_build_trees +from sklearn.ensemble.forest import _parallel_predict_proba +from sklearn.ensemble.forest import _parallel_predict_regression +from sklearn.ensemble.forest import _parallel_apply + +# bring in the base forest class from sklearn +from sklearn.ensemble.forest import BaseForest + +MAX_INT = np.iinfo(np.int32).max + + +class MyBaseForest(six.with_metaclass(ABCMeta, BaseForest, + _LearntSelectorMixin)): + """Base class for forests of trees. + WARNING JTS: this is a mix between the 0.15.2 sklearn and 0.16 forest + """ + + @abstractmethod + def __init__(self, + base_estimator, + n_estimators=10, + estimator_params=tuple(), + bootstrap=False, + oob_score=False, + n_jobs=1, + random_state=None, + verbose=0, + warm_start=False): + super(BaseForest, self).__init__( + base_estimator=base_estimator, + n_estimators=n_estimators, + estimator_params=estimator_params) + + self.bootstrap = bootstrap + self.oob_score = oob_score + self.n_jobs = n_jobs + self.random_state = random_state + self.verbose = verbose + self.warm_start = warm_start + + + def fit(self, X, y, sample_weight=None): + """Build a forest of trees from the training set (X, y). + JTS: this now supports the warm_start procedure + + Parameters + ---------- + X : array-like of shape = [n_samples, n_features] + + y : array-like, shape = [n_samples] or [n_samples, n_outputs] + The target values (class labels in classification, real numbers in + regression). + + sample_weight : array-like, shape = [n_samples] or None + Sample weights. If None, then samples are equally weighted. Splits + that would create child nodes with net zero or negative weight are + ignored while searching for a split in each node. In the case of + classification, splits are also ignored if they would result in any + single class carrying a negative weight in either child node. + + Returns + ------- + self : object + Returns self. + """ + # Convert data + X = check_arrays(X, dtype=DTYPE, sparse_format="dense") + + # Remap output + n_samples, self.n_features_ = X.shape + + y = np.atleast_1d(y) + if y.ndim == 2 and y.shape[1] == 1: + warn("A column-vector y was passed when a 1d array was" + " expected. Please change the shape of y to " + "(n_samples, ), for example using ravel().", + DataConversionWarning, stacklevel=2) + + if y.ndim == 1: + # reshape is necessary to preserve the data contiguity against vs + # [:, np.newaxis] that does not. + y = np.reshape(y, (-1, 1)) + + self.n_outputs_ = y.shape[1] + + y = self._validate_y(y) + + if getattr(y, "dtype", None) != DOUBLE or not y.flags.contiguous: + y = np.ascontiguousarray(y, dtype=DOUBLE) + + # Check parameters + self._validate_estimator() + + if not self.bootstrap and self.oob_score: + raise ValueError("Out of bag estimation only available" + " if bootstrap=True") + + random_state = check_random_state(self.random_state) + + if not self.warm_start: + # Free allocated memory, if any + self.estimators_ = [] + + n_more_estimators = self.n_estimators - len(self.estimators_) + + if n_more_estimators < 0: + raise ValueError('n_estimators=%d must be larger or equal to ' + 'len(estimators_)=%d when warm_start==True' + % (self.n_estimators, len(self.estimators_))) + + elif n_more_estimators == 0: + warn("Warm-start fitting without increasing n_estimators does not " + "fit new trees.") + else: + if self.warm_start and len(self.estimators_) > 0: + # We draw from the random state to get the random state we + # would have got if we hadn't used a warm_start. + random_state.randint(MAX_INT, size=len(self.estimators_)) + + trees = [] + for i in range(n_more_estimators): + tree = self._make_estimator(append=False) + tree.set_params(random_state=random_state.randint(MAX_INT)) + trees.append(tree) + + # Parallel loop: we use the threading backend as the Cython code + # for fitting the trees is internally releasing the Python GIL + # making threading always more efficient than multiprocessing in + # that case. + trees = Parallel(n_jobs=self.n_jobs, verbose=self.verbose, + backend="threading")( + delayed(_parallel_build_trees)( + t, self, X, y, sample_weight, i, len(trees), + verbose=self.verbose) + for i, t in enumerate(trees)) + + # Collect newly grown trees + self.estimators_.extend(trees) + + if self.oob_score: + self._set_oob_score(X, y) + + # Decapsulate classes_ attributes + if hasattr(self, "classes_") and self.n_outputs_ == 1: + self.n_classes_ = self.n_classes_[0] + self.classes_ = self.classes_[0] + + return self + +# Now that we have the base forest defined we can define all the other variants + +class ForestClassifier(six.with_metaclass(ABCMeta, MyBaseForest, + ClassifierMixin)): + """Base class for forest of trees-based classifiers. + + Warning: This class should not be used directly. Use derived classes + instead. + """ + + @abstractmethod + def __init__(self, + base_estimator, + n_estimators=10, + estimator_params=tuple(), + bootstrap=False, + oob_score=False, + n_jobs=1, + random_state=None, + verbose=0, + warm_start=False): + + super(ForestClassifier, self).__init__( + base_estimator, + n_estimators=n_estimators, + estimator_params=estimator_params, + bootstrap=bootstrap, + oob_score=oob_score, + n_jobs=n_jobs, + random_state=random_state, + verbose=verbose, + warm_start=warm_start) + + def _set_oob_score(self, X, y): + n_classes_ = self.n_classes_ + n_samples = y.shape[0] + + oob_decision_function = [] + oob_score = 0.0 + predictions = [] + + for k in xrange(self.n_outputs_): + predictions.append(np.zeros((n_samples, + n_classes_[k]))) + + for estimator in self.estimators_: + mask = np.ones(n_samples, dtype=np.bool) + mask[estimator.indices_] = False + p_estimator = estimator.predict_proba(X[mask, :]) + + if self.n_outputs_ == 1: + p_estimator = [p_estimator] + + for k in xrange(self.n_outputs_): + predictions[k][mask, :] += p_estimator[k] + + for k in xrange(self.n_outputs_): + if (predictions[k].sum(axis=1) == 0).any(): + warn("Some inputs do not have OOB scores. " + "This probably means too few trees were used " + "to compute any reliable oob estimates.") + + decision = (predictions[k] / + predictions[k].sum(axis=1)[:, np.newaxis]) + oob_decision_function.append(decision) + oob_score += np.mean(y[:, k] == + np.argmax(predictions[k], axis=1), axis=0) + + if self.n_outputs_ == 1: + self.oob_decision_function_ = oob_decision_function[0] + else: + self.oob_decision_function_ = oob_decision_function + + self.oob_score_ = oob_score / self.n_outputs_ + + def _validate_y(self, y): + y = np.copy(y) + + self.classes_ = [] + self.n_classes_ = [] + + for k in xrange(self.n_outputs_): + classes_k, y[:, k] = np.unique(y[:, k], return_inverse=True) + self.classes_.append(classes_k) + self.n_classes_.append(classes_k.shape[0]) + + return y + + def predict(self, X): + """Predict class for X. + + The predicted class of an input sample is computed as the majority + prediction of the trees in the forest. + + Parameters + ---------- + X : array-like of shape = [n_samples, n_features] + The input samples. + + Returns + ------- + y : array of shape = [n_samples] or [n_samples, n_outputs] + The predicted classes. + """ + n_samples = len(X) + proba = self.predict_proba(X) + + if self.n_outputs_ == 1: + return self.classes_.take(np.argmax(proba, axis=1), axis=0) + + else: + predictions = np.zeros((n_samples, self.n_outputs_)) + + for k in xrange(self.n_outputs_): + predictions[:, k] = self.classes_[k].take(np.argmax(proba[k], + axis=1), + axis=0) + + return predictions + + def predict_proba(self, X): + """Predict class probabilities for X. + + The predicted class probabilities of an input sample is computed as + the mean predicted class probabilities of the trees in the forest. + + Parameters + ---------- + X : array-like of shape = [n_samples, n_features] + The input samples. + + Returns + ------- + p : array of shape = [n_samples, n_classes], or a list of n_outputs + such arrays if n_outputs > 1. + The class probabilities of the input samples. The order of the + classes corresponds to that in the attribute `classes_`. + """ + # Check data + if getattr(X, "dtype", None) != DTYPE or X.ndim != 2: + X = array2d(X, dtype=DTYPE) + + # Assign chunk of trees to jobs + n_jobs, n_trees, starts = _partition_estimators(self) + + # Parallel loop + all_proba = Parallel(n_jobs=n_jobs, verbose=self.verbose, + backend="threading")( + delayed(_parallel_predict_proba)( + self.estimators_[starts[i]:starts[i + 1]], + X, + self.n_classes_, + self.n_outputs_) + for i in range(n_jobs)) + + # Reduce + proba = all_proba[0] + + if self.n_outputs_ == 1: + for j in xrange(1, len(all_proba)): + proba += all_proba[j] + + proba /= len(self.estimators_) + + else: + for j in xrange(1, len(all_proba)): + for k in xrange(self.n_outputs_): + proba[k] += all_proba[j][k] + + for k in xrange(self.n_outputs_): + proba[k] /= self.n_estimators + + return proba + + def predict_log_proba(self, X): + """Predict class log-probabilities for X. + + The predicted class log-probabilities of an input sample is computed as + the log of the mean predicted class probabilities of the trees in the + forest. + + Parameters + ---------- + X : array-like of shape = [n_samples, n_features] + The input samples. + + Returns + ------- + p : array of shape = [n_samples, n_classes], or a list of n_outputs + such arrays if n_outputs > 1. + The class probabilities of the input samples. The order of the + classes corresponds to that in the attribute `classes_`. + """ + proba = self.predict_proba(X) + + if self.n_outputs_ == 1: + return np.log(proba) + + else: + for k in xrange(self.n_outputs_): + proba[k] = np.log(proba[k]) + + return proba + +class ForestRegressor(six.with_metaclass(ABCMeta, BaseForest, RegressorMixin)): + """Base class for forest of trees-based regressors. + + Warning: This class should not be used directly. Use derived classes + instead. + """ + + @abstractmethod + def __init__(self, + base_estimator, + n_estimators=10, + estimator_params=tuple(), + bootstrap=False, + oob_score=False, + n_jobs=1, + random_state=None, + verbose=0, + warm_start=False): + super(ForestRegressor, self).__init__( + base_estimator, + n_estimators=n_estimators, + estimator_params=estimator_params, + bootstrap=bootstrap, + oob_score=oob_score, + n_jobs=n_jobs, + random_state=random_state, + verbose=verbose, + warm_start=warm_start) + + + def predict(self, X): + """Predict regression target for X. + + The predicted regression target of an input sample is computed as the + mean predicted regression targets of the trees in the forest. + + Parameters + ---------- + X : array-like of shape = [n_samples, n_features] + The input samples. + + Returns + ------- + y: array of shape = [n_samples] or [n_samples, n_outputs] + The predicted values. + """ + # Check data + if getattr(X, "dtype", None) != DTYPE or X.ndim != 2: + X = array2d(X, dtype=DTYPE) + + # Assign chunk of trees to jobs + n_jobs, n_trees, starts = _partition_estimators(self) + + # Parallel loop + all_y_hat = Parallel(n_jobs=n_jobs, verbose=self.verbose, + backend="threading")( + delayed(_parallel_predict_regression)( + self.estimators_[starts[i]:starts[i + 1]], X) + for i in range(n_jobs)) + + # Reduce + y_hat = sum(all_y_hat) / len(self.estimators_) + + return y_hat + + def _set_oob_score(self, X, y): + n_samples = y.shape[0] + + predictions = np.zeros((n_samples, self.n_outputs_)) + n_predictions = np.zeros((n_samples, self.n_outputs_)) + + for estimator in self.estimators_: + mask = np.ones(n_samples, dtype=np.bool) + mask[estimator.indices_] = False + p_estimator = estimator.predict(X[mask, :]) + + if self.n_outputs_ == 1: + p_estimator = p_estimator[:, np.newaxis] + + predictions[mask, :] += p_estimator + n_predictions[mask, :] += 1 + + if (n_predictions == 0).any(): + warn("Some inputs do not have OOB scores. " + "This probably means too few trees were used " + "to compute any reliable oob estimates.") + n_predictions[n_predictions == 0] = 1 + + predictions /= n_predictions + self.oob_prediction_ = predictions + + if self.n_outputs_ == 1: + self.oob_prediction_ = \ + self.oob_prediction_.reshape((n_samples, )) + + self.oob_score_ = 0.0 + + for k in xrange(self.n_outputs_): + self.oob_score_ += r2_score(y[:, k], + predictions[:, k]) + + self.oob_score_ /= self.n_outputs_ + + +class RandomForestClassifier(ForestClassifier): + """A random forest classifier. + + A random forest is a meta estimator that fits a number of decision tree + classifiers on various sub-samples of the dataset and use averaging to + improve the predictive accuracy and control over-fitting. + + Parameters + ---------- + n_estimators : integer, optional (default=10) + The number of trees in the forest. + + criterion : string, optional (default="gini") + The function to measure the quality of a split. Supported criteria are + "gini" for the Gini impurity and "entropy" for the information gain. + Note: this parameter is tree-specific. + + max_features : int, float, string or None, optional (default="auto") + The number of features to consider when looking for the best split: + + - If int, then consider `max_features` features at each split. + - If float, then `max_features` is a percentage and + `int(max_features * n_features)` features are considered at each + split. + - If "auto", then `max_features=sqrt(n_features)`. + - If "sqrt", then `max_features=sqrt(n_features)`. + - If "log2", then `max_features=log2(n_features)`. + - If None, then `max_features=n_features`. + + Note: the search for a split does not stop until at least one + valid partition of the node samples is found, even if it requires to + effectively inspect more than ``max_features`` features. + Note: this parameter is tree-specific. + + max_depth : integer or None, optional (default=None) + The maximum depth of the tree. If None, then nodes are expanded until + all leaves are pure or until all leaves contain less than + min_samples_split samples. + Ignored if ``max_leaf_nodes`` is not None. + Note: this parameter is tree-specific. + + min_samples_split : integer, optional (default=2) + The minimum number of samples required to split an internal node. + Note: this parameter is tree-specific. + + min_samples_leaf : integer, optional (default=1) + The minimum number of samples in newly created leaves. A split is + discarded if after the split, one of the leaves would contain less then + ``min_samples_leaf`` samples. + Note: this parameter is tree-specific. + + min_weight_fraction_leaf : float, optional (default=0.) + The minimum weighted fraction of the input samples required to be at a + leaf node. + Note: this parameter is tree-specific. + + max_leaf_nodes : int or None, optional (default=None) + Grow trees with ``max_leaf_nodes`` in best-first fashion. + Best nodes are defined as relative reduction in impurity. + If None then unlimited number of leaf nodes. + If not None then ``max_depth`` will be ignored. + Note: this parameter is tree-specific. + + bootstrap : boolean, optional (default=True) + Whether bootstrap samples are used when building trees. + + oob_score : bool + Whether to use out-of-bag samples to estimate + the generalization error. + + n_jobs : integer, optional (default=1) + The number of jobs to run in parallel for both `fit` and `predict`. + If -1, then the number of jobs is set to the number of cores. + + random_state : int, RandomState instance or None, optional (default=None) + If int, random_state is the seed used by the random number generator; + If RandomState instance, random_state is the random number generator; + If None, the random number generator is the RandomState instance used + by `np.random`. + + verbose : int, optional (default=0) + Controls the verbosity of the tree building process. + + warm_start : bool, optional (default=False) + When set to ``True``, reuse the solution of the previous call to fit + and add more estimators to the ensemble, otherwise, just fit a whole + new forest. + + Attributes + ---------- + estimators_ : list of DecisionTreeClassifier + The collection of fitted sub-estimators. + + classes_ : array of shape = [n_classes] or a list of such arrays + The classes labels (single output problem), or a list of arrays of + class labels (multi-output problem). + + n_classes_ : int or list + The number of classes (single output problem), or a list containing the + number of classes for each output (multi-output problem). + + feature_importances_ : array of shape = [n_features] + The feature importances (the higher, the more important the feature). + + oob_score_ : float + Score of the training dataset obtained using an out-of-bag estimate. + + oob_decision_function_ : array of shape = [n_samples, n_classes] + Decision function computed with out-of-bag estimate on the training + set. If n_estimators is small it might be possible that a data point + was never left out during the bootstrap. In this case, + `oob_decision_function_` might contain NaN. + + References + ---------- + + .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001. + + See also + -------- + DecisionTreeClassifier, ExtraTreesClassifier + """ + def __init__(self, + n_estimators=10, + criterion="gini", + max_depth=None, + min_samples_split=2, + min_samples_leaf=1, + min_weight_fraction_leaf=0., + max_features="auto", + max_leaf_nodes=None, + bootstrap=True, + oob_score=False, + n_jobs=1, + random_state=None, + verbose=0, + warm_start=False): + super(RandomForestClassifier, self).__init__( + base_estimator=DecisionTreeClassifier(), + n_estimators=n_estimators, + estimator_params=("criterion", "max_depth", "min_samples_split", + "min_samples_leaf", "min_weight_fraction_leaf", + "max_features", "max_leaf_nodes", + "random_state"), + bootstrap=bootstrap, + oob_score=oob_score, + n_jobs=n_jobs, + random_state=random_state, + verbose=verbose, + warm_start=warm_start) + + self.criterion = criterion + self.max_depth = max_depth + self.min_samples_split = min_samples_split + self.min_samples_leaf = min_samples_leaf + self.min_weight_fraction_leaf = min_weight_fraction_leaf + self.max_features = max_features + self.max_leaf_nodes = max_leaf_nodes + + +class RandomForestRegressor(ForestRegressor): + """A random forest regressor. + + A random forest is a meta estimator that fits a number of classifying + decision trees on various sub-samples of the dataset and use averaging + to improve the predictive accuracy and control over-fitting. + + Parameters + ---------- + n_estimators : integer, optional (default=10) + The number of trees in the forest. + + criterion : string, optional (default="mse") + The function to measure the quality of a split. The only supported + criterion is "mse" for the mean squared error. + Note: this parameter is tree-specific. + + max_features : int, float, string or None, optional (default="auto") + The number of features to consider when looking for the best split: + + - If int, then consider `max_features` features at each split. + - If float, then `max_features` is a percentage and + `int(max_features * n_features)` features are considered at each + split. + - If "auto", then `max_features=n_features`. + - If "sqrt", then `max_features=sqrt(n_features)`. + - If "log2", then `max_features=log2(n_features)`. + - If None, then `max_features=n_features`. + + Note: the search for a split does not stop until at least one + valid partition of the node samples is found, even if it requires to + effectively inspect more than ``max_features`` features. + Note: this parameter is tree-specific. + + max_depth : integer or None, optional (default=None) + The maximum depth of the tree. If None, then nodes are expanded until + all leaves are pure or until all leaves contain less than + min_samples_split samples. + Ignored if ``max_leaf_nodes`` is not None. + Note: this parameter is tree-specific. + + min_samples_split : integer, optional (default=2) + The minimum number of samples required to split an internal node. + Note: this parameter is tree-specific. + + min_samples_leaf : integer, optional (default=1) + The minimum number of samples in newly created leaves. A split is + discarded if after the split, one of the leaves would contain less then + ``min_samples_leaf`` samples. + Note: this parameter is tree-specific. + + min_weight_fraction_leaf : float, optional (default=0.) + The minimum weighted fraction of the input samples required to be at a + leaf node. + Note: this parameter is tree-specific. + + max_leaf_nodes : int or None, optional (default=None) + Grow trees with ``max_leaf_nodes`` in best-first fashion. + Best nodes are defined as relative reduction in impurity. + If None then unlimited number of leaf nodes. + If not None then ``max_depth`` will be ignored. + Note: this parameter is tree-specific. + + bootstrap : boolean, optional (default=True) + Whether bootstrap samples are used when building trees. + + oob_score : bool + whether to use out-of-bag samples to estimate + the generalization error. + + n_jobs : integer, optional (default=1) + The number of jobs to run in parallel for both `fit` and `predict`. + If -1, then the number of jobs is set to the number of cores. + + random_state : int, RandomState instance or None, optional (default=None) + If int, random_state is the seed used by the random number generator; + If RandomState instance, random_state is the random number generator; + If None, the random number generator is the RandomState instance used + by `np.random`. + + verbose : int, optional (default=0) + Controls the verbosity of the tree building process. + + warm_start : bool, optional (default=False) + When set to ``True``, reuse the solution of the previous call to fit + and add more estimators to the ensemble, otherwise, just fit a whole + new forest. + + Attributes + ---------- + estimators_ : list of DecisionTreeRegressor + The collection of fitted sub-estimators. + + feature_importances_ : array of shape = [n_features] + The feature importances (the higher, the more important the feature). + + oob_score_ : float + Score of the training dataset obtained using an out-of-bag estimate. + + oob_prediction_ : array of shape = [n_samples] + Prediction computed with out-of-bag estimate on the training set. + + References + ---------- + + .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001. + + See also + -------- + DecisionTreeRegressor, ExtraTreesRegressor + """ + def __init__(self, + n_estimators=10, + criterion="mse", + max_depth=None, + min_samples_split=2, + min_samples_leaf=1, + min_weight_fraction_leaf=0., + max_features="auto", + max_leaf_nodes=None, + bootstrap=True, + oob_score=False, + n_jobs=1, + random_state=None, + verbose=0, + warm_start=False): + super(RandomForestRegressor, self).__init__( + base_estimator=DecisionTreeRegressor(), + n_estimators=n_estimators, + estimator_params=("criterion", "max_depth", "min_samples_split", + "min_samples_leaf", "min_weight_fraction_leaf", + "max_features", "max_leaf_nodes", + "random_state"), + bootstrap=bootstrap, + oob_score=oob_score, + n_jobs=n_jobs, + random_state=random_state, + verbose=verbose, + warm_start=warm_start) + + self.criterion = criterion + self.max_depth = max_depth + self.min_samples_split = min_samples_split + self.min_samples_leaf = min_samples_leaf + self.min_weight_fraction_leaf = min_weight_fraction_leaf + self.max_features = max_features + self.max_leaf_nodes = max_leaf_nodes + + +class ExtraTreesClassifier(ForestClassifier): + """An extra-trees classifier. + + This class implements a meta estimator that fits a number of + randomized decision trees (a.k.a. extra-trees) on various sub-samples + of the dataset and use averaging to improve the predictive accuracy + and control over-fitting. + + Parameters + ---------- + n_estimators : integer, optional (default=10) + The number of trees in the forest. + + criterion : string, optional (default="gini") + The function to measure the quality of a split. Supported criteria are + "gini" for the Gini impurity and "entropy" for the information gain. + Note: this parameter is tree-specific. + + max_features : int, float, string or None, optional (default="auto") + The number of features to consider when looking for the best split: + + - If int, then consider `max_features` features at each split. + - If float, then `max_features` is a percentage and + `int(max_features * n_features)` features are considered at each + split. + - If "auto", then `max_features=sqrt(n_features)`. + - If "sqrt", then `max_features=sqrt(n_features)`. + - If "log2", then `max_features=log2(n_features)`. + - If None, then `max_features=n_features`. + + Note: the search for a split does not stop until at least one + valid partition of the node samples is found, even if it requires to + effectively inspect more than ``max_features`` features. + Note: this parameter is tree-specific. + + max_depth : integer or None, optional (default=None) + The maximum depth of the tree. If None, then nodes are expanded until + all leaves are pure or until all leaves contain less than + min_samples_split samples. + Ignored if ``max_leaf_nodes`` is not None. + Note: this parameter is tree-specific. + + min_samples_split : integer, optional (default=2) + The minimum number of samples required to split an internal node. + Note: this parameter is tree-specific. + + min_samples_leaf : integer, optional (default=1) + The minimum number of samples in newly created leaves. A split is + discarded if after the split, one of the leaves would contain less then + ``min_samples_leaf`` samples. + Note: this parameter is tree-specific. + + min_weight_fraction_leaf : float, optional (default=0.) + The minimum weighted fraction of the input samples required to be at a + leaf node. + Note: this parameter is tree-specific. + + max_leaf_nodes : int or None, optional (default=None) + Grow trees with ``max_leaf_nodes`` in best-first fashion. + Best nodes are defined as relative reduction in impurity. + If None then unlimited number of leaf nodes. + If not None then ``max_depth`` will be ignored. + Note: this parameter is tree-specific. + + bootstrap : boolean, optional (default=False) + Whether bootstrap samples are used when building trees. + + oob_score : bool + Whether to use out-of-bag samples to estimate + the generalization error. + + n_jobs : integer, optional (default=1) + The number of jobs to run in parallel for both `fit` and `predict`. + If -1, then the number of jobs is set to the number of cores. + + random_state : int, RandomState instance or None, optional (default=None) + If int, random_state is the seed used by the random number generator; + If RandomState instance, random_state is the random number generator; + If None, the random number generator is the RandomState instance used + by `np.random`. + + verbose : int, optional (default=0) + Controls the verbosity of the tree building process. + + warm_start : bool, optional (default=False) + When set to ``True``, reuse the solution of the previous call to fit + and add more estimators to the ensemble, otherwise, just fit a whole + new forest. + + Attributes + ---------- + estimators_ : list of DecisionTreeClassifier + The collection of fitted sub-estimators. + + classes_ : array of shape = [n_classes] or a list of such arrays + The classes labels (single output problem), or a list of arrays of + class labels (multi-output problem). + + n_classes_ : int or list + The number of classes (single output problem), or a list containing the + number of classes for each output (multi-output problem). + + feature_importances_ : array of shape = [n_features] + The feature importances (the higher, the more important the feature). + + oob_score_ : float + Score of the training dataset obtained using an out-of-bag estimate. + + oob_decision_function_ : array of shape = [n_samples, n_classes] + Decision function computed with out-of-bag estimate on the training + set. If n_estimators is small it might be possible that a data point + was never left out during the bootstrap. In this case, + `oob_decision_function_` might contain NaN. + + References + ---------- + + .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees", + Machine Learning, 63(1), 3-42, 2006. + + See also + -------- + sklearn.tree.ExtraTreeClassifier : Base classifier for this ensemble. + RandomForestClassifier : Ensemble Classifier based on trees with optimal + splits. + """ + def __init__(self, + n_estimators=10, + criterion="gini", + max_depth=None, + min_samples_split=2, + min_samples_leaf=1, + min_weight_fraction_leaf=0., + max_features="auto", + max_leaf_nodes=None, + bootstrap=False, + oob_score=False, + n_jobs=1, + random_state=None, + verbose=0, + warm_start=False): + super(ExtraTreesClassifier, self).__init__( + base_estimator=ExtraTreeClassifier(), + n_estimators=n_estimators, + estimator_params=("criterion", "max_depth", "min_samples_split", + "min_samples_leaf", "min_weight_fraction_leaf", + "max_features", "max_leaf_nodes", "random_state"), + bootstrap=bootstrap, + oob_score=oob_score, + n_jobs=n_jobs, + random_state=random_state, + verbose=verbose, + warm_start=warm_start) + + self.criterion = criterion + self.max_depth = max_depth + self.min_samples_split = min_samples_split + self.min_samples_leaf = min_samples_leaf + self.min_weight_fraction_leaf = min_weight_fraction_leaf + self.max_features = max_features + self.max_leaf_nodes = max_leaf_nodes + + +class ExtraTreesRegressor(ForestRegressor): + """An extra-trees regressor. + + This class implements a meta estimator that fits a number of + randomized decision trees (a.k.a. extra-trees) on various sub-samples + of the dataset and use averaging to improve the predictive accuracy + and control over-fitting. + + Parameters + ---------- + n_estimators : integer, optional (default=10) + The number of trees in the forest. + + criterion : string, optional (default="mse") + The function to measure the quality of a split. The only supported + criterion is "mse" for the mean squared error. + Note: this parameter is tree-specific. + + max_features : int, float, string or None, optional (default="auto") + The number of features to consider when looking for the best split: + + - If int, then consider `max_features` features at each split. + - If float, then `max_features` is a percentage and + `int(max_features * n_features)` features are considered at each + split. + - If "auto", then `max_features=n_features`. + - If "sqrt", then `max_features=sqrt(n_features)`. + - If "log2", then `max_features=log2(n_features)`. + - If None, then `max_features=n_features`. + + Note: the search for a split does not stop until at least one + valid partition of the node samples is found, even if it requires to + effectively inspect more than ``max_features`` features. + Note: this parameter is tree-specific. + + max_depth : integer or None, optional (default=None) + The maximum depth of the tree. If None, then nodes are expanded until + all leaves are pure or until all leaves contain less than + min_samples_split samples. + Ignored if ``max_leaf_nodes`` is not None. + Note: this parameter is tree-specific. + + min_samples_split : integer, optional (default=2) + The minimum number of samples required to split an internal node. + Note: this parameter is tree-specific. + + min_samples_leaf : integer, optional (default=1) + The minimum number of samples in newly created leaves. A split is + discarded if after the split, one of the leaves would contain less then + ``min_samples_leaf`` samples. + Note: this parameter is tree-specific. + + min_weight_fraction_leaf : float, optional (default=0.) + The minimum weighted fraction of the input samples required to be at a + leaf node. + Note: this parameter is tree-specific. + + max_leaf_nodes : int or None, optional (default=None) + Grow trees with ``max_leaf_nodes`` in best-first fashion. + Best nodes are defined as relative reduction in impurity. + If None then unlimited number of leaf nodes. + If not None then ``max_depth`` will be ignored. + Note: this parameter is tree-specific. + + bootstrap : boolean, optional (default=False) + Whether bootstrap samples are used when building trees. + Note: this parameter is tree-specific. + + oob_score : bool + Whether to use out-of-bag samples to estimate + the generalization error. + + n_jobs : integer, optional (default=1) + The number of jobs to run in parallel for both `fit` and `predict`. + If -1, then the number of jobs is set to the number of cores. + + random_state : int, RandomState instance or None, optional (default=None) + If int, random_state is the seed used by the random number generator; + If RandomState instance, random_state is the random number generator; + If None, the random number generator is the RandomState instance used + by `np.random`. + + verbose : int, optional (default=0) + Controls the verbosity of the tree building process. + + warm_start : bool, optional (default=False) + When set to ``True``, reuse the solution of the previous call to fit + and add more estimators to the ensemble, otherwise, just fit a whole + new forest. + + Attributes + ---------- + estimators_ : list of DecisionTreeRegressor + The collection of fitted sub-estimators. + + feature_importances_ : array of shape = [n_features] + The feature importances (the higher, the more important the feature). + + oob_score_ : float + Score of the training dataset obtained using an out-of-bag estimate. + + oob_prediction_ : array of shape = [n_samples] + Prediction computed with out-of-bag estimate on the training set. + + References + ---------- + + .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees", + Machine Learning, 63(1), 3-42, 2006. + + See also + -------- + sklearn.tree.ExtraTreeRegressor: Base estimator for this ensemble. + RandomForestRegressor: Ensemble regressor using trees with optimal splits. + """ + def __init__(self, + n_estimators=10, + criterion="mse", + max_depth=None, + min_samples_split=2, + min_samples_leaf=1, + min_weight_fraction_leaf=0., + max_features="auto", + max_leaf_nodes=None, + bootstrap=False, + oob_score=False, + n_jobs=1, + random_state=None, + verbose=0, + warm_start=False): + super(ExtraTreesRegressor, self).__init__( + base_estimator=ExtraTreeRegressor(), + n_estimators=n_estimators, + estimator_params=("criterion", "max_depth", "min_samples_split", + "min_samples_leaf", "min_weight_fraction_leaf", + "max_features", "max_leaf_nodes", + "random_state"), + bootstrap=bootstrap, + oob_score=oob_score, + n_jobs=n_jobs, + random_state=random_state, + verbose=verbose, + warm_start=warm_start) + + self.criterion = criterion + self.max_depth = max_depth + self.min_samples_split = min_samples_split + self.min_samples_leaf = min_samples_leaf + self.min_weight_fraction_leaf = min_weight_fraction_leaf + self.max_features = max_features + self.max_leaf_nodes = max_leaf_nodes + + From a395683fade276116e2e71743f83c47609316c83 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Sat, 10 Jan 2015 18:12:50 +0100 Subject: [PATCH 092/352] Replace absolute max_features in tree-based models by a scaling factor for max_features=log_2(X.shape[1]) --- .../components/classification/extra_trees.py | 13 +++++++------ .../components/classification/gradient_boosting.py | 13 ++++++++----- .../components/classification/random_forest.py | 10 +++++++--- AutoSklearn/components/regression/random_forest.py | 13 +++++++++---- AutoSklearn/implementations/OneHotEncoder.py | 6 +++--- tests/components/classification/test_extra_trees.py | 2 +- .../components/classification/test_random_forest.py | 2 +- tests/components/regression/test_random_forests.py | 2 +- tests/test_autosklearn_regression.py | 2 +- 9 files changed, 38 insertions(+), 25 deletions(-) diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index d60369fcd1..c01b7feedd 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -45,9 +45,6 @@ def __init__(self, n_estimators, criterion, min_samples_leaf, self.min_samples_split = int(min_samples_split) self.max_features = float(max_features) - if self.max_features > 1: - raise ValueError("'max features' in should be < 1: %f" % - self.max_features) if bootstrap == "True": self.bootstrap = True @@ -62,12 +59,14 @@ def __init__(self, n_estimators, criterion, min_samples_leaf, self.compute_importances = compute_importances def fit(self, X, Y): - + num_features = X.shape[1] + max_features = float(self.max_features) * (np.log(num_features) + 1) + max_features = min(0.5, max_features) self.estimator = sklearn.ensemble.ExtraTreesClassifier( n_estimators=self.n_estimators, criterion=self.criterion, max_depth=self.max_depth, min_samples_split=self.min_samples_split, min_samples_leaf=self.min_samples_leaf, bootstrap=self.bootstrap, - max_features=self.max_features, max_leaf_nodes=self.max_leaf_nodes, + max_features=max_features, max_leaf_nodes=self.max_leaf_nodes, oob_score=self.oob_score, n_jobs=self.n_jobs, verbose=self.verbose, random_state=self.random_state, min_density=self.min_density, compute_importances=self.compute_importances @@ -115,8 +114,10 @@ def get_hyperparameter_search_space(): "n_estimators", 10, 100, default=10) criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") + #max_features = UniformFloatHyperparameter( + # "max_features", 0.01, 0.5, default=0.1) max_features = UniformFloatHyperparameter( - "max_features", 0.01, 0.5, default=0.1) + "max_features", 0.5, 5, default=1) min_samples_split = UniformIntegerHyperparameter( "min_samples_split", 2, 20, default=2) min_samples_leaf = UniformIntegerHyperparameter( diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index cf70ac4272..fbb0e38413 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -47,9 +47,7 @@ def __init__(self, learning_rate, n_estimators, subsample, raise ValueError("'max_features' should be a float: %s" % max_features) self.max_features = float(max_features) - if self.max_features > 1: - raise ValueError("'max features' in should be < 1: %f" % - self.max_features) + self.loss = loss self.warm_start = warm_start self.init = init @@ -58,13 +56,16 @@ def __init__(self, learning_rate, n_estimators, subsample, self.estimator = None def fit(self, X, Y): + num_features = X.shape[1] + max_features = float(self.max_features) * (np.log(num_features) + 1) + max_features = min(0.5, max_features) self.estimator = sklearn.ensemble.GradientBoostingClassifier( learning_rate=self.learning_rate, n_estimators=self.n_estimators, subsample=self.subsample, min_samples_split=self.min_samples_split, min_samples_leaf=self.min_samples_leaf, - max_features=self.max_features, + max_features=max_features, max_leaf_nodes=self.max_leaf_nodes, loss=self.loss, max_depth=self.max_depth, @@ -123,8 +124,10 @@ def get_hyperparameter_search_space(): # Copied from random_forest.py n_estimators = UniformIntegerHyperparameter( name="n_estimators", lower=10, upper=100, default=10, log=False) + #max_features = UniformFloatHyperparameter( + # name="max_features", lower=0.01, upper=0.5, default=0.1) max_features = UniformFloatHyperparameter( - name="max_features", lower=0.01, upper=0.5, default=0.1) + "max_features", 0.5, 5, default=1) max_depth = UniformIntegerHyperparameter( name="max_depth", lower=1, upper=10, default=3) min_samples_split = UniformIntegerHyperparameter( diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index 7594ba8365..9b921af139 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -34,7 +34,9 @@ def fit(self, X, Y): self.min_samples_split = int(self.min_samples_split) self.min_samples_leaf = int(self.min_samples_leaf) if self.max_features not in ("sqrt", "log2", "auto"): - self.max_features = float(self.max_features) + num_features = X.shape[1] + max_features = float(self.max_features) * (np.log(num_features) + 1) + max_features = min(0.5, max_features) if self.bootstrap == "True": self.bootstrap = True else: @@ -45,7 +47,7 @@ def fit(self, X, Y): self.estimator = sklearn.ensemble.RandomForestClassifier( n_estimators=self.n_estimators, criterion=self.criterion, - max_features=self.max_features, + max_features=max_features, max_depth=self.max_depth, min_samples_split=self.min_samples_split, min_samples_leaf=self.min_samples_leaf, @@ -89,8 +91,10 @@ def get_hyperparameter_search_space(): "n_estimators", 10, 100, default=10) criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") + #max_features = UniformFloatHyperparameter( + # "max_features", 0.01, 0.5, default=0.2) max_features = UniformFloatHyperparameter( - "max_features", 0.01, 0.5, default=0.2) + "max_features", 0.5, 5, default=1) max_depth = UnParametrizedHyperparameter("max_depth", "None") min_samples_split = UniformIntegerHyperparameter( "min_samples_split", 2, 20, default=2) diff --git a/AutoSklearn/components/regression/random_forest.py b/AutoSklearn/components/regression/random_forest.py index 5aa2fbaafd..b68a9e527f 100644 --- a/AutoSklearn/components/regression/random_forest.py +++ b/AutoSklearn/components/regression/random_forest.py @@ -27,8 +27,6 @@ def __init__(self, n_estimators, criterion, max_features, raise ValueError("'max_features' should be a float: %s" % str(max_features)) self.max_features = float(max_features) - if self.max_features > 1: - raise ValueError("'max_features' > 1: %s" % str(max_features)) self.max_leaf_nodes_or_max_depth = str(max_leaf_nodes_or_max_depth) if self.max_leaf_nodes_or_max_depth == "max_depth": @@ -60,10 +58,17 @@ def __init__(self, n_estimators, criterion, max_features, self.estimator = None def fit(self, X, Y): + num_features = X.shape[1] + max_features = float(self.max_features) * (np.log(num_features) + 1) + max_features = min(0.5, max_features) + print max_features + import sys + sys.stdout.flush() + self.estimator = sklearn.ensemble.RandomForestRegressor( n_estimators=self.n_estimators, criterion=self.criterion, - max_features=self.max_features, + max_features=max_features, max_depth=self.max_depth, min_samples_split=self.min_samples_split, min_samples_leaf=self.min_samples_leaf, @@ -101,7 +106,7 @@ def get_hyperparameter_search_space(): n_estimators = UniformIntegerHyperparameter( name="n_estimators", lower=10, upper=100, default=10, log=False) max_features = UniformFloatHyperparameter( - name="max_features", lower=0.01, upper=0.5, default=0.1) + "max_features", 0.5, 5, default=1) max_depth = UnParametrizedHyperparameter("max_depth", "None") min_samples_split = UniformIntegerHyperparameter( name="min_samples_split", lower=2, upper=20, default=2, log=False) diff --git a/AutoSklearn/implementations/OneHotEncoder.py b/AutoSklearn/implementations/OneHotEncoder.py index 3c11346bd3..43d7d8bc2a 100644 --- a/AutoSklearn/implementations/OneHotEncoder.py +++ b/AutoSklearn/implementations/OneHotEncoder.py @@ -6,9 +6,9 @@ from sklearn.utils import check_arrays from sklearn.utils import atleast2d_or_csc, safe_asarray -zip = six.moves.zip -map = six.moves.map -range = six.moves.range +#zip = six.moves.zip +#map = six.moves.map +#range = six.moves.range def _transform_selected(X, transform, selected="all", copy=True): diff --git a/tests/components/classification/test_extra_trees.py b/tests/components/classification/test_extra_trees.py index 5eb133b19a..7dc0d78d98 100644 --- a/tests/components/classification/test_extra_trees.py +++ b/tests/components/classification/test_extra_trees.py @@ -12,5 +12,5 @@ def test_default_configuration(self): for i in range(10): predictions, targets = \ _test_classifier(ExtraTreesClassifier) - self.assertAlmostEqual(0.97999999999999998, + self.assertAlmostEqual(0.93999999999999995, sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file diff --git a/tests/components/classification/test_random_forest.py b/tests/components/classification/test_random_forest.py index 28e55cd918..dfc9e5fafa 100644 --- a/tests/components/classification/test_random_forest.py +++ b/tests/components/classification/test_random_forest.py @@ -10,5 +10,5 @@ class RandomForestComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): predictions, targets = _test_classifier(RandomForest, dataset='iris') - self.assertAlmostEqual(0.92, + self.assertAlmostEqual(0.93999999999999995, sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file diff --git a/tests/components/regression/test_random_forests.py b/tests/components/regression/test_random_forests.py index 9932f9213c..c895bd2550 100644 --- a/tests/components/regression/test_random_forests.py +++ b/tests/components/regression/test_random_forests.py @@ -12,5 +12,5 @@ def test_default_configuration(self): predictions, targets = _test_regressor(RandomForest, dataset='diabetes') - self.assertAlmostEqual(0.30805962106685625, + self.assertAlmostEqual(0.39646965538696599, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) \ No newline at end of file diff --git a/tests/test_autosklearn_regression.py b/tests/test_autosklearn_regression.py index cdacd9c202..45dc8fce11 100644 --- a/tests/test_autosklearn_regression.py +++ b/tests/test_autosklearn_regression.py @@ -49,7 +49,7 @@ def test_default_configuration(self): predictions = auto.predict(copy.deepcopy(X_test)) # The lower the worse r2_score = sklearn.metrics.r2_score(Y_test, predictions) - self.assertAlmostEqual(0.30805962106685625, r2_score) + self.assertAlmostEqual(0.39525804491127225, r2_score) model_score = auto.score(copy.deepcopy(X_test), Y_test) self.assertEqual(model_score, r2_score) From 02cba4af7865ec8e1c727257888b698c5e6c0c7a Mon Sep 17 00:00:00 2001 From: Jost Tobias Springenberg Date: Sat, 10 Jan 2015 19:13:00 +0100 Subject: [PATCH 093/352] use our own forest everywhere --- .../components/classification/extra_trees.py | 21 +++++++++++++------ .../classification/random_forest.py | 21 ++++++++++++++----- .../components/regression/random_forest.py | 21 +++++++++++++------ 3 files changed, 46 insertions(+), 17 deletions(-) diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index 89270d199e..2e6bb04ea5 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -1,5 +1,4 @@ import numpy as np -import sklearn.ensemble from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -8,7 +7,8 @@ from HPOlibConfigSpace.conditions import EqualsCondition from ..classification_base import AutoSklearnClassificationAlgorithm - +# get our own forests to replace the sklearn ones +import ..implementations.forest as forest class ExtraTreesClassifier(AutoSklearnClassificationAlgorithm): @@ -18,6 +18,7 @@ def __init__(self, n_estimators, criterion, min_samples_leaf, oob_score=False, n_jobs=1, random_state=None, verbose=0): self.n_estimators = int(n_estimators) + self.estimator_increment = 10 if criterion not in ("gini", "entropy"): raise ValueError("'criterion' is not in ('gini', 'entropy'): " "%s" % criterion) @@ -61,15 +62,23 @@ def __init__(self, n_estimators, criterion, min_samples_leaf, def fit(self, X, Y): - self.estimator = sklearn.ensemble.ExtraTreesClassifier( - n_estimators=self.n_estimators, criterion=self.criterion, + self.estimator = forest.ExtraTreesClassifier( + n_estimators=self.estimator_increment, criterion=self.criterion, max_depth=self.max_depth, min_samples_split=self.min_samples_split, min_samples_leaf=self.min_samples_leaf, bootstrap=self.bootstrap, max_features=self.max_features, max_leaf_nodes=self.max_leaf_nodes, oob_score=self.oob_score, n_jobs=self.n_jobs, verbose=self.verbose, - random_state=self.random_state + random_state=self.random_state, + warm_state = True ) - return self.estimator.fit(X, Y) + # JTS TODO: I think we might have to copy here if we want self.estimator + # to always be consistent on sigabort + while len(self.estimator.estimators_) < self.n_estimators: + tmp = self.estimator.copy() + tmp.n_estimators += self.estimator_increment + tmp.fit(X, Y) + self.estimator = tmp + return self.estimator def predict(self, X): if self.estimator is None: diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index 7594ba8365..4c2558ab5c 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -1,5 +1,4 @@ import numpy as np -import sklearn.ensemble from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -7,12 +6,15 @@ UnParametrizedHyperparameter from ..classification_base import AutoSklearnClassificationAlgorithm +# get our own forests to replace the sklearn ones +import ..implementations.forest as forest class RandomForest(AutoSklearnClassificationAlgorithm): def __init__(self, n_estimators, criterion, max_features, max_depth, min_samples_split, min_samples_leaf, bootstrap, max_leaf_nodes, random_state=None, n_jobs=1): self.n_estimators = n_estimators + self.estimator_increment = 10 self.criterion = criterion self.max_features = max_features self.max_depth = max_depth @@ -42,8 +44,9 @@ def fit(self, X, Y): if self.max_leaf_nodes == "None": self.max_leaf_nodes = None - self.estimator = sklearn.ensemble.RandomForestClassifier( - n_estimators=self.n_estimators, + # initial fit of only increment trees + self.estimator = forest.RandomForestClassifier( + n_estimators=self.estimator_increment, criterion=self.criterion, max_features=self.max_features, max_depth=self.max_depth, @@ -52,8 +55,16 @@ def fit(self, X, Y): bootstrap=self.bootstrap, max_leaf_nodes=self.max_leaf_nodes, random_state=self.random_state, - n_jobs=self.n_jobs) - return self.estimator.fit(X, Y) + n_jobs=self.n_jobs, + warm_start=True) + # JTS TODO: I think we might have to copy here if we want self.estimator + # to always be consistent on sigabort + while len(self.estimator.estimators_) < self.n_estimators: + tmp = self.estimator.copy() + tmp.n_estimators += self.estimator_increment + tmp.fit(X, Y) + self.estimator = tmp + return self.estimator def predict(self, X): if self.estimator is None: diff --git a/AutoSklearn/components/regression/random_forest.py b/AutoSklearn/components/regression/random_forest.py index 5aa2fbaafd..ac505d9ad1 100644 --- a/AutoSklearn/components/regression/random_forest.py +++ b/AutoSklearn/components/regression/random_forest.py @@ -1,5 +1,4 @@ import numpy as np -import sklearn.ensemble from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -7,7 +6,8 @@ UnParametrizedHyperparameter, Constant from ..regression_base import AutoSklearnRegressionAlgorithm - +# get our own forests to replace the sklearn ones +import ..implementations.forest as forest class RandomForest(AutoSklearnRegressionAlgorithm): def __init__(self, n_estimators, criterion, max_features, @@ -17,6 +17,7 @@ def __init__(self, n_estimators, criterion, max_features, max_leaf_nodes=None, random_state=None, n_jobs=1): self.n_estimators = int(n_estimators) + self.estimator_increment = 10 if criterion in ("mse",): self.criterion = criterion else: @@ -60,8 +61,8 @@ def __init__(self, n_estimators, criterion, max_features, self.estimator = None def fit(self, X, Y): - self.estimator = sklearn.ensemble.RandomForestRegressor( - n_estimators=self.n_estimators, + self.estimator = forest.RandomForestRegressor( + n_estimators=self.estimator_increment, criterion=self.criterion, max_features=self.max_features, max_depth=self.max_depth, @@ -70,8 +71,16 @@ def fit(self, X, Y): bootstrap=self.bootstrap, max_leaf_nodes=self.max_leaf_nodes, random_state=self.random_state, - n_jobs=self.n_jobs) - return self.estimator.fit(X, Y) + n_jobs=self.n_jobs, + warm_start=True) + # JTS TODO: I think we might have to copy here if we want self.estimator + # to always be consistent on sigabort + while len(self.estimator.estimators_) < self.n_estimators: + tmp = self.estimator.copy() + tmp.n_estimators += self.estimator_increment + tmp.fit(X, Y) + self.estimator = tmp + return self.estimator def predict(self, X): if self.estimator is None: From be9b5d9da7ac0b4eae15e0d6097e7d2a08b564e8 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Sun, 11 Jan 2015 17:45:28 +0100 Subject: [PATCH 094/352] Revert to numpy 1.9.0 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index f7834db653..15eb7532e5 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ "hyperparameter configuration.", version="0.1dev", packages=setuptools.find_packages(), - install_requires=["numpy==1.9.1", + install_requires=["numpy==1.9.0", "scipy==0.14.0", "scikit-learn==0.15.2", "nose", From c94e251303067f2bf0ed647108a8f70b58d1006a Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Sun, 11 Jan 2015 17:46:05 +0100 Subject: [PATCH 095/352] Limit max_features to 0.5, set n_estimators to a constant value of 100 --- AutoSklearn/components/classification/extra_trees.py | 8 +++++--- .../components/classification/gradient_boosting.py | 7 ++++--- AutoSklearn/components/classification/random_forest.py | 8 +++++--- AutoSklearn/components/regression/random_forest.py | 6 ++++-- tests/components/classification/test_extra_trees.py | 2 +- tests/components/classification/test_gradient_boosting.py | 2 +- tests/components/classification/test_random_forest.py | 2 +- tests/components/regression/test_random_forests.py | 2 +- tests/test_autosklearn.py | 2 +- tests/test_autosklearn_regression.py | 2 +- 10 files changed, 24 insertions(+), 17 deletions(-) diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index 21608024bc..1235e76e03 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -3,7 +3,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter + UnParametrizedHyperparameter, Constant from HPOlibConfigSpace.conditions import EqualsCondition from ..classification_base import AutoSklearnClassificationAlgorithm @@ -60,6 +60,7 @@ def __init__(self, n_estimators, criterion, min_samples_leaf, def fit(self, X, Y): num_features = X.shape[1] max_features = int(float(self.max_features) * (np.log(num_features) + 1)) + max_features = min(0.5, max_features) self.estimator = forest.ExtraTreesClassifier( n_estimators=0, criterion=self.criterion, max_depth=self.max_depth, min_samples_split=self.min_samples_split, @@ -115,8 +116,9 @@ def get_hyperparameter_search_space(): "bootstrap", ["True", "False"], default="False") # Copied from random_forest.py - n_estimators = UniformIntegerHyperparameter( - "n_estimators", 10, 100, default=10) + #n_estimators = UniformIntegerHyperparameter( + # "n_estimators", 10, 100, default=10) + n_estimators = Constant("n_estimators", 100) criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") #max_features = UniformFloatHyperparameter( diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index 49115c7b24..a8b59c7141 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -6,7 +6,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter + UnParametrizedHyperparameter, Constant from ..classification_base import AutoSklearnClassificationAlgorithm @@ -129,8 +129,9 @@ def get_hyperparameter_search_space(): # Copied from random_forest.py - n_estimators = UniformIntegerHyperparameter( - name="n_estimators", lower=10, upper=100, default=10, log=False) + #n_estimators = UniformIntegerHyperparameter( + # name="n_estimators", lower=10, upper=100, default=10, log=False) + n_estimators = Constant("n_estimators", 100) #max_features = UniformFloatHyperparameter( # name="max_features", lower=0.01, upper=0.5, default=0.1) max_features = UniformFloatHyperparameter( diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index 9322e8b4ca..5ff4e170c8 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -3,7 +3,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter + UnParametrizedHyperparameter, Constant from ..classification_base import AutoSklearnClassificationAlgorithm # get our own forests to replace the sklearn ones @@ -38,6 +38,7 @@ def fit(self, X, Y): if self.max_features not in ("sqrt", "log2", "auto"): num_features = X.shape[1] max_features = int(float(self.max_features) * (np.log(num_features) + 1)) + max_features = min(0.5, max_features) if self.bootstrap == "True": self.bootstrap = True else: @@ -97,8 +98,9 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(): - n_estimators = UniformIntegerHyperparameter( - "n_estimators", 10, 100, default=10) + #n_estimators = UniformIntegerHyperparameter( + # "n_estimators", 10, 100, default=10) + n_estimators = Constant("n_estimators", 100) criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") #max_features = UniformFloatHyperparameter( diff --git a/AutoSklearn/components/regression/random_forest.py b/AutoSklearn/components/regression/random_forest.py index 4b27fab8ed..4faab47520 100644 --- a/AutoSklearn/components/regression/random_forest.py +++ b/AutoSklearn/components/regression/random_forest.py @@ -61,6 +61,7 @@ def __init__(self, n_estimators, criterion, max_features, def fit(self, X, Y): num_features = X.shape[1] max_features = int(float(self.max_features) * (np.log(num_features) + 1)) + max_features = min(0.5, max_features) self.estimator = forest.RandomForestRegressor( n_estimators=0, criterion=self.criterion, @@ -107,8 +108,9 @@ def get_properties(): def get_hyperparameter_search_space(): criterion = Constant(name="criterion", value="mse") # Copied from classification/random_forest.py - n_estimators = UniformIntegerHyperparameter( - name="n_estimators", lower=10, upper=100, default=10, log=False) + #n_estimators = UniformIntegerHyperparameter( + # name="n_estimators", lower=10, upper=100, default=10, log=False) + n_estimators = Constant("n_estimators", 100) max_features = UniformFloatHyperparameter( "max_features", 0.5, 5, default=1) max_depth = UnParametrizedHyperparameter("max_depth", "None") diff --git a/tests/components/classification/test_extra_trees.py b/tests/components/classification/test_extra_trees.py index b6a620ff4a..98999577c3 100644 --- a/tests/components/classification/test_extra_trees.py +++ b/tests/components/classification/test_extra_trees.py @@ -12,5 +12,5 @@ def test_default_configuration(self): for i in range(10): predictions, targets = \ _test_classifier(ExtraTreesClassifier) - self.assertAlmostEqual(0.93999999999999995, + self.assertAlmostEqual(0.95999999999999996, sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file diff --git a/tests/components/classification/test_gradient_boosting.py b/tests/components/classification/test_gradient_boosting.py index c7af6b68a4..fbf5ea83c9 100644 --- a/tests/components/classification/test_gradient_boosting.py +++ b/tests/components/classification/test_gradient_boosting.py @@ -12,5 +12,5 @@ def test_default_configuration(self): for i in range(10): predictions, targets = \ _test_classifier(GradientBoostingClassifier) - self.assertAlmostEqual(0.95999999999999996, + self.assertAlmostEqual(0.93999999999999995, sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file diff --git a/tests/components/classification/test_random_forest.py b/tests/components/classification/test_random_forest.py index dfc9e5fafa..d1a7fb12dd 100644 --- a/tests/components/classification/test_random_forest.py +++ b/tests/components/classification/test_random_forest.py @@ -10,5 +10,5 @@ class RandomForestComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): predictions, targets = _test_classifier(RandomForest, dataset='iris') - self.assertAlmostEqual(0.93999999999999995, + self.assertAlmostEqual(0.95999999999999996, sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file diff --git a/tests/components/regression/test_random_forests.py b/tests/components/regression/test_random_forests.py index 78107eb171..04203a33bf 100644 --- a/tests/components/regression/test_random_forests.py +++ b/tests/components/regression/test_random_forests.py @@ -12,5 +12,5 @@ def test_default_configuration(self): predictions, targets = _test_regressor(RandomForest, dataset='diabetes') - self.assertAlmostEqual(0.33509113544178348, + self.assertAlmostEqual(0.4343284900648805, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index f64715d7ee..5ad3734d5f 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -46,7 +46,7 @@ def test_default_configuration(self): auto = AutoSklearnClassifier(default) auto = auto.fit(X_train, Y_train) predictions = auto.predict(X_test) - self.assertAlmostEqual(0.94, + self.assertAlmostEqual(0.95999999999999996, sklearn.metrics.accuracy_score(predictions, Y_test)) scores = auto.predict_proba(X_test) diff --git a/tests/test_autosklearn_regression.py b/tests/test_autosklearn_regression.py index 1b614fd206..da5ff3699e 100644 --- a/tests/test_autosklearn_regression.py +++ b/tests/test_autosklearn_regression.py @@ -49,7 +49,7 @@ def test_default_configuration(self): predictions = auto.predict(copy.deepcopy(X_test)) # The lower the worse r2_score = sklearn.metrics.r2_score(Y_test, predictions) - self.assertAlmostEqual(0.39525804491127225, r2_score) + self.assertAlmostEqual(0.42626559843685119, r2_score) model_score = auto.score(copy.deepcopy(X_test), Y_test) self.assertEqual(model_score, r2_score) From 6466c983cec2df0d6b15677463be614a2fff60f5 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 12 Jan 2015 16:01:47 +0100 Subject: [PATCH 096/352] Fix tree-based max_features calculation --- AutoSklearn/components/classification/extra_trees.py | 3 ++- AutoSklearn/components/classification/gradient_boosting.py | 3 ++- AutoSklearn/components/classification/random_forest.py | 3 ++- AutoSklearn/components/regression/random_forest.py | 3 ++- tests/components/regression/test_random_forests.py | 2 +- tests/test_autosklearn_regression.py | 2 +- 6 files changed, 10 insertions(+), 6 deletions(-) diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index 1235e76e03..f4b1608f1b 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -60,7 +60,8 @@ def __init__(self, n_estimators, criterion, min_samples_leaf, def fit(self, X, Y): num_features = X.shape[1] max_features = int(float(self.max_features) * (np.log(num_features) + 1)) - max_features = min(0.5, max_features) + # Use at most half of the features + max_features = max(1, min(int(X.shape[1] / 2), max_features)) self.estimator = forest.ExtraTreesClassifier( n_estimators=0, criterion=self.criterion, max_depth=self.max_depth, min_samples_split=self.min_samples_split, diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index a8b59c7141..376df4ab7f 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -58,7 +58,8 @@ def __init__(self, learning_rate, n_estimators, subsample, def fit(self, X, Y): num_features = X.shape[1] max_features = float(self.max_features) * (np.log(num_features) + 1) - max_features = min(0.5, max_features) + # Use at most half of the features + max_features = max(1, min(int(X.shape[1] / 2), max_features)) self.estimator = sklearn.ensemble.GradientBoostingClassifier( learning_rate=self.learning_rate, n_estimators=0, diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index 5ff4e170c8..7f97d4e325 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -38,7 +38,8 @@ def fit(self, X, Y): if self.max_features not in ("sqrt", "log2", "auto"): num_features = X.shape[1] max_features = int(float(self.max_features) * (np.log(num_features) + 1)) - max_features = min(0.5, max_features) + # Use at most half of the features + max_features = max(1, min(int(X.shape[1] / 2), max_features)) if self.bootstrap == "True": self.bootstrap = True else: diff --git a/AutoSklearn/components/regression/random_forest.py b/AutoSklearn/components/regression/random_forest.py index 4faab47520..37c794d35a 100644 --- a/AutoSklearn/components/regression/random_forest.py +++ b/AutoSklearn/components/regression/random_forest.py @@ -61,7 +61,8 @@ def __init__(self, n_estimators, criterion, max_features, def fit(self, X, Y): num_features = X.shape[1] max_features = int(float(self.max_features) * (np.log(num_features) + 1)) - max_features = min(0.5, max_features) + # Use at most half of the features + max_features = max(1, min(int(X.shape[1] / 2), max_features)) self.estimator = forest.RandomForestRegressor( n_estimators=0, criterion=self.criterion, diff --git a/tests/components/regression/test_random_forests.py b/tests/components/regression/test_random_forests.py index 04203a33bf..b2b8372e7a 100644 --- a/tests/components/regression/test_random_forests.py +++ b/tests/components/regression/test_random_forests.py @@ -12,5 +12,5 @@ def test_default_configuration(self): predictions, targets = _test_regressor(RandomForest, dataset='diabetes') - self.assertAlmostEqual(0.4343284900648805, + self.assertAlmostEqual(0.41960285574345746, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) diff --git a/tests/test_autosklearn_regression.py b/tests/test_autosklearn_regression.py index da5ff3699e..94bd300237 100644 --- a/tests/test_autosklearn_regression.py +++ b/tests/test_autosklearn_regression.py @@ -49,7 +49,7 @@ def test_default_configuration(self): predictions = auto.predict(copy.deepcopy(X_test)) # The lower the worse r2_score = sklearn.metrics.r2_score(Y_test, predictions) - self.assertAlmostEqual(0.42626559843685119, r2_score) + self.assertAlmostEqual(0.41855369945075482, r2_score) model_score = auto.score(copy.deepcopy(X_test), Y_test) self.assertEqual(model_score, r2_score) From a8da9b36a8c72e34f162909bc1fade85ad0df4d5 Mon Sep 17 00:00:00 2001 From: kleinaa Date: Mon, 12 Jan 2015 16:55:57 +0100 Subject: [PATCH 097/352] fix bug with multilabel classification in random forest code --- AutoSklearn/implementations/forest.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/AutoSklearn/implementations/forest.py b/AutoSklearn/implementations/forest.py index a10fe4fa40..dc27d8dbe9 100644 --- a/AutoSklearn/implementations/forest.py +++ b/AutoSklearn/implementations/forest.py @@ -348,14 +348,19 @@ def predict_proba(self, X): # Assign chunk of trees to jobs n_jobs, n_trees, starts = _partition_estimators(self) - + + # Bugfix for _parallel_predict_proba which expects a list for multi-label and integer for single-label problems + if not isinstance(self.n_classes_, int) and len(self.n_classes_) == 1: + n_classes_ = self.n_classes_[0] + else: + n_classes_ = self.n_classes_ # Parallel loop all_proba = Parallel(n_jobs=n_jobs, verbose=self.verbose, backend="threading")( delayed(_parallel_predict_proba)( self.estimators_[starts[i]:starts[i + 1]], X, - self.n_classes_, + n_classes_, self.n_outputs_) for i in range(n_jobs)) From 3b7f78521fc93eb7775187829eee4f9d60f3f16b Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 12 Jan 2015 21:42:26 +0100 Subject: [PATCH 098/352] fix max_features in gradient boosting --- AutoSklearn/components/classification/gradient_boosting.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index 376df4ab7f..fc174a429e 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -57,7 +57,7 @@ def __init__(self, learning_rate, n_estimators, subsample, def fit(self, X, Y): num_features = X.shape[1] - max_features = float(self.max_features) * (np.log(num_features) + 1) + max_features = int(float(self.max_features) * (np.log(num_features) + 1)) # Use at most half of the features max_features = max(1, min(int(X.shape[1] / 2), max_features)) self.estimator = sklearn.ensemble.GradientBoostingClassifier( From 5f57d21ec8a913488dca2137ae0f73fff218d6e9 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 12 Jan 2015 21:42:54 +0100 Subject: [PATCH 099/352] add gradient boosting regression --- .../regression/gradient_boosting.py | 152 ++++++++++++++++++ .../regression/test_gradient_boosting.py | 16 ++ tests/test_autosklearn_regression.py | 2 +- 3 files changed, 169 insertions(+), 1 deletion(-) create mode 100644 AutoSklearn/components/regression/gradient_boosting.py create mode 100644 tests/components/regression/test_gradient_boosting.py diff --git a/AutoSklearn/components/regression/gradient_boosting.py b/AutoSklearn/components/regression/gradient_boosting.py new file mode 100644 index 0000000000..07bbf69a83 --- /dev/null +++ b/AutoSklearn/components/regression/gradient_boosting.py @@ -0,0 +1,152 @@ +import numpy as np +import sklearn.ensemble + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.conditions import InCondition +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from ..regression_base import AutoSklearnRegressionAlgorithm + + +class GradientBoosting(AutoSklearnRegressionAlgorithm): + def __init__(self, + loss, learning_rate, subsample, min_samples_split, + min_samples_leaf, max_depth, max_features, alpha=0.9, + max_leaf_nodes=None, estimator_increment=10, + max_leaf_nodes_or_max_depth="max_depth", + n_estimators=100, init=None, random_state=None, verbose=0): + + self.max_leaf_nodes_or_max_depth = str(max_leaf_nodes_or_max_depth) + + if self.max_leaf_nodes_or_max_depth == "max_depth": + if max_depth == 'None': + self.max_depth = None + else: + self.max_depth = int(max_depth) + self.max_leaf_nodes = None + elif self.max_leaf_nodes_or_max_depth == "max_leaf_nodes": + self.max_depth = None + if max_leaf_nodes == 'None': + self.max_leaf_nodes = None + else: + self.max_leaf_nodes = int(max_leaf_nodes) + else: + raise ValueError("max_leaf_nodes_or_max_depth sould be in " + "('max_leaf_nodes', 'max_depth'): %s" % + self.max_leaf_nodes_or_max_depth) + + if loss in ("ls", "lad", "huber", "quantile"): + self.loss = loss + else: + raise ValueError("'loss' should be in ('ls', 'lad', 'huber', " + "'quantile'), but is %s" % str(loss)) + self.learning_rate = float(learning_rate) + self.subsample = float(subsample) + self.min_samples_split = int(float(min_samples_split)) + self.min_samples_leaf = int(float(min_samples_leaf)) + self.max_depth = int(float(max_depth)) + + if self.loss in ('huber', 'quantile'): + self.alpha = float(alpha) + else: + self.alpha = 0.9 # default value + + self.n_estimators = n_estimators + + self.estimator_increment = int(estimator_increment) + self.init = init + + # We handle this later + self.max_features = float(max_features) + + # Defaults + self.random_state = random_state + self.verbose = int(verbose) + self.estimator = None + + def fit(self, X, Y): + num_features = X.shape[1] + max_features = int(float(self.max_features) * (np.log(num_features) + 1)) + # Use at most half of the features + max_features = max(1, min(int(X.shape[1] / 2), max_features)) + + self.estimator = sklearn.ensemble.GradientBoostingRegressor( + max_leaf_nodes=self.max_leaf_nodes, + loss=self.loss, + learning_rate=self.learning_rate, + n_estimators=0, + subsample=self.subsample, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + max_depth=self.max_depth, + init=self.init, + max_features=max_features, + alpha=self.alpha, + warm_start=True, + random_state=self.random_state, + verbose=self.verbose + ) + # JTS TODO: I think we might have to copy here if we want self.estimator + # to always be consistent on sigabort + while len(self.estimator.estimators_) < self.n_estimators: + tmp = self.estimator # TODO I think we need to copy here! + tmp.n_estimators += self.estimator_increment + tmp.fit(X, Y) + self.estimator = tmp + return self.estimator.fit(X, Y) + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + @staticmethod + def get_properties(): + return {'shortname': 'GB', + 'name': 'Gradient Boosting Regressor', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': False, + 'is_deterministic': True, + 'handles_sparse': False, + # TODO find out what is best used here! + # But rather fortran or C-contiguous? + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(): + + loss = CategoricalHyperparameter( + name="loss", choices=["ls", "lad"], default='ls') #, "huber", "quantile"], default='ls') + + learning_rate = UniformFloatHyperparameter( + name="learning_rate", lower=0.0001, upper=1, default=0.1, log=True) + subsample = UniformFloatHyperparameter( + name="subsample", lower=0.01, upper=1.0, default=1.0, log=False) + + n_estimators = Constant("n_estimators", 100) + + max_features = UniformFloatHyperparameter( + "max_features", 0.5, 5, default=1) + max_depth = UniformIntegerHyperparameter( + name="max_depth", lower=1, upper=10, default=3) + min_samples_split = UniformIntegerHyperparameter( + name="min_samples_split", lower=2, upper=20, default=2, log=False) + min_samples_leaf = UniformIntegerHyperparameter( + name="min_samples_leaf", lower=1, upper=20, default=1, log=False) + + cs = ConfigurationSpace() + cs.add_hyperparameter(n_estimators) + cs.add_hyperparameter(loss) + cs.add_hyperparameter(learning_rate) + cs.add_hyperparameter(max_features) + cs.add_hyperparameter(max_depth) + cs.add_hyperparameter(min_samples_split) + cs.add_hyperparameter(min_samples_leaf) + cs.add_hyperparameter(subsample) + return cs \ No newline at end of file diff --git a/tests/components/regression/test_gradient_boosting.py b/tests/components/regression/test_gradient_boosting.py new file mode 100644 index 0000000000..1c9af229d8 --- /dev/null +++ b/tests/components/regression/test_gradient_boosting.py @@ -0,0 +1,16 @@ +import unittest + +from AutoSklearn.components.regression.gradient_boosting import GradientBoosting +from AutoSklearn.util import _test_regressor + +import sklearn.metrics + + +class GradientBoostingComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + + predictions, targets = _test_regressor(GradientBoosting, + dataset='diabetes') + self.assertAlmostEqual(0.39056015252360077, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) diff --git a/tests/test_autosklearn_regression.py b/tests/test_autosklearn_regression.py index 94bd300237..998b7b3f6d 100644 --- a/tests/test_autosklearn_regression.py +++ b/tests/test_autosklearn_regression.py @@ -58,7 +58,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(18, len(hyperparameters)) + self.assertEqual(26, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From 2dc218140e0f51eba0fe6aea363a7c1a118edda7 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 12 Jan 2015 23:22:03 +0100 Subject: [PATCH 100/352] fix train method (return self not self.estimator.fit()) --- AutoSklearn/components/classification/extra_trees.py | 2 +- AutoSklearn/components/classification/gradient_boosting.py | 3 ++- .../components/classification/k_nearest_neighbors.py | 3 ++- AutoSklearn/components/classification/liblinear.py | 3 ++- AutoSklearn/components/classification/libsvm_svc.py | 7 +++++-- AutoSklearn/components/classification/random_forest.py | 2 +- AutoSklearn/components/classification/sgd.py | 3 ++- AutoSklearn/components/regression/gradient_boosting.py | 2 +- AutoSklearn/components/regression/random_forest.py | 2 +- AutoSklearn/components/regression/ridge_regression.py | 4 ++-- .../regression/test_support_vector_regression.py | 1 + tests/test_autosklearn_regression.py | 4 ++-- 12 files changed, 22 insertions(+), 14 deletions(-) create mode 100644 tests/components/regression/test_support_vector_regression.py diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index f4b1608f1b..916843c50d 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -78,7 +78,7 @@ def fit(self, X, Y): tmp.n_estimators += self.estimator_increment tmp.fit(X, Y) self.estimator = tmp - return self.estimator + return self def predict(self, X): if self.estimator is None: diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index fc174a429e..400ce087da 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -82,7 +82,8 @@ def fit(self, X, Y): tmp.n_estimators += self.estimator_increment tmp.fit(X, Y) self.estimator = tmp - return self.estimator.fit(X, Y) + self.estimator.fit(X, Y) + return self def predict(self, X): if self.estimator is None: diff --git a/AutoSklearn/components/classification/k_nearest_neighbors.py b/AutoSklearn/components/classification/k_nearest_neighbors.py index b183e2fc4f..b8b1b4695e 100644 --- a/AutoSklearn/components/classification/k_nearest_neighbors.py +++ b/AutoSklearn/components/classification/k_nearest_neighbors.py @@ -30,7 +30,8 @@ def __init__(self, n_neighbors, weights, metric, algorithm='auto', p=2, def fit(self, X, Y): self.estimator = \ sklearn.neighbors.KNeighborsClassifier() - return self.estimator.fit(X, Y) + self.estimator.fit(X, Y) + return self def predict(self, X): if self.estimator is None: diff --git a/AutoSklearn/components/classification/liblinear.py b/AutoSklearn/components/classification/liblinear.py index 1810cdb56f..845e4c10e1 100644 --- a/AutoSklearn/components/classification/liblinear.py +++ b/AutoSklearn/components/classification/liblinear.py @@ -45,7 +45,8 @@ def fit(self, X, Y): C=self.C, class_weight=self.class_weight, random_state=self.random_state) - return self.estimator.fit(X, Y) + self.estimator.fit(X, Y) + return self def predict(self, X): if self.estimator is None: diff --git a/AutoSklearn/components/classification/libsvm_svc.py b/AutoSklearn/components/classification/libsvm_svc.py index 269b4f684a..182f4f4837 100644 --- a/AutoSklearn/components/classification/libsvm_svc.py +++ b/AutoSklearn/components/classification/libsvm_svc.py @@ -53,7 +53,8 @@ def fit(self, X, Y): random_state=self.random_state, cache_size=2000, probability=True) - return self.estimator.fit(X, Y) + self.estimator.fit(X, Y) + return self def predict(self, X): if self.estimator is None: @@ -90,7 +91,9 @@ def get_hyperparameter_search_space(): C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True, default=1.0) # No linear kernel here, because we have liblinear - kernel = CategoricalHyperparameter("kernel", ["rbf", "poly", "sigmoid"]) + kernel = CategoricalHyperparameter(name="kernel", + choices=["rbf", "poly", "sigmoid"], + default="rbf") degree = UniformIntegerHyperparameter("degree", 1, 5, default=3) gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8, log=True, default=0.1) diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index 7f97d4e325..e4ca54c71a 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -67,7 +67,7 @@ def fit(self, X, Y): tmp.n_estimators += self.estimator_increment tmp.fit(X, Y) self.estimator = tmp - return self.estimator + return self def predict(self, X): if self.estimator is None: diff --git a/AutoSklearn/components/classification/sgd.py b/AutoSklearn/components/classification/sgd.py index 66eece50f8..6fc8d48638 100644 --- a/AutoSklearn/components/classification/sgd.py +++ b/AutoSklearn/components/classification/sgd.py @@ -53,7 +53,8 @@ def fit(self, X, Y): power_t=self.power_t, shuffle=True, random_state=self.random_state) - return self.estimator.fit(X, Y) + self.estimator.fit(X, Y) + return self def predict(self, X): if self.estimator is None: diff --git a/AutoSklearn/components/regression/gradient_boosting.py b/AutoSklearn/components/regression/gradient_boosting.py index 07bbf69a83..9181b08ddd 100644 --- a/AutoSklearn/components/regression/gradient_boosting.py +++ b/AutoSklearn/components/regression/gradient_boosting.py @@ -95,7 +95,7 @@ def fit(self, X, Y): tmp.n_estimators += self.estimator_increment tmp.fit(X, Y) self.estimator = tmp - return self.estimator.fit(X, Y) + return self def predict(self, X): if self.estimator is None: diff --git a/AutoSklearn/components/regression/random_forest.py b/AutoSklearn/components/regression/random_forest.py index 37c794d35a..5d85b01ae1 100644 --- a/AutoSklearn/components/regression/random_forest.py +++ b/AutoSklearn/components/regression/random_forest.py @@ -82,7 +82,7 @@ def fit(self, X, Y): tmp.n_estimators += self.estimator_increment tmp.fit(X, Y) self.estimator = tmp - return self.estimator + return self def predict(self, X): if self.estimator is None: diff --git a/AutoSklearn/components/regression/ridge_regression.py b/AutoSklearn/components/regression/ridge_regression.py index 71bff71849..8ba944a905 100644 --- a/AutoSklearn/components/regression/ridge_regression.py +++ b/AutoSklearn/components/regression/ridge_regression.py @@ -33,8 +33,8 @@ def fit(self, X, Y): max_iter=self.max_iter, tol=self.tol, solver=self.solver) - - return self.estimator.fit(X, Y) + self.estimator.fit(X, Y) + return self def predict(self, X): if self.estimator is None: diff --git a/tests/components/regression/test_support_vector_regression.py b/tests/components/regression/test_support_vector_regression.py new file mode 100644 index 0000000000..163eead837 --- /dev/null +++ b/tests/components/regression/test_support_vector_regression.py @@ -0,0 +1 @@ +__author__ = 'eggenspk' diff --git a/tests/test_autosklearn_regression.py b/tests/test_autosklearn_regression.py index 998b7b3f6d..0b82d0a5d1 100644 --- a/tests/test_autosklearn_regression.py +++ b/tests/test_autosklearn_regression.py @@ -25,7 +25,7 @@ class TestAutoSKlearnRegressor(unittest.TestCase): # TODO: test for both possible ways to initialize AutoSklearn # parameters and other... - def test_find_classifiers(self): + def test_find_regressors(self): regressors = regression_components._regressors self.assertGreaterEqual(len(regressors), 1) for key in regressors: @@ -58,7 +58,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(26, len(hyperparameters)) + self.assertEqual(35, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From 44cb2c59905e6358a75097fe0c24dc3fd11378a8 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 12 Jan 2015 23:24:39 +0100 Subject: [PATCH 101/352] add support vector regression --- .../regression/support_vector_regression.py | 141 ++++++++++++++++++ misc/regressors.csv | 4 +- .../test_support_vector_regression.py | 21 ++- 3 files changed, 163 insertions(+), 3 deletions(-) create mode 100644 AutoSklearn/components/regression/support_vector_regression.py diff --git a/AutoSklearn/components/regression/support_vector_regression.py b/AutoSklearn/components/regression/support_vector_regression.py new file mode 100644 index 0000000000..273910ce2c --- /dev/null +++ b/AutoSklearn/components/regression/support_vector_regression.py @@ -0,0 +1,141 @@ +import numpy as np +import sklearn.svm + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.conditions import EqualsCondition, OrConjunction, \ + InCondition +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter + + +from ..regression_base import AutoSklearnRegressionAlgorithm + + +class SupportVectorRegression(AutoSklearnRegressionAlgorithm): + def __init__(self, kernel, gamma, C, epsilon, degree, + coef0, tol, shrinking, probability=False, + cache_size=2000, verbose=False, + max_iter=-1, random_state=None + ): + + if kernel in ('linear', 'poly', 'rbf', 'sigmoid'): + self.kernel = kernel + else: + raise ValueError("'kernel' must be in ('linear', 'poly', 'rbf', " + "'sigmoid'), but is %s" % str(kernel)) + self.gamma = float(gamma) + self.C = float(C) + self.epsilon = epsilon + self.degree = int(float(degree)) + self.coef0 = float(coef0) + self.tol = float(tol) + + if shrinking == "True": + self.shrinking = True + elif shrinking == "False": + self.shrinking = False + else: + raise ValueError("'shrinking' must be in ('True', 'False'), " + "but is %s" % str(shrinking)) + + # We don't assume any hyperparameters here + self.probability = probability + self.cache_size = cache_size + self.verbose = verbose + self.max_iter = int(float(max_iter)) + self.random_state = random_state + self.estimator = None + + def fit(self, X, Y): + + self.estimator = sklearn.svm.SVR( + kernel=self.kernel, + degree=self.degree, + gamma=self.gamma, + coef0=self.coef0, + tol=self.tol, + C=self.C, + epsilon=self.epsilon, + shrinking=self.shrinking, + probability=self.probability, + cache_size=self.cache_size, + verbose=self.verbose, + max_iter=self.max_iter, + random_state=self.random_state + ) + + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + @staticmethod + def get_properties(): + return {'shortname': 'SVR', + 'name': 'Support Vector Regression', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': True, + 'is_deterministic': True, + 'handles_sparse': True, + # TODO find out what is best used here! + # But rather fortran or C-contiguous? + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(): + # Copied from libsvm_c + C = UniformFloatHyperparameter( + name="C", lower=0.03125, upper=32768, log=True, default=1.0) + + kernel = CategoricalHyperparameter( + name="kernel", choices=['linear', 'poly', 'rbf', 'sigmoid'], + default="rbf") + degree = UniformIntegerHyperparameter( + name="degree", lower=1, upper=5, default=3) + + # Changed the gamma value to 0.0 (is 0.1 for classification) + gamma = UniformFloatHyperparameter( + name="gamma", lower=3.0517578125e-05, upper=8, log=True, default=0.1) + + # TODO this is totally ad-hoc + coef0 = UniformFloatHyperparameter( + name="coef0", lower=-1, upper=1, default=0) + # probability is no hyperparameter, but an argument to the SVM algo + shrinking = CategoricalHyperparameter( + name="shrinking", choices=["True", "False"], default="True") + tol = UniformFloatHyperparameter( + name="tol", lower=1e-5, upper=1e-1, default=1e-3, log=True) + max_iter = UnParametrizedHyperparameter("max_iter", -1) + + # Random Guess + epsilon = UniformFloatHyperparameter(name="epsilon", lower=0.001, + upper=1, default=0.1, log=True) + cs = ConfigurationSpace() + cs.add_hyperparameter(C) + cs.add_hyperparameter(kernel) + cs.add_hyperparameter(degree) + cs.add_hyperparameter(gamma) + cs.add_hyperparameter(coef0) + cs.add_hyperparameter(shrinking) + cs.add_hyperparameter(tol) + cs.add_hyperparameter(max_iter) + cs.add_hyperparameter(epsilon) + + degree_depends_on_kernel = InCondition(child=degree, parent=kernel, + values=('poly', 'rbf', 'sigmoid')) + gamma_depends_on_kernel = InCondition(child=gamma, parent=kernel, + values=('poly', 'rbf')) + coef0_depends_on_kernel = InCondition(child=coef0, parent=kernel, + values=('poly', 'sigmoid')) + cs.add_condition(degree_depends_on_kernel) + cs.add_condition(gamma_depends_on_kernel) + cs.add_condition(coef0_depends_on_kernel) + return cs diff --git a/misc/regressors.csv b/misc/regressors.csv index df7dbe7afb..db52d29cef 100644 --- a/misc/regressors.csv +++ b/misc/regressors.csv @@ -13,7 +13,7 @@ Preprocessing,,False,Preprocessin ,,, ,,, WeDoNotAddThis,,False,We already have this method -,,, +Support_vector_regression,,True,Check searchspace ,,, ,,, ,,,Can crash when there is no neighbour within the radius @@ -36,7 +36,7 @@ WeDoNotAddThis,,False,We a ,,, ,,, ,,, -,,, +Gradient_boosting,,True, ,,, ,,, WeDoNotAddThis,,False,We already have this method diff --git a/tests/components/regression/test_support_vector_regression.py b/tests/components/regression/test_support_vector_regression.py index 163eead837..0dbf7c9170 100644 --- a/tests/components/regression/test_support_vector_regression.py +++ b/tests/components/regression/test_support_vector_regression.py @@ -1 +1,20 @@ -__author__ = 'eggenspk' +import unittest + +from AutoSklearn.components.regression.support_vector_regression import SupportVectorRegression +from AutoSklearn.util import _test_regressor + + +import sklearn.metrics + + +class SupportVectorComponentTest(unittest.TestCase): + + def test_default_configuration(self): + + for i in range(10): + predictions, targets = _test_regressor(SupportVectorRegression, + dataset='boston') + print predictions + print targets + self.assertAlmostEqual(-0.070779979927571235, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) From 1acfaa1eaaa95233026ff07e2e11d463c0d8818b Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 13 Jan 2015 00:47:27 +0100 Subject: [PATCH 102/352] Deactivate SVR --- .../components/regression/support_vector_regression.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/AutoSklearn/components/regression/support_vector_regression.py b/AutoSklearn/components/regression/support_vector_regression.py index 273910ce2c..ffe92de1fd 100644 --- a/AutoSklearn/components/regression/support_vector_regression.py +++ b/AutoSklearn/components/regression/support_vector_regression.py @@ -11,11 +11,11 @@ from ..regression_base import AutoSklearnRegressionAlgorithm - +# Something is wrong here... +""" class SupportVectorRegression(AutoSklearnRegressionAlgorithm): - def __init__(self, kernel, gamma, C, epsilon, degree, - coef0, tol, shrinking, probability=False, - cache_size=2000, verbose=False, + def __init__(self, kernel, C, epsilon, degree, coef0, tol, shrinking, + gamma=0.0, probability=False, cache_size=2000, verbose=False, max_iter=-1, random_state=None ): @@ -139,3 +139,4 @@ def get_hyperparameter_search_space(): cs.add_condition(gamma_depends_on_kernel) cs.add_condition(coef0_depends_on_kernel) return cs +""" From f190975cc517a6b1cb9015a921b44ffa4f4c1c3a Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 13 Jan 2015 13:00:05 +0100 Subject: [PATCH 103/352] Forbid GradientBoostRegressor with Feature Learning --- AutoSklearn/autosklearn_regression.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AutoSklearn/autosklearn_regression.py b/AutoSklearn/autosklearn_regression.py index 3137fe4c35..de110ea1aa 100644 --- a/AutoSklearn/autosklearn_regression.py +++ b/AutoSklearn/autosklearn_regression.py @@ -418,7 +418,7 @@ def get_hyperparameter_search_space(include_regressors=None, # long # Combinations of tree-based models with feature learning: - regressors_ = ["random_forest", ] + regressors_ = ["random_forest", "gradient_boosting"] feature_learning_ = ["kitchen_sinks", "sparse_filtering"] for c, f in product(regressors_, feature_learning_): From 110abeb097f747bd134639736aba30edcef4b408 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 14 Jan 2015 11:32:32 +0100 Subject: [PATCH 104/352] Deactivate support vector regression test --- tests/components/regression/test_support_vector_regression.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/components/regression/test_support_vector_regression.py b/tests/components/regression/test_support_vector_regression.py index 0dbf7c9170..582de1d291 100644 --- a/tests/components/regression/test_support_vector_regression.py +++ b/tests/components/regression/test_support_vector_regression.py @@ -1,5 +1,5 @@ import unittest - +""" from AutoSklearn.components.regression.support_vector_regression import SupportVectorRegression from AutoSklearn.util import _test_regressor @@ -18,3 +18,4 @@ def test_default_configuration(self): print targets self.assertAlmostEqual(-0.070779979927571235, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) +""" \ No newline at end of file From 8aa3170e7d24fb903c41cd41989e34d37851f5b8 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 14 Jan 2015 11:39:34 +0100 Subject: [PATCH 105/352] Create a common base class for both classification and regression --- AutoSklearn/autosklearn_regression.py | 437 ------------------------ AutoSklearn/{autosklearn.py => base.py} | 343 ++++++------------- AutoSklearn/classification.py | 194 +++++++++++ AutoSklearn/regression.py | 225 ++++++++++++ tests/test_autosklearn.py | 9 +- tests/test_autosklearn_regression.py | 13 +- 6 files changed, 533 insertions(+), 688 deletions(-) delete mode 100644 AutoSklearn/autosklearn_regression.py rename AutoSklearn/{autosklearn.py => base.py} (52%) create mode 100644 AutoSklearn/classification.py create mode 100644 AutoSklearn/regression.py diff --git a/AutoSklearn/autosklearn_regression.py b/AutoSklearn/autosklearn_regression.py deleted file mode 100644 index de110ea1aa..0000000000 --- a/AutoSklearn/autosklearn_regression.py +++ /dev/null @@ -1,437 +0,0 @@ -from collections import defaultdict -import copy -from itertools import product - -import sklearn -if sklearn.__version__ != "0.15.2": - raise ValueError("AutoSklearn supports only sklearn version 0.15.2, " - "you installed %s." % sklearn.__version__) - -from sklearn.base import BaseEstimator, RegressorMixin -from sklearn.pipeline import Pipeline -from sklearn.utils import check_random_state - -from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ - InactiveHyperparameter -from HPOlibConfigSpace.conditions import EqualsCondition -from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction - -from . import components as components - - -class AutoSklearnRegressor(BaseEstimator, RegressorMixin): - """This class implements the regression task. - - It implements a pipeline, which includes one preprocessing step and one - regression algorithm. It can render a search space including all known - regression and preprocessing algorithms. - - Contrary to the sklearn API it is not possible to enumerate the - possible parameters in the __init__ function because we only know the - available regressors at runtime. For this reason the user must - specifiy the parameters by passing an instance of - HPOlibConfigSpace.configuration_space.Configuration. - - Parameters - ---------- - configuration : HPOlibConfigSpace.configuration_space.Configuration - The configuration to evaluate. - - random_state : int, RandomState instance or None, optional (default=None) - If int, random_state is the seed used by the random number generator; - If RandomState instance, random_state is the random number generator; - If None, the random number generator is the RandomState instance - used by `np.random`. - - Attributes - ---------- - _estimator : The underlying scikit-learn regression model. This - variable is assigned after a call to the - :meth:`AutoSklearn.autosklearn.AutoSklearnRegressor.fit` method. - - _preprocessor : The underlying scikit-learn preprocessing algorithm. This - variable is only assigned if a preprocessor is specified and - after a call to the - :meth:`AutoSklearn.autosklearn.AutoSklearnRegressor.fit` method. - - See also - -------- - - References - ---------- - - Examples - -------- - - """ - def __init__(self, configuration, random_state=None): - - # TODO check sklearn version! - self.configuration = configuration - - cs = self.get_hyperparameter_search_space() - cs.check_configuration(configuration) - - self._pipeline = None - - if random_state is None: - self.random_state = check_random_state(1) - else: - self.random_state = check_random_state(random_state) - - def fit(self, X, Y, fit_params=None, init_params=None): - """Fit the selected algorithm to the training data. - - Parameters - ---------- - X : array-like or sparse, shape = (n_samples, n_features) - Training data. The preferred type of the matrix (dense or sparse) - depends on the regressor selected. - - y : array-like - Targets - - fit_params : dict - See the documentation of sklearn.pipeline.Pipeline for formatting - instructions. - - init_params : dict - Pass arguments to the constructors of single methods. To pass - arguments to only one of the methods (lets says the - OneHotEncoder), seperate the class name from the argument by a ':'. - - Returns - ------- - self : returns an instance of self. - - Raises - ------ - NoModelException - NoModelException is raised if fit() is called without specifying - a regression algorithm first. - """ - # TODO: perform input validation - # TODO: look if X.shape[0] == y.shape[0] - # TODO: check if the hyperparameters have been set... - # TODO: this is an example of the antipattern of not properly - # initializing a class in the init function! - # TODO: can this happen now that a configuration is specified at - # instantiation time - - steps = [] - init_params_per_method = defaultdict(dict) - if init_params is not None: - for init_param, value in init_params: - method, param = init_param.split(":") - init_params_per_method[method][param] = value - - preprocessors_names = ["imputation", "rescaling", - self.configuration['preprocessor'].value] - - for preproc_name in preprocessors_names: - if preproc_name != "None": - preproc_params = {} - - for instantiated_hyperparameter in self.configuration: - if not instantiated_hyperparameter.hyperparameter.name \ - .startswith(preproc_name): - continue - if isinstance(instantiated_hyperparameter, - InactiveHyperparameter): - continue - - name_ = instantiated_hyperparameter.hyperparameter.name. \ - split(":")[1] - preproc_params[name_] = instantiated_hyperparameter.value - - preproc_params.update(init_params_per_method[preproc_name]) - preprocessor_object = components.preprocessing_components. \ - _preprocessors[preproc_name](random_state=self.random_state, - **preproc_params) - steps.append((preproc_name, preprocessor_object)) - - # Extract Hyperparameters from the configuration object - regressor_name = self.configuration["regressor"].value - regressor_parameters = {} - for instantiated_hyperparameter in self.configuration: - if not instantiated_hyperparameter.hyperparameter.name.startswith( - regressor_name): - continue - if isinstance(instantiated_hyperparameter, InactiveHyperparameter): - continue - - name_ = instantiated_hyperparameter.hyperparameter.name.\ - split(":")[1] - regressor_parameters[name_] = instantiated_hyperparameter.value - - regressor_parameters.update(init_params_per_method[regressor_name]) - regressor_object = components.regression_components._regressors\ - [regressor_name](random_state=self.random_state, - **regressor_parameters) - steps.append((regressor_name, regressor_object)) - - self._validate_input_X(X) - self._validate_input_Y(Y) - - self._pipeline = Pipeline(steps) - if fit_params is None or not isinstance(fit_params, dict): - fit_params = dict() - else: - fit_params = {key.replace(":", "__"): value for key, value in - fit_params.items()} - self._pipeline.fit(X, Y, **fit_params) - return self - - def predict(self, X): - """Predict the classes using the selected model. - - Parameters - ---------- - X : array-like, shape = (n_samples, n_features) - - Returns - ------- - array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) - Returns the predicted values""" - # TODO check if fit() was called before... - self._validate_input_X(X) - return self._pipeline.predict(X) - - def predict_proba(self, X): - """predict_proba. - - Parameters - ---------- - X : array-like, shape = (n_samples, n_features) - - Returns - ------- - array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) - """ - self._validate_input_X(X) - - Xt = X - for name, transform in self._pipeline.steps[:-1]: - Xt = transform.transform(Xt) - return self._pipeline.steps[-1][-1].predict_proba(Xt) - - def _validate_input_X(self, X): - # TODO: think of all possible states which can occur and how to - # handle them - pass - - def _validate_input_Y(self, Y): - pass - - def add_model_class(self, model): - """ - Raises - ------ - NotImplementedError - """ - raise NotImplementedError() - - @staticmethod - def get_hyperparameter_search_space(include_regressors=None, - exclude_regressors=None, - include_preprocessors=None, - exclude_preprocessors=None, - sparse=False): - # TODO: We assume that there exists only a single regression task. which - # is different to classification where we have multiclass, - # multilabel, etc - """Return the configuration space for the CASH problem. - - Parameters - ---------- - include_regressors : list of str - If include_regressors is given, only the regressors specified - are used. Specify them by their module name; e.g., to include - only the SVM use :python:`include_regressors=['svr']`. - Cannot be used together with :python:`exclude_regressors`. - - exclude_regressors : list of str - If exclude_regressors is given, only the regressors specified - are used. Specify them by their module name; e.g., to include - all regressors except the SVM use - :python:`exclude_regressors=['svr']`. - Cannot be used together with :python:`include_regressors`. - - include_preprocessors : list of str - If include_preprocessors is given, only the preprocessors specified - are used. Specify them by their module name; e.g., to include - only the PCA use :python:`include_preprocessors=['pca']`. - Cannot be used together with :python:`exclude_preprocessors`. - - exclude_preprocessors : list of str - If include_preprocessors is given, only the preprocessors specified - are used. Specify them by their module name; e.g., to include - all preprocessors except the PCA use - :python:`exclude_preprocessors=['pca']`. - Cannot be used together with :python:`include_preprocessors`. - - Returns - ------- - cs : HPOlibConfigSpace.configuration_space.Configuration - The configuration space describing the AutoSklearnClassifier. - - """ - if include_regressors is not None and exclude_regressors is not None: - raise ValueError("The arguments include_regressors and " - "exclude_regressors cannot be used together.") - - if include_preprocessors is not None and exclude_preprocessors is not None: - raise ValueError("The arguments include_preprocessors and " - "exclude_preprocessors cannot be used together.") - - always_active = ["imputation", "rescaling"] - - cs = ConfigurationSpace() - - available_regressors = \ - components.regression_components._regressors - available_preprocessors = \ - components.preprocessing_components._preprocessors - - names = [] - names_ = [] - for name in available_regressors: - if name in always_active: - names_.append(name) - continue - elif include_regressors is not None and \ - name not in include_regressors: - continue - elif exclude_regressors is not None and \ - name in exclude_regressors: - continue - - if sparse is True and available_regressors[name]. \ - get_properties()['handles_sparse'] is False: - continue - names.append(name) - - if len(names + names_) == 0: - raise ValueError("No regressor to build a configuration space " - "for...") - - regressor = CategoricalHyperparameter("regressor", names, - default='random_forest' if 'random_forest' in names else names[0]) - cs.add_hyperparameter(regressor) - for name in names + names_: - - # We have to retrieve the configuration space every time because - # we change the objects it returns. If we reused it, we could not - # retrieve the conditions further down - # TODO implement copy for hyperparameters and forbidden and - # conditions! - - regressor_configuration_space = available_regressors[name]. \ - get_hyperparameter_search_space() - for parameter in regressor_configuration_space.get_hyperparameters(): - new_parameter = copy.deepcopy(parameter) - new_parameter.name = "%s:%s" % (name, new_parameter.name) - cs.add_hyperparameter(new_parameter) - # We must only add a condition if the hyperparameter is not - # conditional on something else - if len(regressor_configuration_space. - get_parents_of(parameter)) == 0: - condition = EqualsCondition(new_parameter, regressor, name) - cs.add_condition(condition) - - for condition in available_regressors[name]. \ - get_hyperparameter_search_space().get_conditions(): - dlcs = condition.get_descendant_literal_conditions() - for dlc in dlcs: - if not dlc.child.name.startswith(name): - dlc.child.name = "%s:%s" % (name, dlc.child.name) - if not dlc.parent.name.startswith(name): - dlc.parent.name = "%s:%s" % (name, dlc.parent.name) - cs.add_condition(condition) - - for forbidden_clause in available_regressors[name]. \ - get_hyperparameter_search_space().forbidden_clauses: - dlcs = forbidden_clause.get_descendant_literal_clauses() - for dlc in dlcs: - if not dlc.hyperparameter.name.startswith(name): - dlc.hyperparameter.name = "%s:%s" % (name, - dlc.hyperparameter.name) - cs.add_forbidden_clause(forbidden_clause) - - names = [] - names_ = [] - for name in available_preprocessors: - if name in always_active: - names_.append(name) - continue - elif include_preprocessors is not None and \ - name not in include_preprocessors: - continue - elif exclude_preprocessors is not None and \ - name in exclude_preprocessors: - continue - if sparse is True and available_preprocessors[name]. \ - get_properties()['handles_sparse'] is False: - continue - - names.append(name) - - preprocessor = CategoricalHyperparameter("preprocessor", - ["None"] + names, - default='None') - cs.add_hyperparameter(preprocessor) - for name in names + names_: - preprocessor_configuration_space = available_preprocessors[name]. \ - get_hyperparameter_search_space() - for parameter in preprocessor_configuration_space.get_hyperparameters(): - new_parameter = copy.deepcopy(parameter) - new_parameter.name = "%s:%s" % (name, new_parameter.name) - cs.add_hyperparameter(new_parameter) - # We must only add a condition if the hyperparameter is not - # conditional on something else - if len(preprocessor_configuration_space. - get_parents_of(parameter)) == 0 and name not in always_active: - condition = EqualsCondition(new_parameter, preprocessor, name) - cs.add_condition(condition) - - for condition in available_preprocessors[name]. \ - get_hyperparameter_search_space().get_conditions(): - dlcs = condition.get_descendent_literal_conditions() - for dlc in dlcs: - if not dlc.child.name.startswith(name): - dlc.child.name = "%s:%s" % (name, dlc.child.name) - if not dlc.parent.name.startswith(name): - dlc.parent.name = "%s:%s" % (name, dlc.parent.name) - cs.add_condition(condition) - - for forbidden_clause in available_preprocessors[name]. \ - get_hyperparameter_search_space().forbidden_clauses: - dlcs = forbidden_clause.get_descendant_literal_clauses() - for dlc in dlcs: - if not dlc.hyperparameter.startwith(name): - dlc.hyperparameter.name = "%s:%s" % (name, - dlc.hyperparameter.name) - cs.add_forbidden_clause(forbidden_clause) - - # And now add forbidden parameter configurations which would take too - # long - - # Combinations of tree-based models with feature learning: - regressors_ = ["random_forest", "gradient_boosting"] - feature_learning_ = ["kitchen_sinks", "sparse_filtering"] - - for c, f in product(regressors_, feature_learning_): - try: - cs.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(cs.get_hyperparameter( - "regressor"), c), - ForbiddenEqualsClause(cs.get_hyperparameter( - "preprocessor"), f))) - except: - pass - - return cs - - # TODO: maybe provide an interface to the underlying predictor like - # decision_function or predict_proba \ No newline at end of file diff --git a/AutoSklearn/autosklearn.py b/AutoSklearn/base.py similarity index 52% rename from AutoSklearn/autosklearn.py rename to AutoSklearn/base.py index 0227b507b4..b3bc00c736 100644 --- a/AutoSklearn/autosklearn.py +++ b/AutoSklearn/base.py @@ -1,73 +1,33 @@ +from abc import ABCMeta, abstractmethod from collections import defaultdict import copy -from itertools import product import sklearn if sklearn.__version__ != "0.15.2": raise ValueError("AutoSklearn supports only sklearn version 0.15.2, " "you installed %s." % sklearn.__version__) -from sklearn.base import BaseEstimator, ClassifierMixin +from sklearn.base import BaseEstimator from sklearn.pipeline import Pipeline from sklearn.utils import check_random_state - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ InactiveHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition -from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction from . import components as components -class AutoSklearnClassifier(BaseEstimator, ClassifierMixin): - """This class implements the classification task. - - It implements a pipeline, which includes one preprocessing step and one - classification algorithm. It can render a search space including all known - classification and preprocessing algorithms. - - Contrary to the sklearn API it is not possible to enumerate the - possible parameters in the __init__ function because we only know the - available classifiers at runtime. For this reason the user must - specifiy the parameters by passing an instance of - HPOlibConfigSpace.configuration_space.Configuration. - - Parameters - ---------- - configuration : HPOlibConfigSpace.configuration_space.Configuration - The configuration to evaluate. - - random_state : int, RandomState instance or None, optional (default=None) - If int, random_state is the seed used by the random number generator; - If RandomState instance, random_state is the random number generator; - If None, the random number generator is the RandomState instance - used by `np.random`. - - Attributes - ---------- - _estimator : The underlying scikit-learn classification model. This - variable is assigned after a call to the - :meth:`AutoSklearn.autosklearn.AutoSklearnClassifier.fit` method. +class AutoSklearnBaseEstimator(BaseEstimator): + """Base class for all AutoSklearn task models. - _preprocessor : The underlying scikit-learn preprocessing algorithm. This - variable is only assigned if a preprocessor is specified and - after a call to the - :meth:`AutoSklearn.autosklearn.AutoSklearnClassifier.fit` method. + Notes + ----- + This class should not be instantiated, only subclassed.""" + __metaclass__ = ABCMeta - See also - -------- - - References - ---------- - - Examples - -------- - - """ def __init__(self, configuration, random_state=None): - # TODO check sklearn version! self.configuration = configuration @@ -88,7 +48,7 @@ def fit(self, X, Y, fit_params=None, init_params=None): ---------- X : array-like or sparse, shape = (n_samples, n_features) Training data. The preferred type of the matrix (dense or sparse) - depends on the classifier selected. + depends on the estimator selected. y : array-like Targets @@ -116,17 +76,21 @@ def fit(self, X, Y, fit_params=None, init_params=None): # TODO: look if X.shape[0] == y.shape[0] # TODO: check if the hyperparameters have been set... # TODO: this is an example of the antipattern of not properly - # initializing a class in the init function! + # initializing a class in the init function! # TODO: can this happen now that a configuration is specified at # instantiation time + # Save all transformation object in a list to create a pipeline object steps = [] + + # seperate the init parameters for the single methods init_params_per_method = defaultdict(dict) if init_params is not None: for init_param, value in init_params: method, param = init_param.split(":") init_params_per_method[method][param] = value + # List of preprocessing steps (and their order) preprocessors_names = ["imputation", "rescaling", self.configuration['preprocessor'].value] @@ -152,25 +116,26 @@ def fit(self, X, Y, fit_params=None, init_params=None): **preproc_params) steps.append((preproc_name, preprocessor_object)) - # Extract Hyperparameters from the configuration object - classifier_name = self.configuration["classifier"].value - classifier_parameters = {} + # Extract Estimator Hyperparameters from the configuration object + estimator_name = self.configuration[ + self._get_estimator_hyperparameter_name()].value + estimator_parameters = {} for instantiated_hyperparameter in self.configuration: if not instantiated_hyperparameter.hyperparameter.name.startswith( - classifier_name): + estimator_name): continue if isinstance(instantiated_hyperparameter, InactiveHyperparameter): continue - name_ = instantiated_hyperparameter.hyperparameter.name.\ + name_ = instantiated_hyperparameter.hyperparameter.name. \ split(":")[1] - classifier_parameters[name_] = instantiated_hyperparameter.value + estimator_parameters[name_] = instantiated_hyperparameter.value - classifier_parameters.update(init_params_per_method[classifier_name]) - classifier_object = components.classification_components._classifiers\ - [classifier_name](random_state=self.random_state, - **classifier_parameters) - steps.append((classifier_name, classifier_object)) + estimator_parameters.update(init_params_per_method[estimator_name]) + estimator_object = self._get_estimator_components()[ + estimator_name](random_state=self.random_state, + **estimator_parameters) + steps.append((estimator_name, estimator_object)) self._validate_input_X(X) self._validate_input_Y(Y) @@ -184,39 +149,6 @@ def fit(self, X, Y, fit_params=None, init_params=None): self._pipeline.fit(X, Y, **fit_params) return self - def predict(self, X): - """Predict the classes using the selected model. - - Parameters - ---------- - X : array-like, shape = (n_samples, n_features) - - Returns - ------- - array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) - Returns the predicted values""" - # TODO check if fit() was called before... - self._validate_input_X(X) - return self._pipeline.predict(X) - - def predict_proba(self, X): - """predict_proba. - - Parameters - ---------- - X : array-like, shape = (n_samples, n_features) - - Returns - ------- - array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) - """ - self._validate_input_X(X) - - Xt = X - for name, transform in self._pipeline.steps[:-1]: - Xt = transform.transform(Xt) - return self._pipeline.steps[-1][-1].predict_proba(Xt) - def _validate_input_X(self, X): # TODO: think of all possible states which can occur and how to # handle them @@ -276,43 +208,58 @@ def add_model_class(self, model): """ raise NotImplementedError() + def predict(self, X): + """Predict the classes using the selected model. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) + Returns the predicted values""" + # TODO check if fit() was called before... + self._validate_input_X(X) + return self._pipeline.predict(X) + @staticmethod - def get_hyperparameter_search_space(include_classifiers=None, - exclude_classifiers=None, - include_preprocessors=None, - exclude_preprocessors=None, - multiclass=False, - multilabel=False, - sparse=False): + def _get_hyperparameter_search_space(estimator_name, + estimator_components, + preprocessor_components, + always_active, + default_estimator): """Return the configuration space for the CASH problem. + This method should be called by the method + get_hyperparameter_search_space of a subclass. After the subclass + assembles a list of available estimators and preprocessor components, + _get_hyperparameter_search_space can be called to do the work of + creating the actual + HPOlibConfigSpace.configuration_space.ConfigurationSpace object. + Parameters ---------- - include_classifiers : list of str - If include_classifiers is given, only the classifiers specified - are used. Specify them by their module name; e.g., to include - only the SVM use :python:`include_classifiers=['libsvm_svc']`. - Cannot be used together with :python:`exclude_classifiers`. - - exclude_classifiers : list of str - If exclude_classifiers is given, only the classifiers specified - are used. Specify them by their module name; e.g., to include - all classifiers except the SVM use - :python:`exclude_classifiers=['libsvm_svc']`. - Cannot be used together with :python:`include_classifiers`. - - include_preprocessors : list of str - If include_preprocessors is given, only the preprocessors specified - are used. Specify them by their module name; e.g., to include - only the PCA use :python:`include_preprocessors=['pca']`. - Cannot be used together with :python:`exclude_preprocessors`. - - exclude_preprocessors : list of str - If include_preprocessors is given, only the preprocessors specified - are used. Specify them by their module name; e.g., to include - all preprocessors except the PCA use - :python:`exclude_preprocessors=['pca']`. - Cannot be used together with :python:`include_preprocessors`. + estimator_name : str + Name of the estimator hyperparameter which will be used in the + configuration space. For a classification task, this would be + 'classifier'. + + estimator_components : dict {name: component} + Dictionary with all estimator components to be included in the + configuration space. + + preprocessor_components : dict {name: component} + Dictionary with all preprocessor components to be included in the + configuration space. . + + always_active : list of str + A list of components which will always be active in the pipeline. + This is useful for components like imputation which have + hyperparameters to be configured, but which do not have any parent. + + default_estimator : str + Default value for the estimator hyperparameter. Returns ------- @@ -320,87 +267,40 @@ def get_hyperparameter_search_space(include_classifiers=None, The configuration space describing the AutoSklearnClassifier. """ - if include_classifiers is not None and exclude_classifiers is not None: - raise ValueError("The arguments include_classifiers and " - "exclude_classifiers cannot be used together.") - - if include_preprocessors is not None and exclude_preprocessors is not None: - raise ValueError("The arguments include_preprocessors and " - "exclude_preprocessors cannot be used together.") - - always_active = ["imputation", "rescaling"] cs = ConfigurationSpace() - available_classifiers = \ - components.classification_components._classifiers - available_preprocessors = \ - components.preprocessing_components._preprocessors + available_estimators = estimator_components + available_preprocessors = preprocessor_components - names = [] - names_ = [] - for name in available_classifiers: - if name in always_active: - names_.append(name) - continue - elif include_classifiers is not None and \ - name not in include_classifiers: - continue - elif exclude_classifiers is not None and \ - name in exclude_classifiers: - continue + if default_estimator is None: + default_estimator = available_estimators.keys()[0] - if multiclass is True and available_classifiers[name]. \ - get_properties()['handles_multiclass'] is False: - continue - if multilabel is True and available_classifiers[name]. \ - get_properties()['handles_multilabel'] is False: - continue - if sparse is True and available_classifiers[name]. \ - get_properties()['handles_sparse'] is False: - continue - names.append(name) - - if len(names + names_) == 0: - raise ValueError("No classifier to build a configuration space " - "for...") - - # Hardcode the defaults based on some educated guesses - classifier_defaults = ['random_forest', 'liblinear', 'sgd', - 'libsvm_svc'] - classifier_default = None - for cd_ in classifier_defaults: - if cd_ in names: - classifier_default = cd_ - break - if classifier_default is None: - classifier_default = names[0] - - classifier = CategoricalHyperparameter("classifier", names, - default=classifier_default) - cs.add_hyperparameter(classifier) - for name in names + names_: + estimator = CategoricalHyperparameter(estimator_name, + available_estimators.keys(), default=default_estimator) + cs.add_hyperparameter(estimator) + for name in available_estimators.keys(): # We have to retrieve the configuration space every time because # we change the objects it returns. If we reused it, we could not - # retrieve the conditions further down + # retrieve the conditions further down # TODO implement copy for hyperparameters and forbidden and # conditions! - classifier_configuration_space = available_classifiers[name]. \ + estimator_configuration_space = available_estimators[name]. \ get_hyperparameter_search_space() - for parameter in classifier_configuration_space.get_hyperparameters(): + for parameter in estimator_configuration_space.get_hyperparameters(): new_parameter = copy.deepcopy(parameter) new_parameter.name = "%s:%s" % (name, new_parameter.name) cs.add_hyperparameter(new_parameter) # We must only add a condition if the hyperparameter is not # conditional on something else - if len(classifier_configuration_space. + if len(estimator_configuration_space. get_parents_of(parameter)) == 0: - condition = EqualsCondition(new_parameter, classifier, name) + condition = EqualsCondition(new_parameter, estimator, name) cs.add_condition(condition) - for condition in available_classifiers[name]. \ + for condition in available_estimators[name]. \ get_hyperparameter_search_space().get_conditions(): dlcs = condition.get_descendant_literal_conditions() for dlc in dlcs: @@ -410,45 +310,19 @@ def get_hyperparameter_search_space(include_classifiers=None, dlc.parent.name = "%s:%s" % (name, dlc.parent.name) cs.add_condition(condition) - for forbidden_clause in available_classifiers[name]. \ + for forbidden_clause in available_estimators[name]. \ get_hyperparameter_search_space().forbidden_clauses: dlcs = forbidden_clause.get_descendant_literal_clauses() for dlc in dlcs: if not dlc.hyperparameter.name.startswith(name): dlc.hyperparameter.name = "%s:%s" % (name, - dlc.hyperparameter.name) + dlc.hyperparameter.name) cs.add_forbidden_clause(forbidden_clause) - names = [] - names_ = [] - for name in available_preprocessors: - if name in always_active: - names_.append(name) - continue - elif include_preprocessors is not None and \ - name not in include_preprocessors: - continue - elif exclude_preprocessors is not None and \ - name in exclude_preprocessors: - continue - - if multiclass is True and available_preprocessors[name]. \ - get_properties()['handles_multiclass'] is False: - continue - if multilabel is True and available_preprocessors[name]. \ - get_properties()['handles_multilabel'] is False: - continue - if sparse is True and available_preprocessors[name]. \ - get_properties()['handles_sparse'] is False: - continue - - names.append(name) - preprocessor = CategoricalHyperparameter("preprocessor", - ["None"] + names, - default='None') + ["None"] + available_preprocessors.keys(), default='None') cs.add_hyperparameter(preprocessor) - for name in names + names_: + for name in available_preprocessors.keys(): preprocessor_configuration_space = available_preprocessors[name]. \ get_hyperparameter_search_space() for parameter in preprocessor_configuration_space.get_hyperparameters(): @@ -458,8 +332,10 @@ def get_hyperparameter_search_space(include_classifiers=None, # We must only add a condition if the hyperparameter is not # conditional on something else if len(preprocessor_configuration_space. - get_parents_of(parameter)) == 0 and name not in always_active: - condition = EqualsCondition(new_parameter, preprocessor, name) + get_parents_of( + parameter)) == 0 and name not in always_active: + condition = EqualsCondition(new_parameter, preprocessor, + name) cs.add_condition(condition) for condition in available_preprocessors[name]. \ @@ -478,28 +354,15 @@ def get_hyperparameter_search_space(include_classifiers=None, for dlc in dlcs: if not dlc.hyperparameter.startwith(name): dlc.hyperparameter.name = "%s:%s" % (name, - dlc.hyperparameter.name) + dlc.hyperparameter.name) cs.add_forbidden_clause(forbidden_clause) - # And now add forbidden parameter configurations which would take too - # long - - # Combinations of tree-based models with feature learning: - classifiers_ = ["extra_trees", "gradient_boosting", - "k_nearest_neighbors", "libsvm_svc", "random_forest"] - feature_learning_ = ["kitchen_sinks", "sparse_filtering"] - - for c, f in product(classifiers_, feature_learning_): - try: - cs.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(cs.get_hyperparameter( - "classifier"), c), - ForbiddenEqualsClause(cs.get_hyperparameter( - "preprocessor"), f))) - except: - pass - return cs - # TODO: maybe provide an interface to the underlying predictor like - # decision_function or predict_proba + @staticmethod + def _get_estimator_hyperparameter_name(): + pass + + @staticmethod + def _get_estimator_components(): + pass \ No newline at end of file diff --git a/AutoSklearn/classification.py b/AutoSklearn/classification.py new file mode 100644 index 0000000000..11f3fb9e4c --- /dev/null +++ b/AutoSklearn/classification.py @@ -0,0 +1,194 @@ +import copy +from itertools import product + +from sklearn.base import ClassifierMixin + +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction + +from . import components as components +from .base import AutoSklearnBaseEstimator + + +class AutoSklearnClassifier(ClassifierMixin, AutoSklearnBaseEstimator): + """This class implements the classification task. + + It implements a pipeline, which includes one preprocessing step and one + classification algorithm. It can render a search space including all known + classification and preprocessing algorithms. + + Contrary to the sklearn API it is not possible to enumerate the + possible parameters in the __init__ function because we only know the + available classifiers at runtime. For this reason the user must + specifiy the parameters by passing an instance of + HPOlibConfigSpace.configuration_space.Configuration. + + Parameters + ---------- + configuration : HPOlibConfigSpace.configuration_space.Configuration + The configuration to evaluate. + + random_state : int, RandomState instance or None, optional (default=None) + If int, random_state is the seed used by the random number generator; + If RandomState instance, random_state is the random number generator; + If None, the random number generator is the RandomState instance + used by `np.random`. + + Attributes + ---------- + _estimator : The underlying scikit-learn classification model. This + variable is assigned after a call to the + :meth:`AutoSklearn.autosklearn.AutoSklearnClassifier.fit` method. + + _preprocessor : The underlying scikit-learn preprocessing algorithm. This + variable is only assigned if a preprocessor is specified and + after a call to the + :meth:`AutoSklearn.autosklearn.AutoSklearnClassifier.fit` method. + + See also + -------- + + References + ---------- + + Examples + -------- + + """ + _pipeline = ["imputation", "rescaling", "__preprocessor__", + "__estimator__"] + + def predict_proba(self, X): + """predict_proba. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) + """ + self._validate_input_X(X) + + Xt = X + for name, transform in self._pipeline.steps[:-1]: + Xt = transform.transform(Xt) + return self._pipeline.steps[-1][-1].predict_proba(Xt) + + @staticmethod + def get_hyperparameter_search_space(include_estimators=None, + exclude_estimators=None, + include_preprocessors=None, + exclude_preprocessors=None, + multiclass=False, + multilabel=False, + sparse=False): + + if include_estimators is not None and exclude_estimators is not None: + raise ValueError("The arguments include_estimators and " + "exclude_estimators cannot be used together.") + + if include_preprocessors is not None and exclude_preprocessors is not None: + raise ValueError("The arguments include_preprocessors and " + "exclude_preprocessors cannot be used together.") + + # Compile a list of all estimator objects for this problem + available_classifiers = AutoSklearnClassifier._get_estimator_components() + + classifiers = dict() + for name in available_classifiers: + if include_estimators is not None and \ + name not in include_estimators: + continue + elif exclude_estimators is not None and \ + name in exclude_estimators: + continue + + if multiclass is True and available_classifiers[name]. \ + get_properties()['handles_multiclass'] is False: + continue + if multilabel is True and available_classifiers[name]. \ + get_properties()['handles_multilabel'] is False: + continue + if sparse is True and available_classifiers[name]. \ + get_properties()['handles_sparse'] is False: + continue + classifiers[name] = available_classifiers[name] + + if len(classifiers) == 0: + raise ValueError("No classifier to build a configuration space " + "for...") + + # Hardcode the defaults based on some educated guesses + classifier_defaults = ['random_forest', 'liblinear', 'sgd', + 'libsvm_svc'] + classifier_default = None + for cd_ in classifier_defaults: + if cd_ in classifiers: + classifier_default = cd_ + break + if classifier_default is None: + classifier_default = classifiers[0] + + # Compile a list of preprocessor for this problem + available_preprocessors = \ + components.preprocessing_components._preprocessors + + preprocessors = dict() + for name in available_preprocessors: + if name in ["imputation", "rescaling"]: + preprocessors[name] = available_preprocessors[name] + continue + elif include_preprocessors is not None and \ + name not in include_preprocessors: + continue + elif exclude_preprocessors is not None and \ + name in exclude_preprocessors: + continue + + if multiclass is True and available_preprocessors[name]. \ + get_properties()['handles_multiclass'] is False: + continue + if multilabel is True and available_preprocessors[name]. \ + get_properties()['handles_multilabel'] is False: + continue + if sparse is True and available_preprocessors[name]. \ + get_properties()['handles_sparse'] is False: + continue + + preprocessors[name] = available_preprocessors[name] + + # Get the configuration space + configuration_space = AutoSklearnBaseEstimator\ + ._get_hyperparameter_search_space( + AutoSklearnClassifier._get_estimator_hyperparameter_name(), + classifiers, preprocessors, + AutoSklearnClassifier._pipeline, classifier_default) + + # And now add forbidden parameter configurations which would take too + # long + + # Combinations of tree-based models with feature learning: + classifiers_ = ["extra_trees", "gradient_boosting", + "k_nearest_neighbors", "libsvm_svc", "random_forest"] + feature_learning_ = ["kitchen_sinks", "sparse_filtering"] + + for c, f in product(classifiers_, feature_learning_): + try: + configuration_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "classifier"), c), + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "preprocessor"), f))) + except: + pass + + return configuration_space + + @staticmethod + def _get_estimator_hyperparameter_name(): + return "classifier" + + @staticmethod + def _get_estimator_components(): + return components.classification_components._classifiers \ No newline at end of file diff --git a/AutoSklearn/regression.py b/AutoSklearn/regression.py new file mode 100644 index 0000000000..f1ffe0e66a --- /dev/null +++ b/AutoSklearn/regression.py @@ -0,0 +1,225 @@ +from collections import defaultdict +import copy +from itertools import product + +import sklearn +if sklearn.__version__ != "0.15.2": + raise ValueError("AutoSklearn supports only sklearn version 0.15.2, " + "you installed %s." % sklearn.__version__) + +from sklearn.base import RegressorMixin +from sklearn.pipeline import Pipeline +from sklearn.utils import check_random_state + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ + InactiveHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction + +from . import components as components +from .base import AutoSklearnBaseEstimator + + +class AutoSklearnRegressor(RegressorMixin, AutoSklearnBaseEstimator): + """This class implements the regression task. + + It implements a pipeline, which includes one preprocessing step and one + regression algorithm. It can render a search space including all known + regression and preprocessing algorithms. + + Contrary to the sklearn API it is not possible to enumerate the + possible parameters in the __init__ function because we only know the + available regressors at runtime. For this reason the user must + specifiy the parameters by passing an instance of + HPOlibConfigSpace.configuration_space.Configuration. + + Parameters + ---------- + configuration : HPOlibConfigSpace.configuration_space.Configuration + The configuration to evaluate. + + random_state : int, RandomState instance or None, optional (default=None) + If int, random_state is the seed used by the random number generator; + If RandomState instance, random_state is the random number generator; + If None, the random number generator is the RandomState instance + used by `np.random`. + + Attributes + ---------- + _estimator : The underlying scikit-learn regression model. This + variable is assigned after a call to the + :meth:`AutoSklearn.autosklearn.AutoSklearnRegressor.fit` method. + + _preprocessor : The underlying scikit-learn preprocessing algorithm. This + variable is only assigned if a preprocessor is specified and + after a call to the + :meth:`AutoSklearn.autosklearn.AutoSklearnRegressor.fit` method. + + See also + -------- + + References + ---------- + + Examples + -------- + + """ + _pipeline = ["imputation", "rescaling", "__preprocessor__", + "__estimator__"] + + def _validate_input_X(self, X): + # TODO: think of all possible states which can occur and how to + # handle them + pass + + def _validate_input_Y(self, Y): + pass + + def add_model_class(self, model): + """ + Raises + ------ + NotImplementedError + """ + raise NotImplementedError() + + @staticmethod + def get_hyperparameter_search_space(include_estimators=None, + exclude_estimators=None, + include_preprocessors=None, + exclude_preprocessors=None, + sparse=False): + """Return the configuration space for the CASH problem. + + Parameters + ---------- + include_estimators : list of str + If include_estimators is given, only the regressors specified + are used. Specify them by their module name; e.g., to include + only the SVM use :python:`include_regressors=['svr']`. + Cannot be used together with :python:`exclude_regressors`. + + exclude_estimators : list of str + If exclude_estimators is given, only the regressors specified + are used. Specify them by their module name; e.g., to include + all regressors except the SVM use + :python:`exclude_regressors=['svr']`. + Cannot be used together with :python:`include_regressors`. + + include_preprocessors : list of str + If include_preprocessors is given, only the preprocessors specified + are used. Specify them by their module name; e.g., to include + only the PCA use :python:`include_preprocessors=['pca']`. + Cannot be used together with :python:`exclude_preprocessors`. + + exclude_preprocessors : list of str + If include_preprocessors is given, only the preprocessors specified + are used. Specify them by their module name; e.g., to include + all preprocessors except the PCA use + :python:`exclude_preprocessors=['pca']`. + Cannot be used together with :python:`include_preprocessors`. + + Returns + ------- + cs : HPOlibConfigSpace.configuration_space.Configuration + The configuration space describing the AutoSklearnClassifier. + """ + + if include_estimators is not None and exclude_estimators is not None: + raise ValueError("The arguments include_estimators and " + "exclude_regressors cannot be used together.") + + if include_preprocessors is not None and exclude_preprocessors is not None: + raise ValueError("The arguments include_preprocessors and " + "exclude_preprocessors cannot be used together.") + + # Compile a list of all estimator objects for this problem + available_regressors = AutoSklearnRegressor._get_estimator_components() + + # We assume that there exists only a single regression task. which + # is different to classification where we have multiclass, + # multilabel, etc + regressors = dict() + for name in available_regressors: + if include_estimators is not None and \ + name not in include_estimators: + continue + elif exclude_estimators is not None and \ + name in exclude_estimators: + continue + if sparse is True and available_regressors[name]. \ + get_properties()['handles_sparse'] is False: + continue + regressors[name] = available_regressors[name] + + if len(regressors) == 0: + raise ValueError("No regressors to build a configuration space " + "for...") + + # Hardcode the defaults based on some educated guesses + classifier_defaults = ['random_forest', 'liblinear', 'sgd', + 'libsvm_svc'] + regressor_default = None + for cd_ in classifier_defaults: + if cd_ in regressors: + regressor_default = cd_ + break + if regressor_default is None: + regressor_default = regressors.keys()[0] + + # Compile a list of preprocessor for this problem + available_preprocessors = \ + components.preprocessing_components._preprocessors + + preprocessors = dict() + for name in available_preprocessors: + if name in AutoSklearnRegressor._pipeline: + preprocessors[name] = available_preprocessors[name] + continue + elif include_preprocessors is not None and \ + name not in include_preprocessors: + continue + elif exclude_preprocessors is not None and \ + name in exclude_preprocessors: + continue + if sparse is True and available_preprocessors[name]. \ + get_properties()['handles_sparse'] is False: + continue + + preprocessors[name] = available_preprocessors[name] + + # Get the configuration space + configuration_space = AutoSklearnBaseEstimator \ + ._get_hyperparameter_search_space( + AutoSklearnRegressor._get_estimator_hyperparameter_name(), + regressors, preprocessors, + AutoSklearnRegressor._pipeline, regressor_default) + + # And now add forbidden parameter configurations which would take too + # long + + # Combinations of tree-based models with feature learning: + regressors_ = ["random_forest", "gradient_boosting"] + feature_learning_ = ["kitchen_sinks", "sparse_filtering"] + + for c, f in product(regressors_, feature_learning_): + try: + configuration_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "regressor"), c), + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "preprocessor"), f))) + except: + pass + + return configuration_space + + @staticmethod + def _get_estimator_components(): + return components.regression_components._regressors + + @staticmethod + def _get_estimator_hyperparameter_name(): + return "regressor" \ No newline at end of file diff --git a/tests/test_autosklearn.py b/tests/test_autosklearn.py index 5ad3734d5f..6a4058a61b 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_autosklearn.py @@ -13,7 +13,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from AutoSklearn.autosklearn import AutoSklearnClassifier +from AutoSklearn.classification import AutoSklearnClassifier from AutoSklearn.components.classification_base import AutoSklearnClassificationAlgorithm from AutoSklearn.components.preprocessor_base import AutoSklearnPreprocessingAlgorithm import AutoSklearn.components.classification as classification_components @@ -60,18 +60,19 @@ def test_get_hyperparameter_search_space(self): def test_get_hyperparameter_search_space_include_exclude_models(self): cs = AutoSklearnClassifier.get_hyperparameter_search_space( - include_classifiers=['libsvm_svc']) + include_estimators=['libsvm_svc']) self.assertEqual(cs.get_hyperparameter('classifier'), CategoricalHyperparameter('classifier', ['libsvm_svc'])) cs = AutoSklearnClassifier.get_hyperparameter_search_space( - exclude_classifiers=['libsvm_svc']) + exclude_estimators=['libsvm_svc']) self.assertNotIn('libsvm_svc', str(cs)) cs = AutoSklearnClassifier.get_hyperparameter_search_space( include_preprocessors=['pca']) self.assertEqual(cs.get_hyperparameter('preprocessor'), - CategoricalHyperparameter('preprocessor', ["None", 'pca'])) + CategoricalHyperparameter('preprocessor', + ["None", 'pca', 'rescaling', 'imputation'])) cs = AutoSklearnClassifier.get_hyperparameter_search_space( exclude_preprocessors=['pca']) diff --git a/tests/test_autosklearn_regression.py b/tests/test_autosklearn_regression.py index 0b82d0a5d1..390219bd67 100644 --- a/tests/test_autosklearn_regression.py +++ b/tests/test_autosklearn_regression.py @@ -13,7 +13,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from AutoSklearn.autosklearn_regression import AutoSklearnRegressor +from AutoSklearn.regression import AutoSklearnRegressor from AutoSklearn.components.regression_base import AutoSklearnRegressionAlgorithm from AutoSklearn.components.preprocessor_base import AutoSklearnPreprocessingAlgorithm import AutoSklearn.components.regression as regression_components @@ -22,8 +22,6 @@ class TestAutoSKlearnRegressor(unittest.TestCase): - # TODO: test for both possible ways to initialize AutoSklearn - # parameters and other... def test_find_regressors(self): regressors = regression_components._regressors @@ -58,24 +56,25 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(35, len(hyperparameters)) + self.assertEqual(26, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): cs = AutoSklearnRegressor.get_hyperparameter_search_space( - include_regressors=['random_forest']) + include_estimators=['random_forest']) self.assertEqual(cs.get_hyperparameter('regressor'), CategoricalHyperparameter('regressor', ['random_forest'])) # TODO add this test when more than one regressor is present cs = AutoSklearnRegressor.get_hyperparameter_search_space( - exclude_regressors=['random_forest']) + exclude_estimators=['random_forest']) self.assertNotIn('random_forest', str(cs)) cs = AutoSklearnRegressor.get_hyperparameter_search_space( include_preprocessors=['pca']) self.assertEqual(cs.get_hyperparameter('preprocessor'), - CategoricalHyperparameter('preprocessor', ["None", 'pca'])) + CategoricalHyperparameter('preprocessor', + ["None", 'pca', 'rescaling', 'imputation'])) cs = AutoSklearnRegressor.get_hyperparameter_search_space( exclude_preprocessors=['pca']) From 73ad7cede8d468babae76492151f315eada4ef54 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 14 Jan 2015 16:54:17 +0100 Subject: [PATCH 106/352] Fix tests --- AutoSklearn/base.py | 5 +++-- AutoSklearn/classification.py | 2 +- tests/{test_autosklearn.py => test_classification.py} | 3 +-- tests/{test_autosklearn_regression.py => test_regression.py} | 3 +-- 4 files changed, 6 insertions(+), 7 deletions(-) rename tests/{test_autosklearn.py => test_classification.py} (97%) rename tests/{test_autosklearn_regression.py => test_regression.py} (97%) diff --git a/AutoSklearn/base.py b/AutoSklearn/base.py index b3bc00c736..516f2eef9e 100644 --- a/AutoSklearn/base.py +++ b/AutoSklearn/base.py @@ -93,7 +93,6 @@ def fit(self, X, Y, fit_params=None, init_params=None): # List of preprocessing steps (and their order) preprocessors_names = ["imputation", "rescaling", self.configuration['preprocessor'].value] - for preproc_name in preprocessors_names: if preproc_name != "None": preproc_params = {} @@ -319,8 +318,10 @@ def _get_hyperparameter_search_space(estimator_name, dlc.hyperparameter.name) cs.add_forbidden_clause(forbidden_clause) + preprocessor_choices = filter(lambda app: app not in always_active, + available_preprocessors.keys()) preprocessor = CategoricalHyperparameter("preprocessor", - ["None"] + available_preprocessors.keys(), default='None') + ["None"] + preprocessor_choices, default='None') cs.add_hyperparameter(preprocessor) for name in available_preprocessors.keys(): preprocessor_configuration_space = available_preprocessors[name]. \ diff --git a/AutoSklearn/classification.py b/AutoSklearn/classification.py index 11f3fb9e4c..29a9581c00 100644 --- a/AutoSklearn/classification.py +++ b/AutoSklearn/classification.py @@ -69,10 +69,10 @@ def predict_proba(self, X): array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) """ self._validate_input_X(X) - Xt = X for name, transform in self._pipeline.steps[:-1]: Xt = transform.transform(Xt) + return self._pipeline.steps[-1][-1].predict_proba(Xt) @staticmethod diff --git a/tests/test_autosklearn.py b/tests/test_classification.py similarity index 97% rename from tests/test_autosklearn.py rename to tests/test_classification.py index 6a4058a61b..2dae3707ee 100644 --- a/tests/test_autosklearn.py +++ b/tests/test_classification.py @@ -71,8 +71,7 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): cs = AutoSklearnClassifier.get_hyperparameter_search_space( include_preprocessors=['pca']) self.assertEqual(cs.get_hyperparameter('preprocessor'), - CategoricalHyperparameter('preprocessor', - ["None", 'pca', 'rescaling', 'imputation'])) + CategoricalHyperparameter('preprocessor', ["None", 'pca'])) cs = AutoSklearnClassifier.get_hyperparameter_search_space( exclude_preprocessors=['pca']) diff --git a/tests/test_autosklearn_regression.py b/tests/test_regression.py similarity index 97% rename from tests/test_autosklearn_regression.py rename to tests/test_regression.py index 390219bd67..d5bf125f38 100644 --- a/tests/test_autosklearn_regression.py +++ b/tests/test_regression.py @@ -73,8 +73,7 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): cs = AutoSklearnRegressor.get_hyperparameter_search_space( include_preprocessors=['pca']) self.assertEqual(cs.get_hyperparameter('preprocessor'), - CategoricalHyperparameter('preprocessor', - ["None", 'pca', 'rescaling', 'imputation'])) + CategoricalHyperparameter('preprocessor', ["None", 'pca'])) cs = AutoSklearnRegressor.get_hyperparameter_search_space( exclude_preprocessors=['pca']) From cdedee771633ab5e4514942dde7f540a53ca3908 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 14 Jan 2015 16:55:13 +0100 Subject: [PATCH 107/352] Fix bug in softmax (for liblinear, sgd) --- .../components/classification/liblinear.py | 9 +--- AutoSklearn/components/classification/sgd.py | 9 +--- AutoSklearn/implementations/util.py | 17 ++++++++ tests/implementations/test_util.py | 42 +++++++++++++++++++ 4 files changed, 63 insertions(+), 14 deletions(-) create mode 100644 AutoSklearn/implementations/util.py create mode 100644 tests/implementations/test_util.py diff --git a/AutoSklearn/components/classification/liblinear.py b/AutoSklearn/components/classification/liblinear.py index 845e4c10e1..a3d250b175 100644 --- a/AutoSklearn/components/classification/liblinear.py +++ b/AutoSklearn/components/classification/liblinear.py @@ -8,6 +8,7 @@ ForbiddenAndConjunction from ..classification_base import AutoSklearnClassificationAlgorithm +from ...implementations.util import softmax class LibLinear_SVC(AutoSklearnClassificationAlgorithm): # Liblinear is not deterministic as it uses a RNG inside @@ -58,13 +59,7 @@ def predict_proba(self, X): raise NotImplementedError() df = self.estimator.decision_function(X) - - if len(df.shape) == 1: - ppositive = 1 / (1 + np.exp(-df)) - return np.transpose(np.array((1 - ppositive, ppositive))) - else: - tmp = np.exp(-df) - return tmp / np.sum(tmp, axis=1).reshape((-1, 1)) + return softmax(df) @staticmethod def get_properties(): diff --git a/AutoSklearn/components/classification/sgd.py b/AutoSklearn/components/classification/sgd.py index 6fc8d48638..8c08a0b7a1 100644 --- a/AutoSklearn/components/classification/sgd.py +++ b/AutoSklearn/components/classification/sgd.py @@ -8,6 +8,7 @@ from HPOlibConfigSpace.conditions import EqualsCondition, OrConjunction from ..classification_base import AutoSklearnClassificationAlgorithm +from ...implementations.util import softmax class SGD(AutoSklearnClassificationAlgorithm): def __init__(self, loss, penalty, alpha, fit_intercept, n_iter, @@ -69,13 +70,7 @@ def predict_proba(self, X): return self.estimator.predict_proba(X) else: df = self.estimator.decision_function(X) - - if len(df.shape) == 1: - ppositive = 1 / (1 + np.exp(-df)) - return np.transpose(np.array((1 - ppositive, ppositive))) - else: - tmp = np.exp(-df) - return tmp / np.sum(tmp, axis=1).reshape((-1, 1)) + return softmax(df) @staticmethod def get_properties(): diff --git a/AutoSklearn/implementations/util.py b/AutoSklearn/implementations/util.py new file mode 100644 index 0000000000..555fe3d323 --- /dev/null +++ b/AutoSklearn/implementations/util.py @@ -0,0 +1,17 @@ +import numpy as np + + +def softmax(df): + if len(df.shape) == 1: + df[df > 20] = 20 + df[df < -20] = -20 + ppositive = 1 / (1 + np.exp(-df)) + ppositive[ppositive > 0.999999] = 1 + ppositive[ppositive < 0.0000001] = 0 + return np.transpose(np.array((1 - ppositive, ppositive))) + else: + # Compute the Softmax like it is described here: + # http://www.iro.umontreal.ca/~bengioy/dlbook/numerical.html + tmp = df - np.max(df, axis=1).reshape((-1, 1)) + tmp = np.exp(tmp) + return tmp / np.sum(tmp, axis=1).reshape((-1, 1)) \ No newline at end of file diff --git a/tests/implementations/test_util.py b/tests/implementations/test_util.py new file mode 100644 index 0000000000..196beb1fd2 --- /dev/null +++ b/tests/implementations/test_util.py @@ -0,0 +1,42 @@ +import unittest + +import numpy as np +from sklearn.utils.testing import assert_array_almost_equal + +from AutoSklearn.implementations.util import softmax + +class UtilTest(unittest.TestCase): + def test_softmax_binary(self): + df = np.array([-40.00643897, 34.69754581, 23.71181359 -29.89724287, + 27.06071791, -37.78334103, -40.15812461, 40.16139229, + -27.85887801, 42.67404756, -36.89753589 -36.45148009, + 54.68976306, 19.47886562, -49.99821027, -35.70205302, + -40.59639267, 32.96343916, -39.23777841, -37.86535019, + -33.10196906, 26.84144377, -36.8569686]) + probas = softmax(df) + expected = [[1., 0.], [0., 1.], [0.99794501, 0.00205499], + [0., 1.], [1., 0.], [1., 0.], [0., 1.], + [1., 0.], [0., 1.], [1., 0.], [0., 1.], + [0., 1.], [1., 0.], [1., 0.], [1., 0.], + [0., 1.], [1., 0.], [1., 0.], [1., 0.], + [0., 1.], [1., 0.]] + assert_array_almost_equal(expected, probas) + + def test_softmax(self): + df = np.array([[2.75021367e+10, -8.83772371e-01, -2.20516715e+27], + [-2.10848072e+11, 2.35024444e-01, 5.20106536e+25]]) + # With a numerically unstable softmax, the output would be something + # like this: + # [[ 0. 0. nan] + # [nan 0. 0.]] + probas = softmax(df) + expected = np.array([[1, 0, 0], [0, 0, 1]]) + self.assertTrue((expected == probas).all()) + + df = np.array([[0.1, 0.6, 0.3], [0.2, 0.3, 0.5]]) + probas = softmax(df) + expected = np.array([[0.25838965, 0.42601251, 0.31559783], + [0.28943311, 0.31987306, 0.39069383]]) + assert_array_almost_equal(expected, probas) + + From 656e2750c43daafd155343f0d8dd83f82fc4ec59 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 14 Jan 2015 17:20:55 +0100 Subject: [PATCH 108/352] Add handles_regression to preprocessing --- AutoSklearn/classification.py | 3 +++ AutoSklearn/components/preprocessing/imputation.py | 1 + AutoSklearn/components/preprocessing/kitchen_sinks.py | 1 + AutoSklearn/components/preprocessing/pca.py | 1 + AutoSklearn/components/preprocessing/rescaling.py | 1 + AutoSklearn/components/preprocessing/sparse_filtering.py | 1 + AutoSklearn/components/preprocessor_base.py | 2 ++ AutoSklearn/regression.py | 4 ++++ 8 files changed, 14 insertions(+) diff --git a/AutoSklearn/classification.py b/AutoSklearn/classification.py index 29a9581c00..8b33900008 100644 --- a/AutoSklearn/classification.py +++ b/AutoSklearn/classification.py @@ -146,6 +146,9 @@ def get_hyperparameter_search_space(include_estimators=None, name in exclude_preprocessors: continue + if available_preprocessors[name]. \ + get_properties()['handles_classification'] is False: + continue if multiclass is True and available_preprocessors[name]. \ get_properties()['handles_multiclass'] is False: continue diff --git a/AutoSklearn/components/preprocessing/imputation.py b/AutoSklearn/components/preprocessing/imputation.py index 2a66df338b..19b82e60af 100644 --- a/AutoSklearn/components/preprocessing/imputation.py +++ b/AutoSklearn/components/preprocessing/imputation.py @@ -31,6 +31,7 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': False, 'prefers_data_normalized': False, + 'handles_regression': True, 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': True, diff --git a/AutoSklearn/components/preprocessing/kitchen_sinks.py b/AutoSklearn/components/preprocessing/kitchen_sinks.py index 6b12d25f98..d05a56235a 100644 --- a/AutoSklearn/components/preprocessing/kitchen_sinks.py +++ b/AutoSklearn/components/preprocessing/kitchen_sinks.py @@ -39,6 +39,7 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': True, 'prefers_data_normalized': True, + 'handles_regression': True, 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': True, diff --git a/AutoSklearn/components/preprocessing/pca.py b/AutoSklearn/components/preprocessing/pca.py index e8a6f262f3..f15d975d00 100644 --- a/AutoSklearn/components/preprocessing/pca.py +++ b/AutoSklearn/components/preprocessing/pca.py @@ -51,6 +51,7 @@ def get_properties(): 'prefers_data_scaled': False, # TODO find out if this is good because of sparsity... 'prefers_data_normalized': False, + 'handles_regression': True, 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': True, diff --git a/AutoSklearn/components/preprocessing/rescaling.py b/AutoSklearn/components/preprocessing/rescaling.py index e597c25a53..e0b2d85a7f 100644 --- a/AutoSklearn/components/preprocessing/rescaling.py +++ b/AutoSklearn/components/preprocessing/rescaling.py @@ -35,6 +35,7 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': False, 'prefers_data_normalized': False, + 'handles_regression': True, 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': True, diff --git a/AutoSklearn/components/preprocessing/sparse_filtering.py b/AutoSklearn/components/preprocessing/sparse_filtering.py index 1da188fba6..12ddf13c91 100644 --- a/AutoSklearn/components/preprocessing/sparse_filtering.py +++ b/AutoSklearn/components/preprocessing/sparse_filtering.py @@ -31,6 +31,7 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': True, 'prefers_data_normalized': True, + 'handles_regression': True, 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': True, diff --git a/AutoSklearn/components/preprocessor_base.py b/AutoSklearn/components/preprocessor_base.py index e2d398cec3..c2914ed71e 100644 --- a/AutoSklearn/components/preprocessor_base.py +++ b/AutoSklearn/components/preprocessor_base.py @@ -23,6 +23,8 @@ def get_properties(): (prefers_data_scaled : {True, False} * Does the algorithm prefer data normalized to 0-mean, 1std? (prefers_data_normalized : {True, False} + * Can preprocess regression data? + (handles_regression : {True, False} * Can preprocess classification data? (handles_classification : {True, False} * Can the algorithm handle multiclass-classification problems? diff --git a/AutoSklearn/regression.py b/AutoSklearn/regression.py index f1ffe0e66a..d5e4a0df90 100644 --- a/AutoSklearn/regression.py +++ b/AutoSklearn/regression.py @@ -184,9 +184,13 @@ def get_hyperparameter_search_space(include_estimators=None, elif exclude_preprocessors is not None and \ name in exclude_preprocessors: continue + if sparse is True and available_preprocessors[name]. \ get_properties()['handles_sparse'] is False: continue + elif available_preprocessors[name]. \ + get_properties()['handles_regression'] is False: + continue preprocessors[name] = available_preprocessors[name] From c8028ccbcc5f96b065db57f33bf166e14817a8c1 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 15 Jan 2015 10:32:49 +0100 Subject: [PATCH 109/352] Pass dictionary with dataset properties to get_hyperparameter_search_space of every component --- AutoSklearn/base.py | 17 ++++---- AutoSklearn/classification.py | 41 +++++++++++-------- .../components/classification/extra_trees.py | 2 +- .../classification/gradient_boosting.py | 2 +- .../classification/k_nearest_neighbors.py | 2 +- .../components/classification/liblinear.py | 2 +- .../components/classification/libsvm_svc.py | 2 +- .../classification/random_forest.py | 2 +- AutoSklearn/components/classification/sgd.py | 2 +- AutoSklearn/components/classification_base.py | 2 +- .../components/preprocessing/imputation.py | 2 +- .../components/preprocessing/kitchen_sinks.py | 2 +- AutoSklearn/components/preprocessing/pca.py | 2 +- .../components/preprocessing/rescaling.py | 2 +- .../preprocessing/sparse_filtering.py | 2 +- AutoSklearn/components/preprocessor_base.py | 2 +- .../regression/gradient_boosting.py | 2 +- .../components/regression/random_forest.py | 2 +- .../components/regression/ridge_regression.py | 2 +- .../regression/support_vector_regression.py | 2 +- AutoSklearn/components/regression_base.py | 2 +- AutoSklearn/regression.py | 20 +++++---- tests/test_classification.py | 12 +++--- 23 files changed, 71 insertions(+), 57 deletions(-) diff --git a/AutoSklearn/base.py b/AutoSklearn/base.py index 516f2eef9e..cef2dfd394 100644 --- a/AutoSklearn/base.py +++ b/AutoSklearn/base.py @@ -224,10 +224,11 @@ def predict(self, X): @staticmethod def _get_hyperparameter_search_space(estimator_name, + default_estimator, estimator_components, preprocessor_components, - always_active, - default_estimator): + dataset_properties, + always_active): """Return the configuration space for the CASH problem. This method should be called by the method @@ -287,7 +288,7 @@ def _get_hyperparameter_search_space(estimator_name, # conditions! estimator_configuration_space = available_estimators[name]. \ - get_hyperparameter_search_space() + get_hyperparameter_search_space(dataset_properties) for parameter in estimator_configuration_space.get_hyperparameters(): new_parameter = copy.deepcopy(parameter) new_parameter.name = "%s:%s" % (name, new_parameter.name) @@ -300,7 +301,7 @@ def _get_hyperparameter_search_space(estimator_name, cs.add_condition(condition) for condition in available_estimators[name]. \ - get_hyperparameter_search_space().get_conditions(): + get_hyperparameter_search_space(dataset_properties).get_conditions(): dlcs = condition.get_descendant_literal_conditions() for dlc in dlcs: if not dlc.child.name.startswith(name): @@ -310,7 +311,7 @@ def _get_hyperparameter_search_space(estimator_name, cs.add_condition(condition) for forbidden_clause in available_estimators[name]. \ - get_hyperparameter_search_space().forbidden_clauses: + get_hyperparameter_search_space(dataset_properties).forbidden_clauses: dlcs = forbidden_clause.get_descendant_literal_clauses() for dlc in dlcs: if not dlc.hyperparameter.name.startswith(name): @@ -325,7 +326,7 @@ def _get_hyperparameter_search_space(estimator_name, cs.add_hyperparameter(preprocessor) for name in available_preprocessors.keys(): preprocessor_configuration_space = available_preprocessors[name]. \ - get_hyperparameter_search_space() + get_hyperparameter_search_space(dataset_properties) for parameter in preprocessor_configuration_space.get_hyperparameters(): new_parameter = copy.deepcopy(parameter) new_parameter.name = "%s:%s" % (name, new_parameter.name) @@ -340,7 +341,7 @@ def _get_hyperparameter_search_space(estimator_name, cs.add_condition(condition) for condition in available_preprocessors[name]. \ - get_hyperparameter_search_space().get_conditions(): + get_hyperparameter_search_space(dataset_properties).get_conditions(): dlcs = condition.get_descendent_literal_conditions() for dlc in dlcs: if not dlc.child.name.startswith(name): @@ -350,7 +351,7 @@ def _get_hyperparameter_search_space(estimator_name, cs.add_condition(condition) for forbidden_clause in available_preprocessors[name]. \ - get_hyperparameter_search_space().forbidden_clauses: + get_hyperparameter_search_space(dataset_properties).forbidden_clauses: dlcs = forbidden_clause.get_descendant_literal_clauses() for dlc in dlcs: if not dlc.hyperparameter.startwith(name): diff --git a/AutoSklearn/classification.py b/AutoSklearn/classification.py index 8b33900008..bcf75d586e 100644 --- a/AutoSklearn/classification.py +++ b/AutoSklearn/classification.py @@ -80,9 +80,7 @@ def get_hyperparameter_search_space(include_estimators=None, exclude_estimators=None, include_preprocessors=None, exclude_preprocessors=None, - multiclass=False, - multilabel=False, - sparse=False): + dataset_properties=None): if include_estimators is not None and exclude_estimators is not None: raise ValueError("The arguments include_estimators and " @@ -92,6 +90,9 @@ def get_hyperparameter_search_space(include_estimators=None, raise ValueError("The arguments include_preprocessors and " "exclude_preprocessors cannot be used together.") + if dataset_properties is None or not isinstance(dataset_properties, dict): + dataset_properties = dict() + # Compile a list of all estimator objects for this problem available_classifiers = AutoSklearnClassifier._get_estimator_components() @@ -104,14 +105,17 @@ def get_hyperparameter_search_space(include_estimators=None, name in exclude_estimators: continue - if multiclass is True and available_classifiers[name]. \ - get_properties()['handles_multiclass'] is False: + if dataset_properties.get('multiclass') is True and \ + available_classifiers[name].get_properties()[ + 'handles_multiclass'] is False: continue - if multilabel is True and available_classifiers[name]. \ - get_properties()['handles_multilabel'] is False: + if dataset_properties.get('multilabel') is True and \ + available_classifiers[name].get_properties()[ + 'handles_multilabel'] is False: continue - if sparse is True and available_classifiers[name]. \ - get_properties()['handles_sparse'] is False: + if dataset_properties.get('sparse') is True and \ + available_classifiers[name].get_properties()[ + 'handles_sparse'] is False: continue classifiers[name] = available_classifiers[name] @@ -149,14 +153,17 @@ def get_hyperparameter_search_space(include_estimators=None, if available_preprocessors[name]. \ get_properties()['handles_classification'] is False: continue - if multiclass is True and available_preprocessors[name]. \ - get_properties()['handles_multiclass'] is False: + if dataset_properties.get('multiclass') is True and \ + available_preprocessors[name].get_properties()[ + 'handles_multiclass'] is False: continue - if multilabel is True and available_preprocessors[name]. \ - get_properties()['handles_multilabel'] is False: + if dataset_properties.get('multilabel') is True and \ + available_preprocessors[name].get_properties()[ + 'handles_multilabel'] is False: continue - if sparse is True and available_preprocessors[name]. \ - get_properties()['handles_sparse'] is False: + if dataset_properties.get('sparse') is True and \ + available_preprocessors[name].get_properties()[ + 'handles_sparse'] is False: continue preprocessors[name] = available_preprocessors[name] @@ -165,8 +172,8 @@ def get_hyperparameter_search_space(include_estimators=None, configuration_space = AutoSklearnBaseEstimator\ ._get_hyperparameter_search_space( AutoSklearnClassifier._get_estimator_hyperparameter_name(), - classifiers, preprocessors, - AutoSklearnClassifier._pipeline, classifier_default) + classifier_default, classifiers, preprocessors, dataset_properties, + AutoSklearnClassifier._pipeline) # And now add forbidden parameter configurations which would take too # long diff --git a/AutoSklearn/components/classification/extra_trees.py b/AutoSklearn/components/classification/extra_trees.py index 916843c50d..57f80174e2 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/AutoSklearn/components/classification/extra_trees.py @@ -109,7 +109,7 @@ def get_properties(): 'preferred_dtype': np.float32} @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): #use_max_depth = CategoricalHyperparameter( # name="use_max_depth", choices=("True", "False"), default="False") diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/AutoSklearn/components/classification/gradient_boosting.py index 400ce087da..2affc752ed 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/AutoSklearn/components/classification/gradient_boosting.py @@ -114,7 +114,7 @@ def get_properties(): 'preferred_dtype': np.float32} @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): learning_rate = UniformFloatHyperparameter( name="learning_rate", lower=0.0001, upper=1, default=0.1, log=True) subsample = UniformFloatHyperparameter( diff --git a/AutoSklearn/components/classification/k_nearest_neighbors.py b/AutoSklearn/components/classification/k_nearest_neighbors.py index b8b1b4695e..d32fe6c1b5 100644 --- a/AutoSklearn/components/classification/k_nearest_neighbors.py +++ b/AutoSklearn/components/classification/k_nearest_neighbors.py @@ -61,7 +61,7 @@ def get_properties(): 'preferred_dtype' : None} @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): n_neighbors = UniformIntegerHyperparameter( name="n_neighbors", lower=1, upper=100, default=1) diff --git a/AutoSklearn/components/classification/liblinear.py b/AutoSklearn/components/classification/liblinear.py index a3d250b175..aa6299f21c 100644 --- a/AutoSklearn/components/classification/liblinear.py +++ b/AutoSklearn/components/classification/liblinear.py @@ -81,7 +81,7 @@ def get_properties(): 'preferred_dtype' : None} @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): penalty = CategoricalHyperparameter("penalty", ["l1", "l2"], default="l2") loss = CategoricalHyperparameter("loss", ["l1", "l2"], default="l2") diff --git a/AutoSklearn/components/classification/libsvm_svc.py b/AutoSklearn/components/classification/libsvm_svc.py index 182f4f4837..45c592c0f0 100644 --- a/AutoSklearn/components/classification/libsvm_svc.py +++ b/AutoSklearn/components/classification/libsvm_svc.py @@ -87,7 +87,7 @@ def get_properties(): 'preferred_dtype': None} @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True, default=1.0) # No linear kernel here, because we have liblinear diff --git a/AutoSklearn/components/classification/random_forest.py b/AutoSklearn/components/classification/random_forest.py index e4ca54c71a..8d4ea9cda6 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/AutoSklearn/components/classification/random_forest.py @@ -98,7 +98,7 @@ def get_properties(): 'preferred_dtype': np.float32} @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): #n_estimators = UniformIntegerHyperparameter( # "n_estimators", 10, 100, default=10) n_estimators = Constant("n_estimators", 100) diff --git a/AutoSklearn/components/classification/sgd.py b/AutoSklearn/components/classification/sgd.py index 8c08a0b7a1..247cb313ec 100644 --- a/AutoSklearn/components/classification/sgd.py +++ b/AutoSklearn/components/classification/sgd.py @@ -89,7 +89,7 @@ def get_properties(): 'preferred_dtype' : None} @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): loss = CategoricalHyperparameter("loss", ["hinge", "log", "modified_huber", "squared_hinge", "perceptron"], default="hinge") diff --git a/AutoSklearn/components/classification_base.py b/AutoSklearn/components/classification_base.py index fdf947654b..cbeadce25a 100644 --- a/AutoSklearn/components/classification_base.py +++ b/AutoSklearn/components/classification_base.py @@ -42,7 +42,7 @@ def get_properties(): raise NotImplementedError() @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): """Return the configuration space of this classification algorithm. Returns diff --git a/AutoSklearn/components/preprocessing/imputation.py b/AutoSklearn/components/preprocessing/imputation.py index 19b82e60af..fbe600a6d7 100644 --- a/AutoSklearn/components/preprocessing/imputation.py +++ b/AutoSklearn/components/preprocessing/imputation.py @@ -41,7 +41,7 @@ def get_properties(): 'preferred_dtype': None} @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): # TODO add replace by zero! strategy = CategoricalHyperparameter( "strategy", ["mean", "median", "most_frequent"], default="mean") diff --git a/AutoSklearn/components/preprocessing/kitchen_sinks.py b/AutoSklearn/components/preprocessing/kitchen_sinks.py index d05a56235a..6a32ef46aa 100644 --- a/AutoSklearn/components/preprocessing/kitchen_sinks.py +++ b/AutoSklearn/components/preprocessing/kitchen_sinks.py @@ -48,7 +48,7 @@ def get_properties(): 'preferred_dtype': None} @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): gamma = UniformFloatHyperparameter( "gamma", 0.3, 2., default=1.0) n_components = UniformIntegerHyperparameter( diff --git a/AutoSklearn/components/preprocessing/pca.py b/AutoSklearn/components/preprocessing/pca.py index f15d975d00..812e8edfdc 100644 --- a/AutoSklearn/components/preprocessing/pca.py +++ b/AutoSklearn/components/preprocessing/pca.py @@ -63,7 +63,7 @@ def get_properties(): 'preferred_dtype': None} @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): keep_variance = UniformFloatHyperparameter( "keep_variance", 0.5, 1.0, default=1.0) whiten = CategoricalHyperparameter( diff --git a/AutoSklearn/components/preprocessing/rescaling.py b/AutoSklearn/components/preprocessing/rescaling.py index e0b2d85a7f..74bc5cbd6c 100644 --- a/AutoSklearn/components/preprocessing/rescaling.py +++ b/AutoSklearn/components/preprocessing/rescaling.py @@ -46,7 +46,7 @@ def get_properties(): 'preferred_dtype': None} @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): # TODO add replace by zero! strategy = CategoricalHyperparameter( "strategy", ["min/max", "standard"], default="min/max") diff --git a/AutoSklearn/components/preprocessing/sparse_filtering.py b/AutoSklearn/components/preprocessing/sparse_filtering.py index 12ddf13c91..8e9adab40c 100644 --- a/AutoSklearn/components/preprocessing/sparse_filtering.py +++ b/AutoSklearn/components/preprocessing/sparse_filtering.py @@ -42,7 +42,7 @@ def get_properties(): @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): N = UniformIntegerHyperparameter( "N", 50, 2000, default=100) maxiter = UniformIntegerHyperparameter( diff --git a/AutoSklearn/components/preprocessor_base.py b/AutoSklearn/components/preprocessor_base.py index c2914ed71e..c3d502ab6d 100644 --- a/AutoSklearn/components/preprocessor_base.py +++ b/AutoSklearn/components/preprocessor_base.py @@ -45,7 +45,7 @@ def get_properties(): raise NotImplementedError() @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): """Return the configuration space of this preprocessing algorithm. Returns diff --git a/AutoSklearn/components/regression/gradient_boosting.py b/AutoSklearn/components/regression/gradient_boosting.py index 9181b08ddd..9af650450f 100644 --- a/AutoSklearn/components/regression/gradient_boosting.py +++ b/AutoSklearn/components/regression/gradient_boosting.py @@ -119,7 +119,7 @@ def get_properties(): 'preferred_dtype': np.float32} @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): loss = CategoricalHyperparameter( name="loss", choices=["ls", "lad"], default='ls') #, "huber", "quantile"], default='ls') diff --git a/AutoSklearn/components/regression/random_forest.py b/AutoSklearn/components/regression/random_forest.py index 5d85b01ae1..96a396b03f 100644 --- a/AutoSklearn/components/regression/random_forest.py +++ b/AutoSklearn/components/regression/random_forest.py @@ -106,7 +106,7 @@ def get_properties(): 'preferred_dtype': np.float32} @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): criterion = Constant(name="criterion", value="mse") # Copied from classification/random_forest.py #n_estimators = UniformIntegerHyperparameter( diff --git a/AutoSklearn/components/regression/ridge_regression.py b/AutoSklearn/components/regression/ridge_regression.py index 8ba944a905..b49e983ea6 100644 --- a/AutoSklearn/components/regression/ridge_regression.py +++ b/AutoSklearn/components/regression/ridge_regression.py @@ -58,7 +58,7 @@ def get_properties(): 'preferred_dtype': np.float32} @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): alpha = UniformFloatHyperparameter( name="alpha", lower=0.0001, upper=10, default=1.0, log=True) diff --git a/AutoSklearn/components/regression/support_vector_regression.py b/AutoSklearn/components/regression/support_vector_regression.py index ffe92de1fd..3f21664b58 100644 --- a/AutoSklearn/components/regression/support_vector_regression.py +++ b/AutoSklearn/components/regression/support_vector_regression.py @@ -90,7 +90,7 @@ def get_properties(): 'preferred_dtype': np.float32} @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties): # Copied from libsvm_c C = UniformFloatHyperparameter( name="C", lower=0.03125, upper=32768, log=True, default=1.0) diff --git a/AutoSklearn/components/regression_base.py b/AutoSklearn/components/regression_base.py index 14c55a29e8..5bae0e91b8 100644 --- a/AutoSklearn/components/regression_base.py +++ b/AutoSklearn/components/regression_base.py @@ -38,7 +38,7 @@ def get_properties(): raise NotImplementedError() @staticmethod - def get_hyperparameter_search_space(): + def get_hyperparameter_search_space(dataset_properties=None): """Return the configuration space of this regression algorithm. Returns diff --git a/AutoSklearn/regression.py b/AutoSklearn/regression.py index d5e4a0df90..7391167bf3 100644 --- a/AutoSklearn/regression.py +++ b/AutoSklearn/regression.py @@ -90,7 +90,7 @@ def get_hyperparameter_search_space(include_estimators=None, exclude_estimators=None, include_preprocessors=None, exclude_preprocessors=None, - sparse=False): + dataset_properties=None): """Return the configuration space for the CASH problem. Parameters @@ -126,7 +126,6 @@ def get_hyperparameter_search_space(include_estimators=None, cs : HPOlibConfigSpace.configuration_space.Configuration The configuration space describing the AutoSklearnClassifier. """ - if include_estimators is not None and exclude_estimators is not None: raise ValueError("The arguments include_estimators and " "exclude_regressors cannot be used together.") @@ -135,6 +134,9 @@ def get_hyperparameter_search_space(include_estimators=None, raise ValueError("The arguments include_preprocessors and " "exclude_preprocessors cannot be used together.") + if dataset_properties is None or not isinstance(dataset_properties, dict): + dataset_properties = dict() + # Compile a list of all estimator objects for this problem available_regressors = AutoSklearnRegressor._get_estimator_components() @@ -149,8 +151,9 @@ def get_hyperparameter_search_space(include_estimators=None, elif exclude_estimators is not None and \ name in exclude_estimators: continue - if sparse is True and available_regressors[name]. \ - get_properties()['handles_sparse'] is False: + if dataset_properties.get('sparse') is True and \ + available_regressors[name].get_properties()[ + 'handles_sparse'] is False: continue regressors[name] = available_regressors[name] @@ -185,8 +188,9 @@ def get_hyperparameter_search_space(include_estimators=None, name in exclude_preprocessors: continue - if sparse is True and available_preprocessors[name]. \ - get_properties()['handles_sparse'] is False: + if dataset_properties.get('sparse') is True and \ + available_preprocessors[name].get_properties()[ + 'handles_sparse'] is False: continue elif available_preprocessors[name]. \ get_properties()['handles_regression'] is False: @@ -198,8 +202,8 @@ def get_hyperparameter_search_space(include_estimators=None, configuration_space = AutoSklearnBaseEstimator \ ._get_hyperparameter_search_space( AutoSklearnRegressor._get_estimator_hyperparameter_name(), - regressors, preprocessors, - AutoSklearnRegressor._pipeline, regressor_default) + regressor_default, regressors, preprocessors, dataset_properties, + AutoSklearnRegressor._pipeline, ) # And now add forbidden parameter configurations which would take too # long diff --git a/tests/test_classification.py b/tests/test_classification.py index 2dae3707ee..d751420b4b 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -80,31 +80,33 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): def test_get_hyperparameter_search_space_dataset_properties(self): full_cs = AutoSklearnClassifier.get_hyperparameter_search_space() cs_mc = AutoSklearnClassifier.get_hyperparameter_search_space( - multiclass=True) + dataset_properties={'multiclass': True}) self.assertEqual(full_cs, cs_mc) cs_ml = AutoSklearnClassifier.get_hyperparameter_search_space( - multilabel=True) + dataset_properties={'multilabel': True}) self.assertNotIn('k_nearest_neighbors', str(cs_ml)) self.assertNotIn('liblinear', str(cs_ml)) self.assertNotIn('libsvm_svc', str(cs_ml)) self.assertNotIn('sgd', str(cs_ml)) cs_sp = AutoSklearnClassifier.get_hyperparameter_search_space( - sparse=True) + dataset_properties={'sparse': True}) self.assertNotIn('extra_trees', str(cs_sp)) self.assertNotIn('gradient_boosting', str(cs_sp)) self.assertNotIn('random_forest', str(cs_sp)) cs_mc_ml = AutoSklearnClassifier.get_hyperparameter_search_space( - multiclass=True, multilabel=True) + dataset_properties={'multilabel': True, 'multiclass': True}) self.assertEqual(cs_ml, cs_mc_ml) self.assertRaisesRegexp(ValueError, "No classifier to build a configuration space " "for...", AutoSklearnClassifier. get_hyperparameter_search_space, - multiclass=True, multilabel=True, sparse=True) + dataset_properties={'multilabel': True, + 'multiclass': True, + 'sparse': True}) @unittest.skip("test_check_random_state Not yet Implemented") def test_check_random_state(self): From 07bda268f9a9c2f41bf4adda8209adf5b4f8084b Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Thu, 15 Jan 2015 12:42:58 +0100 Subject: [PATCH 110/352] Add select_Percentile as feature selection method --- .../preprocessing/select_percentile.py | 16 +++++ .../select_percentile_classification.py | 69 +++++++++++++++++++ .../select_percentile_regression.py | 61 ++++++++++++++++ .../test_select_percentile_classification.py | 19 +++++ .../test_select_percentile_regression.py | 12 ++++ 5 files changed, 177 insertions(+) create mode 100644 AutoSklearn/components/preprocessing/select_percentile.py create mode 100644 AutoSklearn/components/preprocessing/select_percentile_classification.py create mode 100644 AutoSklearn/components/preprocessing/select_percentile_regression.py create mode 100644 tests/components/preprocessing/test_select_percentile_classification.py create mode 100644 tests/components/preprocessing/test_select_percentile_regression.py diff --git a/AutoSklearn/components/preprocessing/select_percentile.py b/AutoSklearn/components/preprocessing/select_percentile.py new file mode 100644 index 0000000000..8ee52702b5 --- /dev/null +++ b/AutoSklearn/components/preprocessing/select_percentile.py @@ -0,0 +1,16 @@ +import sklearn.feature_selection + + +class SelectPercentileBase(object): + + def fit(self, X, Y): + self.preprocessor = sklearn.feature_selection.SelectPercentile( + score_func=self.score_func, + percentile=self.percentile) + self.preprocessor.fit(X, Y) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) diff --git a/AutoSklearn/components/preprocessing/select_percentile_classification.py b/AutoSklearn/components/preprocessing/select_percentile_classification.py new file mode 100644 index 0000000000..a4a0798388 --- /dev/null +++ b/AutoSklearn/components/preprocessing/select_percentile_classification.py @@ -0,0 +1,69 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, CategoricalHyperparameter, Constant + +import sklearn.feature_selection + +from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm +from select_percentile import SelectPercentileBase + + +class SelectPercentileClassification(SelectPercentileBase, AutoSklearnPreprocessingAlgorithm): + + def __init__(self, percentile, score_func="chi2", random_state=None): + """ Parameters: + random state : ignored + + score_func : callable, Function taking two arrays X and y, and + returning a pair of arrays (scores, pvalues). + """ + self.random_state = random_state # We don't use this + self.percentile = int(float(percentile)) + if score_func == "chi2": + self.score_func = sklearn.feature_selection.chi2 + elif score_func == "f_classif": + self.score_func = sklearn.feature_selection.f_classif + else: + raise ValueError("score_func must be in ('chi2, 'f_classif'), " + "but is: %s" % score_func) + + + @staticmethod + def get_properties(): + return {'shortname': 'SPC', + 'name': 'Select Percentile Classification', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': True, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + percentile = UniformFloatHyperparameter( + name="percentile", lower=10, upper=90, default=50) + + score_func = CategoricalHyperparameter( + name="score_func", choices=["chi2", "f_classif"], default="chi2") + if dataset_properties is not None: + # Chi2 can handle sparse data, so we respect this + if 'sparse' in dataset_properties and dataset_properties['sparse']: + score_func = Constant( + name="score_func", value="chi2") + + cs = ConfigurationSpace() + cs.add_hyperparameter(percentile) + cs.add_hyperparameter(score_func) + + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "AutoSklearn %" % name + diff --git a/AutoSklearn/components/preprocessing/select_percentile_regression.py b/AutoSklearn/components/preprocessing/select_percentile_regression.py new file mode 100644 index 0000000000..895a2fdd3f --- /dev/null +++ b/AutoSklearn/components/preprocessing/select_percentile_regression.py @@ -0,0 +1,61 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, UnParametrizedHyperparameter + +import sklearn.feature_selection + +from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm +from select_percentile import SelectPercentileBase + + +class SelectPercentileRegression(SelectPercentileBase, AutoSklearnPreprocessingAlgorithm): + + def __init__(self, percentile, score_func="f_classif", random_state=None): + """ Parameters: + random state : ignored + + score_func : callable, Function taking two arrays X and y, and + returning a pair of arrays (scores, pvalues). + """ + + self.random_state = random_state # We don't use this + self.percentile = int(float(percentile)) + if score_func == "f_regression": + self.score_func = sklearn.feature_selection.f_regression + else: + raise ValueError("Don't know this scoring function: %s" % score_func) + + + @staticmethod + def get_properties(): + return {'shortname': 'SPR', + 'name': 'Select Percentile Regression', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': False, + 'handles_multiclass': False, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + percentile = UniformFloatHyperparameter( + "percentile", lower=10, upper=90, default=50) + + score_func = UnParametrizedHyperparameter( + name="score_func", value="f_regression") + + cs = ConfigurationSpace() + cs.add_hyperparameter(percentile) + cs.add_hyperparameter(score_func) + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "AutoSklearn %" % name + diff --git a/tests/components/preprocessing/test_select_percentile_classification.py b/tests/components/preprocessing/test_select_percentile_classification.py new file mode 100644 index 0000000000..69a7b58c3f --- /dev/null +++ b/tests/components/preprocessing/test_select_percentile_classification.py @@ -0,0 +1,19 @@ +import unittest + +import scipy.sparse + +from AutoSklearn.components.preprocessing.select_percentile_classification import SelectPercentileClassification +from AutoSklearn.util import _test_preprocessing + + +class SelectPercentileClassificationTest(unittest.TestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(SelectPercentileClassification) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], int(original.shape[1]/2)) + self.assertFalse((transformation == 0).all()) + + transformation, original = _test_preprocessing(SelectPercentileClassification, make_sparse=True) + self.assertTrue(scipy.sparse.issparse(transformation)) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], int(original.shape[1]/2)) diff --git a/tests/components/preprocessing/test_select_percentile_regression.py b/tests/components/preprocessing/test_select_percentile_regression.py new file mode 100644 index 0000000000..46531e692a --- /dev/null +++ b/tests/components/preprocessing/test_select_percentile_regression.py @@ -0,0 +1,12 @@ +import unittest + +from AutoSklearn.components.preprocessing.select_percentile_regression import SelectPercentileRegression +from AutoSklearn.util import _test_preprocessing + + +class SelectPercentileRegressionTest(unittest.TestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(dataset="boston", Preprocessor=SelectPercentileRegression) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], int(original.shape[1]/2)) + self.assertFalse((transformation == 0).all()) From 26b696e259ece3a89bc0662a4f36ff6fd72e5c1c Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Thu, 15 Jan 2015 12:43:35 +0100 Subject: [PATCH 111/352] fix typo --- tests/components/preprocessing/test_imputation.py | 2 +- tests/components/preprocessing/test_kitchen_sinks.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/preprocessing/test_imputation.py b/tests/components/preprocessing/test_imputation.py index e1a4688132..c2cb9707e6 100644 --- a/tests/components/preprocessing/test_imputation.py +++ b/tests/components/preprocessing/test_imputation.py @@ -6,7 +6,7 @@ from AutoSklearn.util import _test_preprocessing -class LibLinearComponentTest(unittest.TestCase): +class ImputationTest(unittest.TestCase): def test_default_configuration(self): transformations = [] for i in range(10): diff --git a/tests/components/preprocessing/test_kitchen_sinks.py b/tests/components/preprocessing/test_kitchen_sinks.py index abb30fd052..ea441a41f1 100644 --- a/tests/components/preprocessing/test_kitchen_sinks.py +++ b/tests/components/preprocessing/test_kitchen_sinks.py @@ -4,7 +4,7 @@ from AutoSklearn.util import _test_preprocessing -class PCAComponentTest(unittest.TestCase): +class KitchenSinkComponent(unittest.TestCase): def test_default_configuration(self): transformation, original = _test_preprocessing(RandomKitchenSinks) self.assertEqual(transformation.shape[0], original.shape[0]) From c20d711a0e40f8a6485efd046ecdc27134d96825 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Thu, 15 Jan 2015 12:43:54 +0100 Subject: [PATCH 112/352] adjust #hyperparameter --- tests/test_classification.py | 2 +- tests/test_regression.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_classification.py b/tests/test_classification.py index d751420b4b..e60ea6c253 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -55,7 +55,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(67, len(hyperparameters)) + self.assertEqual(69, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): diff --git a/tests/test_regression.py b/tests/test_regression.py index d5bf125f38..e3444fc87d 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -56,7 +56,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(26, len(hyperparameters)) + self.assertEqual(28, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From 2c5838d0377e945beb114f3cd4c119eb4444e659 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 15 Jan 2015 14:19:05 +0100 Subject: [PATCH 113/352] Update information about implemented models --- AutoSklearn/util.py | 43 ++++---------------- misc/classifiers.csv | 86 +++++++++++++++++++-------------------- misc/regressors.csv | 92 +++++++++++++++++++++--------------------- misc/transformers.csv | 94 +++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 190 insertions(+), 125 deletions(-) create mode 100644 misc/transformers.csv diff --git a/AutoSklearn/util.py b/AutoSklearn/util.py index c899373598..c95151972f 100644 --- a/AutoSklearn/util.py +++ b/AutoSklearn/util.py @@ -10,7 +10,7 @@ import sklearn.datasets -def find_sklearn_classifiers(): +def find_sklearn_classes(class_): classifiers = set() all_subdirectories = [] sklearn_path = sklearn.__path__[0] @@ -33,42 +33,12 @@ def find_sklearn_classifiers(): for member_name, obj in inspect.getmembers(pkg): if inspect.isclass(obj) and \ - issubclass(obj, sklearn.base.ClassifierMixin): + issubclass(obj, class_): classifier = obj - print member_name, obj - classifiers.add(classifier) - - print classifiers - - -def find_sklearn_regressor(): - classifiers = set() - all_subdirectories = [] - sklearn_path = sklearn.__path__[0] - for root, dirs, files in os.walk(sklearn_path): - all_subdirectories.append(root) - - for module_loader, module_name, ispkg in \ - pkgutil.iter_modules(all_subdirectories): - - # Work around some issues... - if module_name in ["hmm", "mixture"]: - print "Skipping %s" % module_name - continue - - module_file = module_loader.__dict__["path"] - sklearn_module = module_file.replace(sklearn_path, "").replace("/", ".") - full_module_name = "sklearn" + sklearn_module + "." + module_name - - pkg = importlib.import_module(full_module_name) - - for member_name, obj in inspect.getmembers(pkg): - if inspect.isclass(obj) and \ - issubclass(obj, sklearn.base.RegressorMixin): - classifier = obj - print member_name, obj + # print member_name, obj classifiers.add(classifier) + print print classifiers @@ -141,5 +111,6 @@ def _test_regressor(Regressor, dataset='diabetes'): if __name__ == "__main__": - find_sklearn_classifiers() - find_sklearn_regressor() \ No newline at end of file + find_sklearn_classes(sklearn.base.ClassifierMixin) + find_sklearn_classes(sklearn.base.RegressorMixin) + find_sklearn_classes(sklearn.base.TransformerMixin) \ No newline at end of file diff --git a/misc/classifiers.csv b/misc/classifiers.csv index cecd71df3b..9e03fdc8f8 100644 --- a/misc/classifiers.csv +++ b/misc/classifiers.csv @@ -1,43 +1,43 @@ -Name,class,added,comment -,,False,Mixin class which adds no functionality except the score function -,,False,Outlier detection -,,False,Please read the module name;) -,,False,This can blow up the configuration space, because we need to define a configured base object. Maybe consider later. -,ExtraTreesClassifier,,True, -,,False,Scikit-learn source code says: This class should not be used directly -RandomForestClassifier,,True, -GradientBoostingClassifier,,True, -,,, -,,, -,,False,Mixin but no full model -,,, -,,, -,,, -,,, -,,, -,,False,This class has abstract methods -StochasticGradientDescentClassifier,,True, -,,False,This classifier is in a test module -,,False,This classifier is in a test module -,,False,Is a meta-estimator -,,False,Is a meta-estimator -,,False,Is a meta-estimator -,,, -,,False,Abstract base class for naive Bayes estimators -,,False,Abstract base class for naive Bayes on discrete/categorical data -,,, -,,, -KnearestNeighborsClassifier,,True, -,,, -,,, -,,, -,,False,semi-supervised learning -,,False,semi-supervised learning -,,False,semi-supervised learning -,,False,ABC for LibSVM-based classifiers -LinearSVC,,True, -,,, -SVC,,True, -,,False,This classifier is in a test module -,,, -,,, +class,added,comment +,False,Mixin class which adds no functionality except the score function +,False,Outlier detection +,False,Please read the module name +,False,This can blow up the configuration space; because we need to define a configured base object. Maybe consider later. +,True, +,False,Scikit-learn source code says: This class should not be used directly +,True, +,True, +,FALSE,This can blow up the configuration space; because we need to define a configured base object. Maybe consider later. +,, +,False,Mixin but no full model +,, +,, +,, +,, +,, +,False,This class has abstract methods +,True, +,False,This classifier is in a test module +,False,This classifier is in a test module +,False,Is a meta-estimator +,False,Is a meta-estimator +,False,Is a meta-estimator +,False,Abstract base class for naive Bayes estimators +,False,Abstract base class for naive Bayes on discrete/categorical data +,, +,, +,, +,True, +,,Can crash when there is no neighbour within the radius +,, +,, +,False,semi-supervised learning +,False,semi-supervised learning +,False,semi-supervised learning +,False,ABC for LibSVM-based classifiers +,True, +,, +,True, +,False,This classifier is in a test module +,FALSE,Rfs are considered better (and are most likely faster to train) +,FALSE,ExtraTreeForests are considered better diff --git a/misc/regressors.csv b/misc/regressors.csv index db52d29cef..7ee65254e3 100644 --- a/misc/regressors.csv +++ b/misc/regressors.csv @@ -1,46 +1,46 @@ -Name,class,added,comment -,,, -,,, -RidgeRegression,,True,Check range for alpha -,,, -WeDoNotAddThis,,False,we already have this method -WeDoNotAddThis,,False,See module name -,,,Crashes when predicting a training input and weighted distances -Preprocessing,,False,Preprocessing -,,, -Multitask,,False,MultiTask -Preprocessing,,False,Preprocessing -,,, -,,, -WeDoNotAddThis,,False,We already have this method -Support_vector_regression,,True,Check searchspace -,,, -,,, -,,,Can crash when there is no neighbour within the radius -WeDoNotAddThis,,False,BaseClass -,,, -WeDoNotAddThis,,False,We alreday have this method -MultiTask,,False,MultiTask -WeDoNotAddThis,,False,We already have this method -MultiTask,,False,MultiTask -,,, -,,, -WeDoNotAddThis,,False, We already have this method -,,, -,,, -Preprocessing,,False,Preprocessing -,,, -,,, -WeDoNotAddThis,,False,We already have this method -WeDoNotAddThis,,False,We alreday have this method -,,, -,,, -,,, -Gradient_boosting,,True, -,,, -,,, -WeDoNotAddThis,,False,We already have this method -,,,Crashes when getting two similar inputs -,,, -RandomForest,,True, -Preprocessing,,False,Preprocessing +class,added,comment +,False,BaseClass +,False,Is a preprocessing method +,False,Is a preprocessing method +,False,Is a preprocessing method +,False,Is a preprocessing method +,False,See module name +,False,This can blow up the configuration space; because we need to define a configured base object. Maybe consider later. +,, +,, +,True, +,True, +,, +,,Crashes when getting two similar inputs +,, +,, +,, +,, +,, +,False, +,, +,False,We want to perform CV ourselves +,False,MultiTask +,False,We want to perform CV ourselves +,False,MultiTask +,False,MultiTask +,, +,False,We want to perform CV ourselves +,, +,False,We want to perform CV ourselves +,False,We want to perform CV ourselves +,, +,False,We want to perform CV ourselves +,, +,, +,True,Check range for alpha +,False,We want to perform CV ourselves +,, +,, +,FALSE,This regressor is inside a test module +,,Crashes when predicting a training input and weighted distances +,,Can crash when there is no neighbour within the radius +,, +,True,Check searchspace +,FALSE,Rfs are considered better (and are most likely faster to train) +,FALSE,ExtraTreeForests are considered better diff --git a/misc/transformers.csv b/misc/transformers.csv new file mode 100644 index 0000000000..380df7bad0 --- /dev/null +++ b/misc/transformers.csv @@ -0,0 +1,94 @@ +class,added,comment +,FALSE,BaseClass +,FALSE,Mixin class for feature agglomeration. +,, +,, +,, +,, +,, +,,Base class; https://www.stat.washington.edu/research/reports/2000/tr371.pdf +,, +,, +,, +,, +,, +,,What is the difference to Tobis implementation? +,FALSE,Mixin class for sparse coding +,, +,, +,, +,, +,, +,TRUE, +,, +,, +,, +,, +,, +,FALSE,Base class +,FALSE,Prefer Forests +,FALSE,Prefer Forests +,FALSE,Base class +,FALSE,Base class +,, +,, +,, +,FALSE,Base class +,, +,, +,FALSE,Similar to 1HotEncoding +,FALSE,Useful when working with strings +,, +,FALSE,subclass of TransformerMixin +,FALSE,subclass of TransformerMixin +,FALSE,"Metaclass, can blow up the configuration space" +,FALSE,"Metaclass, with cross validation" +,, +,FALSE,Base class +,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +,, +,FALSE,Base class +,, +,, +,, +,, +,, +,, +,, +,, +,, +,FALSE,"Right now, we do not have different feature sources." +,FALSE,"Right now, we have no need to binarize data" +,FALSE,"Right now, we have no need to center a kernel" +,TRUE, +,, +,TRUE, +,, +,TRUE, +,TRUE, +,FALSE,"Right now, we have 1HotEncoding" +,FALSE,This should be done before passing data to scikit-learn and thus not configured. +,FALSE,… +,, +,, +,, +,, +,FALSE,Is in a test package +,FALSE,Base class +,FALSE,Use forests +,FALSE,Use forests +,FALSE,Use forests +,FALSE,Use forests From 25c3caab375186daf3443552d9d9e4809b8d87d6 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 15 Jan 2015 17:15:46 +0100 Subject: [PATCH 114/352] Clean up calls to parent classes in AutoSklearnEstimators --- AutoSklearn/base.py | 4 ++-- AutoSklearn/classification.py | 28 +++++++++++++++++----------- AutoSklearn/regression.py | 12 ++++++------ 3 files changed, 25 insertions(+), 19 deletions(-) diff --git a/AutoSklearn/base.py b/AutoSklearn/base.py index cef2dfd394..ba6be1a66b 100644 --- a/AutoSklearn/base.py +++ b/AutoSklearn/base.py @@ -222,8 +222,8 @@ def predict(self, X): self._validate_input_X(X) return self._pipeline.predict(X) - @staticmethod - def _get_hyperparameter_search_space(estimator_name, + @classmethod + def get_hyperparameter_search_space(cls, estimator_name, default_estimator, estimator_components, preprocessor_components, diff --git a/AutoSklearn/classification.py b/AutoSklearn/classification.py index bcf75d586e..92af0f5a4d 100644 --- a/AutoSklearn/classification.py +++ b/AutoSklearn/classification.py @@ -1,4 +1,3 @@ -import copy from itertools import product from sklearn.base import ClassifierMixin @@ -54,8 +53,6 @@ class AutoSklearnClassifier(ClassifierMixin, AutoSklearnBaseEstimator): -------- """ - _pipeline = ["imputation", "rescaling", "__preprocessor__", - "__estimator__"] def predict_proba(self, X): """predict_proba. @@ -75,13 +72,18 @@ def predict_proba(self, X): return self._pipeline.steps[-1][-1].predict_proba(Xt) - @staticmethod - def get_hyperparameter_search_space(include_estimators=None, + @classmethod + def get_hyperparameter_search_space(cls, include_estimators=None, exclude_estimators=None, include_preprocessors=None, exclude_preprocessors=None, dataset_properties=None): + print cls + print include_estimators + print exclude_estimators + print exclude_preprocessors + if include_estimators is not None and exclude_estimators is not None: raise ValueError("The arguments include_estimators and " "exclude_estimators cannot be used together.") @@ -140,7 +142,7 @@ def get_hyperparameter_search_space(include_estimators=None, preprocessors = dict() for name in available_preprocessors: - if name in ["imputation", "rescaling"]: + if name in cls._get_pipeline(): preprocessors[name] = available_preprocessors[name] continue elif include_preprocessors is not None and \ @@ -169,11 +171,11 @@ def get_hyperparameter_search_space(include_estimators=None, preprocessors[name] = available_preprocessors[name] # Get the configuration space - configuration_space = AutoSklearnBaseEstimator\ - ._get_hyperparameter_search_space( - AutoSklearnClassifier._get_estimator_hyperparameter_name(), + configuration_space = super(AutoSklearnClassifier, cls)\ + .get_hyperparameter_search_space( + cls._get_estimator_hyperparameter_name(), classifier_default, classifiers, preprocessors, dataset_properties, - AutoSklearnClassifier._pipeline) + cls._get_pipeline()) # And now add forbidden parameter configurations which would take too # long @@ -201,4 +203,8 @@ def _get_estimator_hyperparameter_name(): @staticmethod def _get_estimator_components(): - return components.classification_components._classifiers \ No newline at end of file + return components.classification_components._classifiers + + @staticmethod + def _get_pipeline(): + return ["imputation", "rescaling", "__preprocessor__", "__estimator__"] \ No newline at end of file diff --git a/AutoSklearn/regression.py b/AutoSklearn/regression.py index 7391167bf3..f73a554618 100644 --- a/AutoSklearn/regression.py +++ b/AutoSklearn/regression.py @@ -85,8 +85,8 @@ def add_model_class(self, model): """ raise NotImplementedError() - @staticmethod - def get_hyperparameter_search_space(include_estimators=None, + @classmethod + def get_hyperparameter_search_space(cls, include_estimators=None, exclude_estimators=None, include_preprocessors=None, exclude_preprocessors=None, @@ -199,11 +199,11 @@ def get_hyperparameter_search_space(include_estimators=None, preprocessors[name] = available_preprocessors[name] # Get the configuration space - configuration_space = AutoSklearnBaseEstimator \ - ._get_hyperparameter_search_space( - AutoSklearnRegressor._get_estimator_hyperparameter_name(), + configuration_space = super(AutoSklearnRegressor, cls).\ + get_hyperparameter_search_space( + cls._get_estimator_hyperparameter_name(), regressor_default, regressors, preprocessors, dataset_properties, - AutoSklearnRegressor._pipeline, ) + cls._pipeline, ) # And now add forbidden parameter configurations which would take too # long From ffb1a9db29cf29b8117f2ccea52954316f323f05 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 15 Jan 2015 17:16:54 +0100 Subject: [PATCH 115/352] Add AutoSklearnTextClassification skeleton --- AutoSklearn/textclassification.py | 36 +++++++++++++++++++++++++++++++ tests/test_classification.py | 2 ++ tests/test_textclassification.py | 24 +++++++++++++++++++++ 3 files changed, 62 insertions(+) create mode 100644 AutoSklearn/textclassification.py create mode 100644 tests/test_textclassification.py diff --git a/AutoSklearn/textclassification.py b/AutoSklearn/textclassification.py new file mode 100644 index 0000000000..617f1cdab0 --- /dev/null +++ b/AutoSklearn/textclassification.py @@ -0,0 +1,36 @@ +from .classification import AutoSklearnClassifier + + +class AutoSklearnTextClassifier(AutoSklearnClassifier): + @classmethod + def get_hyperparameter_search_space(cls, include_estimators=None, + exclude_estimators=None, + include_preprocessors=None, + exclude_preprocessors=None, + dataset_properties=None): + if include_preprocessors is None: + if exclude_preprocessors is None: + exclude_preprocessors = ["rescaling"] + elif isinstance(exclude_preprocessors, list): + exclude_preprocessors.append(exclude_preprocessors) + else: + raise TypeError() + + # @Stefan: you can exclude classifiers and preprocessing methods here + # From here: http://blog.devzero.com/2013/01/28/how-to-override-a-class-method-in-python/ + cs = super(AutoSklearnTextClassifier, cls).\ + get_hyperparameter_search_space( + include_estimators=include_estimators, + exclude_estimators=exclude_estimators, + include_preprocessors=include_preprocessors, + exclude_preprocessors=exclude_preprocessors, + dataset_properties=dataset_properties + ) + + return cs + + @staticmethod + def _get_pipeline(): + # TODO @Stefan: you probably want to add row normalization after the + # preprocessing step + return ["imputation", "__preprocessor__", "__estimator__"] \ No newline at end of file diff --git a/tests/test_classification.py b/tests/test_classification.py index e60ea6c253..d295cc6ed4 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -56,6 +56,8 @@ def test_get_hyperparameter_search_space(self): conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() self.assertEqual(69, len(hyperparameters)) + # The four parameters which are always active are classifier, + # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py new file mode 100644 index 0000000000..873a1861b3 --- /dev/null +++ b/tests/test_textclassification.py @@ -0,0 +1,24 @@ +import unittest + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace + +from AutoSklearn.textclassification import AutoSklearnTextClassifier + + +class TextClassificationTest(unittest.TestCase): + def test_get_hyperparameter_search_space(self): + cs = AutoSklearnTextClassifier.get_hyperparameter_search_space() + self.assertIsInstance(cs, ConfigurationSpace) + conditions = cs.get_conditions() + hyperparameters = cs.get_hyperparameters() + self.assertEqual(68, len(hyperparameters)) + # The three parameters which are always active are classifier, + # preprocessor and imputation strategy + self.assertEqual(len(hyperparameters) - 3, len(conditions)) + self.assertNotIn("rescaling", cs.get_hyperparameter( + "preprocessor").choices) + self.assertRaisesRegexp(KeyError, "Hyperparameter " + "'rescaling:strategy' does not " + "exist in this configuration " + "space.", cs.get_hyperparameter, + "rescaling:strategy") \ No newline at end of file From e8bdb03e289c94a408f9f68f4e2b4878c94e0151 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 15 Jan 2015 17:58:12 +0100 Subject: [PATCH 116/352] Add forbidden configurations for select_percentile_classification --- AutoSklearn/base.py | 13 +++++++++++++ AutoSklearn/classification.py | 5 ----- .../select_percentile_classification.py | 2 +- 3 files changed, 14 insertions(+), 6 deletions(-) diff --git a/AutoSklearn/base.py b/AutoSklearn/base.py index ba6be1a66b..dc1106e12f 100644 --- a/AutoSklearn/base.py +++ b/AutoSklearn/base.py @@ -15,6 +15,8 @@ from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ InactiveHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition +from HPOlibConfigSpace.forbidden import ForbiddenAndConjunction, \ + ForbiddenEqualsClause from . import components as components @@ -359,6 +361,17 @@ def get_hyperparameter_search_space(cls, estimator_name, dlc.hyperparameter.name) cs.add_forbidden_clause(forbidden_clause) + # Now try to add things for which we know that they don't work + try: + cs.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(cs.get_hyperparameter( + "select_percentile_classification:score_func"), "chi2"), + ForbiddenEqualsClause(cs.get_hyperparameter( + "rescaling:strategy"), "standard") + )) + except: + pass + return cs @staticmethod diff --git a/AutoSklearn/classification.py b/AutoSklearn/classification.py index 92af0f5a4d..33f0d99a3f 100644 --- a/AutoSklearn/classification.py +++ b/AutoSklearn/classification.py @@ -79,11 +79,6 @@ def get_hyperparameter_search_space(cls, include_estimators=None, exclude_preprocessors=None, dataset_properties=None): - print cls - print include_estimators - print exclude_estimators - print exclude_preprocessors - if include_estimators is not None and exclude_estimators is not None: raise ValueError("The arguments include_estimators and " "exclude_estimators cannot be used together.") diff --git a/AutoSklearn/components/preprocessing/select_percentile_classification.py b/AutoSklearn/components/preprocessing/select_percentile_classification.py index a4a0798388..70c8f00324 100644 --- a/AutoSklearn/components/preprocessing/select_percentile_classification.py +++ b/AutoSklearn/components/preprocessing/select_percentile_classification.py @@ -39,7 +39,7 @@ def get_properties(): 'handles_regression': False, 'handles_classification': True, 'handles_multiclass': True, - 'handles_multilabel': True, + 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, 'preferred_dtype': None} From c9b5a8502b70c7c79e55d6d712309aff437383ba Mon Sep 17 00:00:00 2001 From: Manuel Blum Date: Fri, 16 Jan 2015 15:52:31 +0100 Subject: [PATCH 117/352] add Gaussian process regression --- .../components/regression/gaussian_process.py | 71 +++++++++++++++++++ .../regression/test_gaussian_process.py | 16 +++++ 2 files changed, 87 insertions(+) create mode 100644 AutoSklearn/components/regression/gaussian_process.py create mode 100644 tests/components/regression/test_gaussian_process.py diff --git a/AutoSklearn/components/regression/gaussian_process.py b/AutoSklearn/components/regression/gaussian_process.py new file mode 100644 index 0000000000..07d8d70ebb --- /dev/null +++ b/AutoSklearn/components/regression/gaussian_process.py @@ -0,0 +1,71 @@ +import numpy as np + +import sklearn.gaussian_process + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from ..regression_base import AutoSklearnRegressionAlgorithm + + +class GaussianProcess(AutoSklearnRegressionAlgorithm): + def __init__(self, nugget, thetaL, thetaU, normalize=False, copy_X=False, + tol=0.001, optimizer='fmin_cobyla', random_state=None): + self.nugget = float(nugget) + self.thetaL = float(thetaL) + self.thetaU = float(thetaU) + self.normalize = normalize + self.copy_X = copy_X + self.optimizer = optimizer + # We ignore it + self.random_state = random_state + self.estimator = None + + def fit(self, X, Y): + # Instanciate a Gaussian Process model + self.estimator = sklearn.gaussian_process.GaussianProcess( + corr='squared_exponential', + theta0=np.ones(X.shape[1]) * 1e-1, + thetaL=np.ones(X.shape[1]) * self.thetaL, + thetaU=np.ones(X.shape[1]) * self.thetaU, + nugget=self.nugget) + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + @staticmethod + def get_properties(): + return {'shortname': 'GP', + 'name': 'Gaussian Process', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': True, + 'is_deterministic': True, + 'handles_sparse': False, + # TODO find out what is best used here! + # But rather fortran or C-contiguous? + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + nugget = UniformFloatHyperparameter( + name="nugget", lower=0.0001, upper=10, default=0.1, log=True) + thetaL = UniformFloatHyperparameter( + name="thetaL", lower=1e-5, upper=1e-3, default=1e-4, log=True) + thetaU = UniformFloatHyperparameter( + name="thetaU", lower=0.2, upper=10, default=1.0, log=True) + + cs = ConfigurationSpace() + cs.add_hyperparameter(nugget) + cs.add_hyperparameter(thetaL) + cs.add_hyperparameter(thetaU) + return cs diff --git a/tests/components/regression/test_gaussian_process.py b/tests/components/regression/test_gaussian_process.py new file mode 100644 index 0000000000..f7d311e80c --- /dev/null +++ b/tests/components/regression/test_gaussian_process.py @@ -0,0 +1,16 @@ +import unittest + +from AutoSklearn.components.regression.gaussian_process import GaussianProcess +from AutoSklearn.util import _test_regressor + +import sklearn.metrics + + +class GaussianProcessComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + + predictions, targets = _test_regressor(GaussianProcess, dataset='diabetes') + self.assertAlmostEqual(0.28868771519194569, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) + From 6e6089ab553c069256f218418297736904c74d0e Mon Sep 17 00:00:00 2001 From: sfalkner Date: Mon, 19 Jan 2015 22:25:25 +0100 Subject: [PATCH 118/352] added an attempt for tidf and truncatedSVD preprocessing. Needs some review! --- AutoSklearn/components/preprocessing/tfidf.py | 67 +++++++++++++++++++ .../components/preprocessing/truncatedSVD.py | 59 ++++++++++++++++ 2 files changed, 126 insertions(+) create mode 100644 AutoSklearn/components/preprocessing/tfidf.py create mode 100644 AutoSklearn/components/preprocessing/truncatedSVD.py diff --git a/AutoSklearn/components/preprocessing/tfidf.py b/AutoSklearn/components/preprocessing/tfidf.py new file mode 100644 index 0000000000..0000127cff --- /dev/null +++ b/AutoSklearn/components/preprocessing/tfidf.py @@ -0,0 +1,67 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace, \ + Configuration + + +from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm + +import numpy as np + + +class TFIDF(AutoSklearnPreprocessingAlgorithm): + def __init__(self, random_state=None): + # + # This is implementation is for sparse data only! It will make inplace changes to the data! + # + # TODO: Define some meaningful parameter. Maybe some thresholding or so + # Should transform return X again? + # Should transform raise a NotImplementedError? + # 'handles_multilabel'??? + self.idf = None + self.random_state = random_state + + def fit(self, X, Y): + #count the number of docmunts in which each word occurs + weights = (X>0.0).sum(axis=0) + # words that never appear have to be treated differently! + indices = np.ravel(np.where(weights == 0)[1]) + + # calculate (the log of) the inverse document frequencies + self.idf = np.array(np.log(float(X_train.shape[0])/(weights)))[0] + # words that are not in the training data get will be set to zero + self.idf[indices] = 0 + + + return self + + def transform(self, X): + if self.idf is None: + raise NotImplementedError() + X.data *= self.idf[X.indices] + return X + + @staticmethod + def get_properties(): + return {'shortname': 'TFIDF', + 'name': 'Term Frequency (times) Inverse Document Frequency', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': None, + 'is_deterministic': True, + 'handles_sparse': True, + # TODO find out what is best used here! + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "AutoSklearn %" % name diff --git a/AutoSklearn/components/preprocessing/truncatedSVD.py b/AutoSklearn/components/preprocessing/truncatedSVD.py new file mode 100644 index 0000000000..ed5904e6d0 --- /dev/null +++ b/AutoSklearn/components/preprocessing/truncatedSVD.py @@ -0,0 +1,59 @@ +import sklearn.decomposition + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace, \ + Configuration + +from HPOlibConfigSpace.hyperparameters import IntegerHyperparameter + +from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm +import numpy as np + + + +class TruncatedSVD(AutoSklearnPreprocessingAlgorithm): + def __init__(self, target_dim, random_state=None): + # TODO: fill out handles_??? + # how to set the maximum of the hyperparameter search space for target dim in a meaningful way? + self.target_dim = target_dim + self.random_state = random_state + self.preprocessor=None + + def fit(self, X, Y): + self.preprocessor = sklearn.decomposition.TruncatedSVD(min(self.target_dim, X.shape[0]), algorithm='arpack') + self.preprocessor.fit(X, Y) + + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(): + return {'shortname': 'TSVD', + 'name': 'Truncated Singular Value Decomposition', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': None, + 'handles_classification': None, + 'handles_multiclass': None, + 'handles_multilabel': None, + 'is_deterministic': True, + 'handles_sparse': True, + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + target_dim = IntegerHyperparameter( + "target_dim", 0, 256, default=128) + cs = ConfigurationSpace() + cs.add_hyperparameter(target_dim) + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "AutoSklearn %" % name From 71e50680a29e714682b6a58bee70bd45e5981123 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 20 Jan 2015 11:28:24 +0100 Subject: [PATCH 119/352] Fix bug finding the default hyperparameter in AutoSklearnClassifier --- AutoSklearn/classification.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AutoSklearn/classification.py b/AutoSklearn/classification.py index 33f0d99a3f..631f1c6f60 100644 --- a/AutoSklearn/classification.py +++ b/AutoSklearn/classification.py @@ -129,7 +129,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, classifier_default = cd_ break if classifier_default is None: - classifier_default = classifiers[0] + classifier_default = classifiers.keys()[0] # Compile a list of preprocessor for this problem available_preprocessors = \ From 2b8745b9df5002487a618b52aa95276baeb10893 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 21 Jan 2015 13:43:23 +0100 Subject: [PATCH 120/352] Deactivate text components and add comments for Stefan --- AutoSklearn/components/preprocessing/tfidf.py | 24 +++++++-------- .../components/preprocessing/truncatedSVD.py | 30 +++++++++---------- 2 files changed, 25 insertions(+), 29 deletions(-) diff --git a/AutoSklearn/components/preprocessing/tfidf.py b/AutoSklearn/components/preprocessing/tfidf.py index 0000127cff..1e5f87ea39 100644 --- a/AutoSklearn/components/preprocessing/tfidf.py +++ b/AutoSklearn/components/preprocessing/tfidf.py @@ -7,29 +7,27 @@ import numpy as np -class TFIDF(AutoSklearnPreprocessingAlgorithm): +class TFIDF(object):#AutoSklearnPreprocessingAlgorithm): def __init__(self, random_state=None): - # # This is implementation is for sparse data only! It will make inplace changes to the data! - # - # TODO: Define some meaningful parameter. Maybe some thresholding or so - # Should transform return X again? - # Should transform raise a NotImplementedError? - # 'handles_multilabel'??? + self.idf = None self.random_state = random_state - def fit(self, X, Y): - #count the number of docmunts in which each word occurs + def fit(self, X, y): + #count the number of documents in which each word occurs + # @Stefan: Is there a reason why this is called weights and not + # document_frequency? weights = (X>0.0).sum(axis=0) # words that never appear have to be treated differently! + # @Stefan: Doesn't weights == 0 yield a boolean numpy array which can + # be directly used for indexing? indices = np.ravel(np.where(weights == 0)[1]) # calculate (the log of) the inverse document frequencies - self.idf = np.array(np.log(float(X_train.shape[0])/(weights)))[0] + self.idf = np.array(np.log(float(X.shape[0])/(weights)))[0] # words that are not in the training data get will be set to zero self.idf[indices] = 0 - return self @@ -42,7 +40,7 @@ def transform(self, X): @staticmethod def get_properties(): return {'shortname': 'TFIDF', - 'name': 'Term Frequency (times) Inverse Document Frequency', + 'name': 'Term Frequency / Inverse Document Frequency', 'handles_missing_values': False, 'handles_nominal_values': False, 'handles_numerical_features': True, @@ -51,7 +49,7 @@ def get_properties(): 'handles_regression': False, 'handles_classification': True, 'handles_multiclass': True, - 'handles_multilabel': None, + 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': True, # TODO find out what is best used here! diff --git a/AutoSklearn/components/preprocessing/truncatedSVD.py b/AutoSklearn/components/preprocessing/truncatedSVD.py index ed5904e6d0..8d60952b73 100644 --- a/AutoSklearn/components/preprocessing/truncatedSVD.py +++ b/AutoSklearn/components/preprocessing/truncatedSVD.py @@ -1,25 +1,23 @@ import sklearn.decomposition -from HPOlibConfigSpace.configuration_space import ConfigurationSpace, \ - Configuration - -from HPOlibConfigSpace.hyperparameters import IntegerHyperparameter +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm import numpy as np -class TruncatedSVD(AutoSklearnPreprocessingAlgorithm): +class TruncatedSVD():#AutoSklearnPreprocessingAlgorithm): def __init__(self, target_dim, random_state=None): - # TODO: fill out handles_??? - # how to set the maximum of the hyperparameter search space for target dim in a meaningful way? - self.target_dim = target_dim + self.target_dim = int(target_dim) self.random_state = random_state - self.preprocessor=None + self.preprocessor = None def fit(self, X, Y): - self.preprocessor = sklearn.decomposition.TruncatedSVD(min(self.target_dim, X.shape[0]), algorithm='arpack') + target_dim = min(self.target_dim, X.shape[0]) + self.preprocessor = sklearn.decomposition.TruncatedSVD( + target_dim, algorithm='arpack') self.preprocessor.fit(X, Y) return self @@ -38,18 +36,18 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': False, 'prefers_data_normalized': False, - 'handles_regression': None, - 'handles_classification': None, - 'handles_multiclass': None, - 'handles_multilabel': None, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': True, 'preferred_dtype': np.float32} @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - target_dim = IntegerHyperparameter( - "target_dim", 0, 256, default=128) + target_dim = UniformIntegerHyperparameter( + "target_dim", 10, 256, default=128) cs = ConfigurationSpace() cs.add_hyperparameter(target_dim) return cs From f09512c01b57b66d35a99763cab7331b8834f856 Mon Sep 17 00:00:00 2001 From: Manuel Blum Date: Fri, 23 Jan 2015 16:48:11 +0100 Subject: [PATCH 121/352] predictions are now done in batches in order to avoid memory problems --- AutoSklearn/components/regression/gaussian_process.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AutoSklearn/components/regression/gaussian_process.py b/AutoSklearn/components/regression/gaussian_process.py index 07d8d70ebb..c47e499377 100644 --- a/AutoSklearn/components/regression/gaussian_process.py +++ b/AutoSklearn/components/regression/gaussian_process.py @@ -37,7 +37,7 @@ def fit(self, X, Y): def predict(self, X): if self.estimator is None: raise NotImplementedError - return self.estimator.predict(X) + return self.estimator.predict(X, batch_size=512) @staticmethod def get_properties(): From b79ce1893fd692599d5ba888dd9a935b8bdca2a9 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 10 Feb 2015 10:38:12 +0100 Subject: [PATCH 122/352] Prevent GPs with feature learning preprocessing --- AutoSklearn/regression.py | 2 +- tests/components/regression/test_gaussian_process.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/AutoSklearn/regression.py b/AutoSklearn/regression.py index f73a554618..7388f37263 100644 --- a/AutoSklearn/regression.py +++ b/AutoSklearn/regression.py @@ -209,7 +209,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # long # Combinations of tree-based models with feature learning: - regressors_ = ["random_forest", "gradient_boosting"] + regressors_ = ["random_forest", "gradient_boosting", "gaussian_process"] feature_learning_ = ["kitchen_sinks", "sparse_filtering"] for c, f in product(regressors_, feature_learning_): diff --git a/tests/components/regression/test_gaussian_process.py b/tests/components/regression/test_gaussian_process.py index f7d311e80c..736e71dbd7 100644 --- a/tests/components/regression/test_gaussian_process.py +++ b/tests/components/regression/test_gaussian_process.py @@ -11,6 +11,6 @@ def test_default_configuration(self): for i in range(10): predictions, targets = _test_regressor(GaussianProcess, dataset='diabetes') - self.assertAlmostEqual(0.28868771519194569, + self.assertAlmostEqual(0.28867320357768378, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) From c70ac81ab20068512a3e5c4b5b9f931e18c2b8ea Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 10 Feb 2015 10:38:29 +0100 Subject: [PATCH 123/352] Fix doctests --- source/first_steps.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/source/first_steps.rst b/source/first_steps.rst index bc693fac9a..d7fac34546 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -6,7 +6,7 @@ AutoSklearn, feed it to the random search algorithm implemented by the HPOlibConfigSpace package and then train a classifier with a random configuration on the iris dataset. - >>> from AutoSklearn.autosklearn import AutoSklearnClassifier + >>> from AutoSklearn.classification import AutoSklearnClassifier >>> from HPOlibConfigSpace.random_sampler import RandomSampler >>> import sklearn.datasets >>> import sklearn.metrics @@ -24,4 +24,4 @@ configuration on the iris dataset. >>> auto = auto.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = auto.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.92000000000000004 + 0.81999999999999995 From 5e66e20ebbdcca46932c84fb68949211e8c0d834 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 10 Feb 2015 10:39:16 +0100 Subject: [PATCH 124/352] Add truncated SVD; plus add flag handles_dense --- AutoSklearn/classification.py | 4 ++++ AutoSklearn/components/preprocessing/imputation.py | 1 + AutoSklearn/components/preprocessing/kitchen_sinks.py | 1 + AutoSklearn/components/preprocessing/pca.py | 2 +- AutoSklearn/components/preprocessing/rescaling.py | 1 + .../preprocessing/select_percentile_classification.py | 1 + .../components/preprocessing/select_percentile_regression.py | 1 + AutoSklearn/components/preprocessing/sparse_filtering.py | 1 + AutoSklearn/components/preprocessing/tfidf.py | 1 + AutoSklearn/components/preprocessing/truncatedSVD.py | 3 ++- AutoSklearn/regression.py | 4 ++++ tests/test_classification.py | 2 +- tests/test_regression.py | 2 +- tests/test_textclassification.py | 2 +- 14 files changed, 21 insertions(+), 5 deletions(-) diff --git a/AutoSklearn/classification.py b/AutoSklearn/classification.py index 631f1c6f60..59aab3541e 100644 --- a/AutoSklearn/classification.py +++ b/AutoSklearn/classification.py @@ -162,6 +162,10 @@ def get_hyperparameter_search_space(cls, include_estimators=None, available_preprocessors[name].get_properties()[ 'handles_sparse'] is False: continue + elif dataset_properties.get('sparse') is False and \ + available_preprocessors[name].get_properties()[ + 'handles_dense'] is False: + continue preprocessors[name] = available_preprocessors[name] diff --git a/AutoSklearn/components/preprocessing/imputation.py b/AutoSklearn/components/preprocessing/imputation.py index fbe600a6d7..fb30c69d38 100644 --- a/AutoSklearn/components/preprocessing/imputation.py +++ b/AutoSklearn/components/preprocessing/imputation.py @@ -38,6 +38,7 @@ def get_properties(): 'is_deterministic': True, # TODO find out of this is right! 'handles_sparse': True, + 'handles_dense': True, 'preferred_dtype': None} @staticmethod diff --git a/AutoSklearn/components/preprocessing/kitchen_sinks.py b/AutoSklearn/components/preprocessing/kitchen_sinks.py index 6a32ef46aa..d27971b079 100644 --- a/AutoSklearn/components/preprocessing/kitchen_sinks.py +++ b/AutoSklearn/components/preprocessing/kitchen_sinks.py @@ -45,6 +45,7 @@ def get_properties(): 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': True, + 'handles_dense': True, 'preferred_dtype': None} @staticmethod diff --git a/AutoSklearn/components/preprocessing/pca.py b/AutoSklearn/components/preprocessing/pca.py index 812e8edfdc..f7a83bb6e2 100644 --- a/AutoSklearn/components/preprocessing/pca.py +++ b/AutoSklearn/components/preprocessing/pca.py @@ -57,8 +57,8 @@ def get_properties(): 'handles_multilabel': True, # TODO document that we have to be very careful 'is_deterministic': False, - # TODO find out of this is right! 'handles_sparse': False, + 'handles_dense': True, # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/AutoSklearn/components/preprocessing/rescaling.py b/AutoSklearn/components/preprocessing/rescaling.py index 74bc5cbd6c..200ab09d48 100644 --- a/AutoSklearn/components/preprocessing/rescaling.py +++ b/AutoSklearn/components/preprocessing/rescaling.py @@ -42,6 +42,7 @@ def get_properties(): 'is_deterministic': True, # TODO find out of this is right! 'handles_sparse': True, + 'handles_dense': True, # Add something here... 'preferred_dtype': None} diff --git a/AutoSklearn/components/preprocessing/select_percentile_classification.py b/AutoSklearn/components/preprocessing/select_percentile_classification.py index 70c8f00324..9c33b7ddc1 100644 --- a/AutoSklearn/components/preprocessing/select_percentile_classification.py +++ b/AutoSklearn/components/preprocessing/select_percentile_classification.py @@ -42,6 +42,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, + 'handles_dense': True, 'preferred_dtype': None} @staticmethod diff --git a/AutoSklearn/components/preprocessing/select_percentile_regression.py b/AutoSklearn/components/preprocessing/select_percentile_regression.py index 895a2fdd3f..749f86d8a9 100644 --- a/AutoSklearn/components/preprocessing/select_percentile_regression.py +++ b/AutoSklearn/components/preprocessing/select_percentile_regression.py @@ -40,6 +40,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, + 'handles_dense': True, 'preferred_dtype': None} @staticmethod diff --git a/AutoSklearn/components/preprocessing/sparse_filtering.py b/AutoSklearn/components/preprocessing/sparse_filtering.py index 8e9adab40c..314c51e0b9 100644 --- a/AutoSklearn/components/preprocessing/sparse_filtering.py +++ b/AutoSklearn/components/preprocessing/sparse_filtering.py @@ -37,6 +37,7 @@ def get_properties(): 'handles_multilabel': True, 'is_deterministic': False, 'handles_sparse': False, + 'handles_dense': True, 'preferred_dtype': None} diff --git a/AutoSklearn/components/preprocessing/tfidf.py b/AutoSklearn/components/preprocessing/tfidf.py index 1e5f87ea39..833f29457d 100644 --- a/AutoSklearn/components/preprocessing/tfidf.py +++ b/AutoSklearn/components/preprocessing/tfidf.py @@ -52,6 +52,7 @@ def get_properties(): 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': True, + 'handles_dense': True, # TODO find out what is best used here! 'preferred_dtype': np.float32} diff --git a/AutoSklearn/components/preprocessing/truncatedSVD.py b/AutoSklearn/components/preprocessing/truncatedSVD.py index 8d60952b73..1f437964b8 100644 --- a/AutoSklearn/components/preprocessing/truncatedSVD.py +++ b/AutoSklearn/components/preprocessing/truncatedSVD.py @@ -8,7 +8,7 @@ -class TruncatedSVD():#AutoSklearnPreprocessingAlgorithm): +class TruncatedSVD(AutoSklearnPreprocessingAlgorithm): def __init__(self, target_dim, random_state=None): self.target_dim = int(target_dim) self.random_state = random_state @@ -42,6 +42,7 @@ def get_properties(): 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': True, + 'handles_dense': False, 'preferred_dtype': np.float32} @staticmethod diff --git a/AutoSklearn/regression.py b/AutoSklearn/regression.py index 7388f37263..1672c78703 100644 --- a/AutoSklearn/regression.py +++ b/AutoSklearn/regression.py @@ -192,6 +192,10 @@ def get_hyperparameter_search_space(cls, include_estimators=None, available_preprocessors[name].get_properties()[ 'handles_sparse'] is False: continue + elif dataset_properties.get('sparse') is False and \ + available_preprocessors[name].get_properties()[ + 'handles_dense'] is False: + continue elif available_preprocessors[name]. \ get_properties()['handles_regression'] is False: continue diff --git a/tests/test_classification.py b/tests/test_classification.py index d295cc6ed4..a5580b97dc 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -55,7 +55,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(69, len(hyperparameters)) + self.assertEqual(70, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 4, len(conditions)) diff --git a/tests/test_regression.py b/tests/test_regression.py index e3444fc87d..a565c4ff59 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -56,7 +56,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(28, len(hyperparameters)) + self.assertEqual(32, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index 873a1861b3..17bff4ed16 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(68, len(hyperparameters)) + self.assertEqual(69, len(hyperparameters)) # The three parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) From 1d172febfc7bd0233c11e47b637daf874102217c Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 13 Feb 2015 14:17:53 +0100 Subject: [PATCH 125/352] add zero mean, unit variance scaling for Y data in GPs --- .../components/regression/gaussian_process.py | 16 +++++++++++++--- AutoSklearn/util.py | 8 ++++++++ .../regression/test_gaussian_process.py | 2 +- 3 files changed, 22 insertions(+), 4 deletions(-) diff --git a/AutoSklearn/components/regression/gaussian_process.py b/AutoSklearn/components/regression/gaussian_process.py index c47e499377..f4ba0b5962 100644 --- a/AutoSklearn/components/regression/gaussian_process.py +++ b/AutoSklearn/components/regression/gaussian_process.py @@ -1,6 +1,9 @@ +import copy + import numpy as np import sklearn.gaussian_process +import sklearn.preprocessing from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -22,22 +25,29 @@ def __init__(self, nugget, thetaL, thetaU, normalize=False, copy_X=False, # We ignore it self.random_state = random_state self.estimator = None + self.scaler = None def fit(self, X, Y): # Instanciate a Gaussian Process model self.estimator = sklearn.gaussian_process.GaussianProcess( corr='squared_exponential', theta0=np.ones(X.shape[1]) * 1e-1, - thetaL=np.ones(X.shape[1]) * self.thetaL, + thetaL=np.ones(X.shape[1]) * self.thetaL, thetaU=np.ones(X.shape[1]) * self.thetaU, nugget=self.nugget) - self.estimator.fit(X, Y) + self.scaler = sklearn.preprocessing.StandardScaler(copy=True) + self.scaler.fit(Y) + Y_scaled = self.scaler.transform(Y) + self.estimator.fit(X, Y_scaled) return self def predict(self, X): if self.estimator is None: raise NotImplementedError - return self.estimator.predict(X, batch_size=512) + if self.scaler is None: + raise NotImplementedError + Y_pred = self.estimator.predict(X, batch_size=512) + return self.scaler.inverse_transform(Y_pred) @staticmethod def get_properties(): diff --git a/AutoSklearn/util.py b/AutoSklearn/util.py index c95151972f..2e2b63ee4f 100644 --- a/AutoSklearn/util.py +++ b/AutoSklearn/util.py @@ -105,8 +105,16 @@ def _test_regressor(Regressor, dataset='diabetes'): regressor = Regressor(random_state=1, **{hp.hyperparameter.name: hp.value for hp in default.values.values()}) + # Dumb incomplete hacky test to check that we do not alter the data + X_train_hash = hash(str(X_train)) + X_test_hash = hash(str(X_test)) + Y_train_hash = hash(str(Y_train)) predictor = regressor.fit(X_train, Y_train) predictions = predictor.predict(X_test) + if X_train_hash != hash(str(X_train)) or \ + X_test_hash != hash(str(X_test)) or \ + Y_train_hash != hash(str(Y_train)): + raise ValueError("Model modified data") return predictions, Y_test diff --git a/tests/components/regression/test_gaussian_process.py b/tests/components/regression/test_gaussian_process.py index 736e71dbd7..9ce6f57ccd 100644 --- a/tests/components/regression/test_gaussian_process.py +++ b/tests/components/regression/test_gaussian_process.py @@ -11,6 +11,6 @@ def test_default_configuration(self): for i in range(10): predictions, targets = _test_regressor(GaussianProcess, dataset='diabetes') - self.assertAlmostEqual(0.28867320357768378, + self.assertAlmostEqual(0.28876614862410088, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) From 5c91d73a9f4d9d070ea6a2287a1b170f5bfea0c3 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 13 Feb 2015 14:24:46 +0100 Subject: [PATCH 126/352] Change range of thetaL parameter --- AutoSklearn/components/regression/gaussian_process.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AutoSklearn/components/regression/gaussian_process.py b/AutoSklearn/components/regression/gaussian_process.py index c47e499377..d1579755db 100644 --- a/AutoSklearn/components/regression/gaussian_process.py +++ b/AutoSklearn/components/regression/gaussian_process.py @@ -60,7 +60,7 @@ def get_hyperparameter_search_space(dataset_properties=None): nugget = UniformFloatHyperparameter( name="nugget", lower=0.0001, upper=10, default=0.1, log=True) thetaL = UniformFloatHyperparameter( - name="thetaL", lower=1e-5, upper=1e-3, default=1e-4, log=True) + name="thetaL", lower=1e-6, upper=1e-3, default=1e-4, log=True) thetaU = UniformFloatHyperparameter( name="thetaU", lower=0.2, upper=10, default=1.0, log=True) From 51cee3776e0be34058bd2c44b9aa7f5d3acb1ec6 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 19 Feb 2015 11:14:08 +0100 Subject: [PATCH 127/352] Rename project from AutoSklearn to ParamSklearn --- Makefile | 8 ++-- {AutoSklearn => ParamSklearn}/__init__.py | 4 +- {AutoSklearn => ParamSklearn}/base.py | 8 ++-- .../classification.py | 12 ++--- .../components/__init__.py | 22 ++++----- .../components/classification/__init__.py | 4 +- .../components/classification/adaboost.py | 0 .../components/classification/extra_trees.py | 4 +- .../classification/gradient_boosting.py | 4 +- .../classification/k_nearest_neighbors.py | 4 +- .../components/classification/liblinear.py | 4 +- .../components/classification/libsvm_svc.py | 4 +- .../classification/random_forest.py | 4 +- .../components/classification/sgd.py | 6 +-- .../components/classification_base.py | 8 ++-- .../components/preprocessing/__init__.py | 4 +- .../components/preprocessing/imputation.py | 6 +-- .../components/preprocessing/kitchen_sinks.py | 6 +-- .../components/preprocessing/pca.py | 6 +-- .../components/preprocessing/rescaling.py | 6 +-- .../preprocessing/select_percentile.py | 0 .../select_percentile_classification.py | 7 +-- .../select_percentile_regression.py | 7 +-- .../preprocessing/sparse_filtering.py | 6 +-- .../components/preprocessing/tfidf.py | 6 +-- .../components/preprocessing/truncatedSVD.py | 6 +-- .../components/preprocessor_base.py | 8 ++-- .../components/regression/__init__.py | 4 +- .../components/regression/gaussian_process.py | 4 +- .../regression/gradient_boosting.py | 4 +- .../components/regression/random_forest.py | 4 +- .../components/regression/ridge_regression.py | 4 +- .../regression/support_vector_regression.py | 4 +- .../components/regression_base.py | 8 ++-- .../implementations/MinMaxScaler.py | 0 .../implementations/OneHotEncoder.py | 0 .../implementations/SparseFiltering.py | 0 .../implementations/StandardScaler.py | 0 .../implementations/__init__.py | 0 .../implementations/forest.py | 0 .../implementations/util.py | 0 {AutoSklearn => ParamSklearn}/regression.py | 18 +++---- .../textclassification.py | 6 +-- {AutoSklearn => ParamSklearn}/util.py | 0 README.md | 6 +-- make.bat | 4 +- misc/random_sampling.py | 6 +-- setup.py | 4 +- source/api.rst | 6 +-- source/components.rst | 20 ++++---- source/conf.py | 14 +++--- source/extending_AutoSklearn.rst | 4 -- source/extending_ParamSklearn.rst | 4 ++ source/first_steps.rst | 16 +++---- source/index.rst | 8 ++-- source/installation.rst | 4 +- source/introduction.rst | 12 ++--- .../classification/test_extra_trees.py | 4 +- .../classification/test_gradient_boosting.py | 4 +- .../classification/test_k_nearest_neighbor.py | 4 +- .../classification/test_liblinear.py | 4 +- .../classification/test_libsvm_svc.py | 4 +- .../classification/test_random_forest.py | 4 +- tests/components/classification/test_sgd.py | 4 +- .../preprocessing/test_imputation.py | 4 +- .../preprocessing/test_kitchen_sinks.py | 4 +- tests/components/preprocessing/test_pca.py | 4 +- .../components/preprocessing/test_scaling.py | 4 +- .../test_select_percentile_classification.py | 4 +- .../test_select_percentile_regression.py | 4 +- .../preprocessing/test_sparse_filtering.py | 4 +- .../regression/test_gaussian_process.py | 4 +- .../regression/test_gradient_boosting.py | 4 +- .../regression/test_random_forests.py | 4 +- .../regression/test_ridge_regression.py | 6 +-- .../test_support_vector_regression.py | 4 +- tests/implementations/test_OneHotEncoder.py | 2 +- tests/implementations/test_minmaxscaler.py | 4 +- .../implementations/test_sparse_filtering.py | 2 +- tests/implementations/test_standard_scaler.py | 4 +- tests/implementations/test_util.py | 2 +- tests/test_classification.py | 48 +++++++++---------- tests/test_doctests.py | 4 +- tests/test_regression.py | 44 ++++++++--------- tests/test_textclassification.py | 4 +- 85 files changed, 261 insertions(+), 259 deletions(-) rename {AutoSklearn => ParamSklearn}/__init__.py (59%) rename {AutoSklearn => ParamSklearn}/base.py (98%) rename {AutoSklearn => ParamSklearn}/classification.py (94%) rename {AutoSklearn => ParamSklearn}/components/__init__.py (54%) rename {AutoSklearn => ParamSklearn}/components/classification/__init__.py (82%) create mode 100644 ParamSklearn/components/classification/adaboost.py rename {AutoSklearn => ParamSklearn}/components/classification/extra_trees.py (98%) rename {AutoSklearn => ParamSklearn}/components/classification/gradient_boosting.py (98%) rename {AutoSklearn => ParamSklearn}/components/classification/k_nearest_neighbors.py (96%) rename {AutoSklearn => ParamSklearn}/components/classification/liblinear.py (97%) rename {AutoSklearn => ParamSklearn}/components/classification/libsvm_svc.py (97%) rename {AutoSklearn => ParamSklearn}/components/classification/random_forest.py (97%) rename {AutoSklearn => ParamSklearn}/components/classification/sgd.py (97%) rename {AutoSklearn => ParamSklearn}/components/classification_base.py (95%) rename {AutoSklearn => ParamSklearn}/components/preprocessing/__init__.py (83%) rename {AutoSklearn => ParamSklearn}/components/preprocessing/imputation.py (92%) rename {AutoSklearn => ParamSklearn}/components/preprocessing/kitchen_sinks.py (92%) rename {AutoSklearn => ParamSklearn}/components/preprocessing/pca.py (94%) rename {AutoSklearn => ParamSklearn}/components/preprocessing/rescaling.py (92%) rename {AutoSklearn => ParamSklearn}/components/preprocessing/select_percentile.py (100%) rename {AutoSklearn => ParamSklearn}/components/preprocessing/select_percentile_classification.py (91%) rename {AutoSklearn => ParamSklearn}/components/preprocessing/select_percentile_regression.py (90%) rename {AutoSklearn => ParamSklearn}/components/preprocessing/sparse_filtering.py (92%) rename {AutoSklearn => ParamSklearn}/components/preprocessing/tfidf.py (93%) rename {AutoSklearn => ParamSklearn}/components/preprocessing/truncatedSVD.py (91%) rename {AutoSklearn => ParamSklearn}/components/preprocessor_base.py (95%) rename {AutoSklearn => ParamSklearn}/components/regression/__init__.py (83%) rename {AutoSklearn => ParamSklearn}/components/regression/gaussian_process.py (96%) rename {AutoSklearn => ParamSklearn}/components/regression/gradient_boosting.py (98%) rename {AutoSklearn => ParamSklearn}/components/regression/random_forest.py (97%) rename {AutoSklearn => ParamSklearn}/components/regression/ridge_regression.py (95%) rename {AutoSklearn => ParamSklearn}/components/regression/support_vector_regression.py (97%) rename {AutoSklearn => ParamSklearn}/components/regression_base.py (95%) rename {AutoSklearn => ParamSklearn}/implementations/MinMaxScaler.py (100%) rename {AutoSklearn => ParamSklearn}/implementations/OneHotEncoder.py (100%) rename {AutoSklearn => ParamSklearn}/implementations/SparseFiltering.py (100%) rename {AutoSklearn => ParamSklearn}/implementations/StandardScaler.py (100%) rename {AutoSklearn => ParamSklearn}/implementations/__init__.py (100%) rename {AutoSklearn => ParamSklearn}/implementations/forest.py (100%) rename {AutoSklearn => ParamSklearn}/implementations/util.py (100%) rename {AutoSklearn => ParamSklearn}/regression.py (93%) rename {AutoSklearn => ParamSklearn}/textclassification.py (89%) rename {AutoSklearn => ParamSklearn}/util.py (100%) delete mode 100644 source/extending_AutoSklearn.rst create mode 100644 source/extending_ParamSklearn.rst diff --git a/Makefile b/Makefile index 90b4515b7f..97be37a828 100644 --- a/Makefile +++ b/Makefile @@ -85,17 +85,17 @@ qthelp: @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/AutoSklearn.qhcp" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/ParamSklearn.qhcp" @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/AutoSklearn.qhc" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/ParamSklearn.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/AutoSklearn" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/AutoSklearn" + @echo "# mkdir -p $$HOME/.local/share/devhelp/ParamSklearn" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/ParamSklearn" @echo "# devhelp" epub: diff --git a/AutoSklearn/__init__.py b/ParamSklearn/__init__.py similarity index 59% rename from AutoSklearn/__init__.py rename to ParamSklearn/__init__.py index c217c9ba88..0743d23c81 100644 --- a/AutoSklearn/__init__.py +++ b/ParamSklearn/__init__.py @@ -1,7 +1,7 @@ -"""AutoSklearn is a python package to solve the Combined Algorithm Selection and +"""ParamSklearn is a python package to solve the Combined Algorithm Selection and Hyperparameter Optimization problem (CASH) for the scikit-learn package. -AutoSklearn provides a configuration space spanning a huge part of the +ParamSklearn provides a configuration space spanning a huge part of the scikit-learn models. This configuration space can be searched by one of the hyperparameter optimization algorithms in HPOlib.""" diff --git a/AutoSklearn/base.py b/ParamSklearn/base.py similarity index 98% rename from AutoSklearn/base.py rename to ParamSklearn/base.py index dc1106e12f..f533f6d34b 100644 --- a/AutoSklearn/base.py +++ b/ParamSklearn/base.py @@ -4,7 +4,7 @@ import sklearn if sklearn.__version__ != "0.15.2": - raise ValueError("AutoSklearn supports only sklearn version 0.15.2, " + raise ValueError("ParamSklearn supports only sklearn version 0.15.2, " "you installed %s." % sklearn.__version__) from sklearn.base import BaseEstimator @@ -21,8 +21,8 @@ from . import components as components -class AutoSklearnBaseEstimator(BaseEstimator): - """Base class for all AutoSklearn task models. +class ParamSklearnBaseEstimator(BaseEstimator): + """Base class for all ParamSklearn task models. Notes ----- @@ -266,7 +266,7 @@ def get_hyperparameter_search_space(cls, estimator_name, Returns ------- cs : HPOlibConfigSpace.configuration_space.Configuration - The configuration space describing the AutoSklearnClassifier. + The configuration space describing the ParamSklearnClassifier. """ diff --git a/AutoSklearn/classification.py b/ParamSklearn/classification.py similarity index 94% rename from AutoSklearn/classification.py rename to ParamSklearn/classification.py index 59aab3541e..8b28fb106f 100644 --- a/AutoSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -5,10 +5,10 @@ from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction from . import components as components -from .base import AutoSklearnBaseEstimator +from .base import ParamSklearnBaseEstimator -class AutoSklearnClassifier(ClassifierMixin, AutoSklearnBaseEstimator): +class ParamSklearnClassifier(ClassifierMixin, ParamSklearnBaseEstimator): """This class implements the classification task. It implements a pipeline, which includes one preprocessing step and one @@ -36,12 +36,12 @@ class AutoSklearnClassifier(ClassifierMixin, AutoSklearnBaseEstimator): ---------- _estimator : The underlying scikit-learn classification model. This variable is assigned after a call to the - :meth:`AutoSklearn.autosklearn.AutoSklearnClassifier.fit` method. + :meth:`ParamSklearn.classification.ParamSklearnClassifier.fit` method. _preprocessor : The underlying scikit-learn preprocessing algorithm. This variable is only assigned if a preprocessor is specified and after a call to the - :meth:`AutoSklearn.autosklearn.AutoSklearnClassifier.fit` method. + :meth:`ParamSklearn.classification.ParamSklearnClassifier.fit` method. See also -------- @@ -91,7 +91,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, dataset_properties = dict() # Compile a list of all estimator objects for this problem - available_classifiers = AutoSklearnClassifier._get_estimator_components() + available_classifiers = ParamSklearnClassifier._get_estimator_components() classifiers = dict() for name in available_classifiers: @@ -170,7 +170,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, preprocessors[name] = available_preprocessors[name] # Get the configuration space - configuration_space = super(AutoSklearnClassifier, cls)\ + configuration_space = super(ParamSklearnClassifier, cls)\ .get_hyperparameter_search_space( cls._get_estimator_hyperparameter_name(), classifier_default, classifiers, preprocessors, dataset_properties, diff --git a/AutoSklearn/components/__init__.py b/ParamSklearn/components/__init__.py similarity index 54% rename from AutoSklearn/components/__init__.py rename to ParamSklearn/components/__init__.py index 6d86e4f5d2..f146c07fb4 100644 --- a/AutoSklearn/components/__init__.py +++ b/ParamSklearn/components/__init__.py @@ -1,7 +1,7 @@ -"""AutoSklearn can be easily extended with new classification and -preprocessing methods. At import time, AutoSklearn checks the directory -``AutoSklearn/components/classification`` for classification algorithms and -``AutoSklearn/components/preprocessing`` for preprocessing algorithms. To be +"""ParamSklearn can be easily extended with new classification and +preprocessing methods. At import time, ParamSklearn checks the directory +``ParamSklearn/components/classification`` for classification algorithms and +``ParamSklearn/components/preprocessing`` for preprocessing algorithms. To be found, the algorithm must be provide a class implementing one of the given interfaces. @@ -16,21 +16,21 @@ but rather recommend to implement an algorithm in a scikit-learn compatible way (`see here `_). Such an implementation should then be put into the `implementation` directory. -and can then be easily wrapped with to become a component in AutoSklearn. +and can then be easily wrapped with to become a component in ParamSklearn. Classification ============== -The AutoSklearnClassificationAlgorithm provides an interface for -Classification Algorithms inside AutoSklearn. It provides four important +The ParamSklearnClassificationAlgorithm provides an interface for +Classification Algorithms inside ParamSklearn. It provides four important functions. Two of them, -:meth:`get_hyperparameter_search_space() ` +:meth:`get_hyperparameter_search_space() ` and -:meth:`get_properties() ` +:meth:`get_properties() ` are used to automatically create a valid configuration space. The other two, -:meth:`fit() ` and -:meth:`predict() ` +:meth:`fit() ` and +:meth:`predict() ` are an implementation of the `scikit-learn predictor API `_. Preprocessing diff --git a/AutoSklearn/components/classification/__init__.py b/ParamSklearn/components/classification/__init__.py similarity index 82% rename from AutoSklearn/components/classification/__init__.py rename to ParamSklearn/components/classification/__init__.py index 32960f493b..abb44e479b 100644 --- a/AutoSklearn/components/classification/__init__.py +++ b/ParamSklearn/components/classification/__init__.py @@ -5,7 +5,7 @@ import pkgutil import sys -from ..classification_base import AutoSklearnClassificationAlgorithm +from ..classification_base import ParamSklearnClassificationAlgorithm classifier_directory = os.path.split(__file__)[0] _classifiers = {} @@ -17,7 +17,7 @@ module = module_loader.find_module(module_name).load_module(full_module_name) for member_name, obj in inspect.getmembers(module): - if inspect.isclass(obj) and AutoSklearnClassificationAlgorithm in obj.__bases__: + if inspect.isclass(obj) and ParamSklearnClassificationAlgorithm in obj.__bases__: # TODO test if the obj implements the interface # Keep in mind that this only instantiates the ensemble_wrapper, # but not the real target classifier diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/AutoSklearn/components/classification/extra_trees.py b/ParamSklearn/components/classification/extra_trees.py similarity index 98% rename from AutoSklearn/components/classification/extra_trees.py rename to ParamSklearn/components/classification/extra_trees.py index 57f80174e2..dc7a081bcc 100644 --- a/AutoSklearn/components/classification/extra_trees.py +++ b/ParamSklearn/components/classification/extra_trees.py @@ -6,11 +6,11 @@ UnParametrizedHyperparameter, Constant from HPOlibConfigSpace.conditions import EqualsCondition -from ..classification_base import AutoSklearnClassificationAlgorithm +from ..classification_base import ParamSklearnClassificationAlgorithm # get our own forests to replace the sklearn ones from ...implementations import forest -class ExtraTreesClassifier(AutoSklearnClassificationAlgorithm): +class ExtraTreesClassifier(ParamSklearnClassificationAlgorithm): def __init__(self, n_estimators, criterion, min_samples_leaf, min_samples_split, max_features, max_leaf_nodes_or_max_depth="max_depth", #use_max_depth=False, diff --git a/AutoSklearn/components/classification/gradient_boosting.py b/ParamSklearn/components/classification/gradient_boosting.py similarity index 98% rename from AutoSklearn/components/classification/gradient_boosting.py rename to ParamSklearn/components/classification/gradient_boosting.py index 2affc752ed..169321d9f8 100644 --- a/AutoSklearn/components/classification/gradient_boosting.py +++ b/ParamSklearn/components/classification/gradient_boosting.py @@ -8,10 +8,10 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ..classification_base import AutoSklearnClassificationAlgorithm +from ..classification_base import ParamSklearnClassificationAlgorithm -class GradientBoostingClassifier(AutoSklearnClassificationAlgorithm): +class GradientBoostingClassifier(ParamSklearnClassificationAlgorithm): def __init__(self, learning_rate, n_estimators, subsample, min_samples_split, min_samples_leaf, max_features, max_depth, diff --git a/AutoSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py similarity index 96% rename from AutoSklearn/components/classification/k_nearest_neighbors.py rename to ParamSklearn/components/classification/k_nearest_neighbors.py index d32fe6c1b5..b2aa9405d5 100644 --- a/AutoSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -5,10 +5,10 @@ Constant, UnParametrizedHyperparameter, UniformIntegerHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition -from ..classification_base import AutoSklearnClassificationAlgorithm +from ..classification_base import ParamSklearnClassificationAlgorithm -class KNearestNeighborsClassifier(AutoSklearnClassificationAlgorithm): +class KNearestNeighborsClassifier(ParamSklearnClassificationAlgorithm): def __init__(self, n_neighbors, weights, metric, algorithm='auto', p=2, leaf_size=30, random_state=None): diff --git a/AutoSklearn/components/classification/liblinear.py b/ParamSklearn/components/classification/liblinear.py similarity index 97% rename from AutoSklearn/components/classification/liblinear.py rename to ParamSklearn/components/classification/liblinear.py index aa6299f21c..36293a41c1 100644 --- a/AutoSklearn/components/classification/liblinear.py +++ b/ParamSklearn/components/classification/liblinear.py @@ -7,10 +7,10 @@ from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ ForbiddenAndConjunction -from ..classification_base import AutoSklearnClassificationAlgorithm +from ..classification_base import ParamSklearnClassificationAlgorithm from ...implementations.util import softmax -class LibLinear_SVC(AutoSklearnClassificationAlgorithm): +class LibLinear_SVC(ParamSklearnClassificationAlgorithm): # Liblinear is not deterministic as it uses a RNG inside # TODO: maybe add dual and crammer-singer? def __init__(self, penalty, loss, dual, tol, C, multi_class, diff --git a/AutoSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py similarity index 97% rename from AutoSklearn/components/classification/libsvm_svc.py rename to ParamSklearn/components/classification/libsvm_svc.py index 45c592c0f0..84eb07da5a 100644 --- a/AutoSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -7,9 +7,9 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter -from ..classification_base import AutoSklearnClassificationAlgorithm +from ..classification_base import ParamSklearnClassificationAlgorithm -class LibSVM_SVC(AutoSklearnClassificationAlgorithm): +class LibSVM_SVC(ParamSklearnClassificationAlgorithm): def __init__(self, C, kernel, gamma, shrinking, tol, class_weight, max_iter, degree=3, coef0=0, random_state=None): self.C = C diff --git a/AutoSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py similarity index 97% rename from AutoSklearn/components/classification/random_forest.py rename to ParamSklearn/components/classification/random_forest.py index 8d4ea9cda6..bc65f244ac 100644 --- a/AutoSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -5,11 +5,11 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ..classification_base import AutoSklearnClassificationAlgorithm +from ..classification_base import ParamSklearnClassificationAlgorithm # get our own forests to replace the sklearn ones from ...implementations import forest -class RandomForest(AutoSklearnClassificationAlgorithm): +class RandomForest(ParamSklearnClassificationAlgorithm): def __init__(self, n_estimators, criterion, max_features, max_depth, min_samples_split, min_samples_leaf, bootstrap, max_leaf_nodes, random_state=None, n_jobs=1): diff --git a/AutoSklearn/components/classification/sgd.py b/ParamSklearn/components/classification/sgd.py similarity index 97% rename from AutoSklearn/components/classification/sgd.py rename to ParamSklearn/components/classification/sgd.py index 247cb313ec..c064d13530 100644 --- a/AutoSklearn/components/classification/sgd.py +++ b/ParamSklearn/components/classification/sgd.py @@ -7,10 +7,10 @@ UniformIntegerHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition, OrConjunction -from ..classification_base import AutoSklearnClassificationAlgorithm +from ..classification_base import ParamSklearnClassificationAlgorithm from ...implementations.util import softmax -class SGD(AutoSklearnClassificationAlgorithm): +class SGD(ParamSklearnClassificationAlgorithm): def __init__(self, loss, penalty, alpha, fit_intercept, n_iter, learning_rate, class_weight, l1_ratio=0.15, epsilon=0.1, eta0=0.01, power_t=0.5, random_state=None): @@ -141,4 +141,4 @@ def get_hyperparameter_search_space(dataset_properties=None): return cs def __str__(self): - return "AutoSklearn StochasticGradientClassifier" + return "ParamSklearn StochasticGradientClassifier" diff --git a/AutoSklearn/components/classification_base.py b/ParamSklearn/components/classification_base.py similarity index 95% rename from AutoSklearn/components/classification_base.py rename to ParamSklearn/components/classification_base.py index cbeadce25a..d9ab97bf8c 100644 --- a/AutoSklearn/components/classification_base.py +++ b/ParamSklearn/components/classification_base.py @@ -1,9 +1,9 @@ -class AutoSklearnClassificationAlgorithm(object): +class ParamSklearnClassificationAlgorithm(object): """Provide an abstract interface for classification algorithms in - AutoSklearn. + ParamSklearn. Make a subclass of this and put it into the directory - `AutoSklearn/components/classification` to make it available.""" + `ParamSklearn/components/classification` to make it available.""" def __init__(self): self.estimator = None self.properties = None @@ -119,4 +119,4 @@ def get_estimator(self): def __str__(self): name = self.get_properties()['name'] - return "AutoSklearn %" % name + return "ParamSklearn %" % name diff --git a/AutoSklearn/components/preprocessing/__init__.py b/ParamSklearn/components/preprocessing/__init__.py similarity index 83% rename from AutoSklearn/components/preprocessing/__init__.py rename to ParamSklearn/components/preprocessing/__init__.py index ffdb628b8f..7617039858 100644 --- a/AutoSklearn/components/preprocessing/__init__.py +++ b/ParamSklearn/components/preprocessing/__init__.py @@ -5,7 +5,7 @@ import pkgutil import sys -from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm +from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm preprocessors_directory = os.path.split(__file__)[0] _preprocessors = {} @@ -17,7 +17,7 @@ module = module_loader.find_module(module_name).load_module(full_module_name) for member_name, obj in inspect.getmembers(module): - if inspect.isclass(obj) and AutoSklearnPreprocessingAlgorithm in obj.__bases__: + if inspect.isclass(obj) and ParamSklearnPreprocessingAlgorithm in obj.__bases__: # TODO test if the obj implements the interface # Keep in mind that this only instantiates the ensemble_wrapper, # but not the real target classifier diff --git a/AutoSklearn/components/preprocessing/imputation.py b/ParamSklearn/components/preprocessing/imputation.py similarity index 92% rename from AutoSklearn/components/preprocessing/imputation.py rename to ParamSklearn/components/preprocessing/imputation.py index fb30c69d38..4c2efc113f 100644 --- a/AutoSklearn/components/preprocessing/imputation.py +++ b/ParamSklearn/components/preprocessing/imputation.py @@ -3,10 +3,10 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm +from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm -class Imputation(AutoSklearnPreprocessingAlgorithm): +class Imputation(ParamSklearnPreprocessingAlgorithm): def __init__(self, strategy, random_state=None): # TODO pay attention to the cases when a copy is made (CSR matrices) self.strategy = strategy @@ -52,4 +52,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "AutoSklearn %" % name + return "ParamSklearn %" % name diff --git a/AutoSklearn/components/preprocessing/kitchen_sinks.py b/ParamSklearn/components/preprocessing/kitchen_sinks.py similarity index 92% rename from AutoSklearn/components/preprocessing/kitchen_sinks.py rename to ParamSklearn/components/preprocessing/kitchen_sinks.py index d27971b079..b4c4397648 100644 --- a/AutoSklearn/components/preprocessing/kitchen_sinks.py +++ b/ParamSklearn/components/preprocessing/kitchen_sinks.py @@ -4,9 +4,9 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter -from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm +from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm -class RandomKitchenSinks(AutoSklearnPreprocessingAlgorithm): +class RandomKitchenSinks(ParamSklearnPreprocessingAlgorithm): def __init__(self, gamma, n_components, random_state = None): """ Parameters: @@ -61,5 +61,5 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "AutoSklearn %" % name + return "ParamSklearn %" % name diff --git a/AutoSklearn/components/preprocessing/pca.py b/ParamSklearn/components/preprocessing/pca.py similarity index 94% rename from AutoSklearn/components/preprocessing/pca.py rename to ParamSklearn/components/preprocessing/pca.py index f7a83bb6e2..895b234d8b 100644 --- a/AutoSklearn/components/preprocessing/pca.py +++ b/ParamSklearn/components/preprocessing/pca.py @@ -5,10 +5,10 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter -from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm +from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm -class PCA(AutoSklearnPreprocessingAlgorithm): +class PCA(ParamSklearnPreprocessingAlgorithm): def __init__(self, keep_variance, whiten, random_state=None): # TODO document that this implementation does not allow the number of # components to be specified, but rather the amount of variance to @@ -75,4 +75,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "AutoSklearn %" % name + return "ParamSklearn %" % name diff --git a/AutoSklearn/components/preprocessing/rescaling.py b/ParamSklearn/components/preprocessing/rescaling.py similarity index 92% rename from AutoSklearn/components/preprocessing/rescaling.py rename to ParamSklearn/components/preprocessing/rescaling.py index 200ab09d48..f723f49092 100644 --- a/AutoSklearn/components/preprocessing/rescaling.py +++ b/ParamSklearn/components/preprocessing/rescaling.py @@ -3,10 +3,10 @@ from ...implementations.StandardScaler import StandardScaler from ...implementations.MinMaxScaler import MinMaxScaler -from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm +from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm -class Rescaling(AutoSklearnPreprocessingAlgorithm): +class Rescaling(ParamSklearnPreprocessingAlgorithm): def __init__(self, strategy, random_state=None): # TODO pay attention to the cases when a copy is made self.strategy = strategy @@ -57,4 +57,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "AutoSklearn %" % name + return "ParamSklearn %" % name diff --git a/AutoSklearn/components/preprocessing/select_percentile.py b/ParamSklearn/components/preprocessing/select_percentile.py similarity index 100% rename from AutoSklearn/components/preprocessing/select_percentile.py rename to ParamSklearn/components/preprocessing/select_percentile.py diff --git a/AutoSklearn/components/preprocessing/select_percentile_classification.py b/ParamSklearn/components/preprocessing/select_percentile_classification.py similarity index 91% rename from AutoSklearn/components/preprocessing/select_percentile_classification.py rename to ParamSklearn/components/preprocessing/select_percentile_classification.py index 9c33b7ddc1..a1d317a5d3 100644 --- a/AutoSklearn/components/preprocessing/select_percentile_classification.py +++ b/ParamSklearn/components/preprocessing/select_percentile_classification.py @@ -3,11 +3,12 @@ import sklearn.feature_selection -from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm +from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm from select_percentile import SelectPercentileBase -class SelectPercentileClassification(SelectPercentileBase, AutoSklearnPreprocessingAlgorithm): +class SelectPercentileClassification(SelectPercentileBase, + ParamSklearnPreprocessingAlgorithm): def __init__(self, percentile, score_func="chi2", random_state=None): """ Parameters: @@ -66,5 +67,5 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "AutoSklearn %" % name + return "ParamSklearn %" % name diff --git a/AutoSklearn/components/preprocessing/select_percentile_regression.py b/ParamSklearn/components/preprocessing/select_percentile_regression.py similarity index 90% rename from AutoSklearn/components/preprocessing/select_percentile_regression.py rename to ParamSklearn/components/preprocessing/select_percentile_regression.py index 749f86d8a9..9844d813f5 100644 --- a/AutoSklearn/components/preprocessing/select_percentile_regression.py +++ b/ParamSklearn/components/preprocessing/select_percentile_regression.py @@ -3,11 +3,12 @@ import sklearn.feature_selection -from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm +from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm from select_percentile import SelectPercentileBase -class SelectPercentileRegression(SelectPercentileBase, AutoSklearnPreprocessingAlgorithm): +class SelectPercentileRegression(SelectPercentileBase, + ParamSklearnPreprocessingAlgorithm): def __init__(self, percentile, score_func="f_classif", random_state=None): """ Parameters: @@ -58,5 +59,5 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "AutoSklearn %" % name + return "ParamSklearn %" % name diff --git a/AutoSklearn/components/preprocessing/sparse_filtering.py b/ParamSklearn/components/preprocessing/sparse_filtering.py similarity index 92% rename from AutoSklearn/components/preprocessing/sparse_filtering.py rename to ParamSklearn/components/preprocessing/sparse_filtering.py index 314c51e0b9..184bd4a1a8 100644 --- a/AutoSklearn/components/preprocessing/sparse_filtering.py +++ b/ParamSklearn/components/preprocessing/sparse_filtering.py @@ -2,10 +2,10 @@ Configuration from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter -from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm +from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm from ...implementations.SparseFiltering import SparseFiltering as SparseFilteringImpl -class SparseFiltering(AutoSklearnPreprocessingAlgorithm): +class SparseFiltering(ParamSklearnPreprocessingAlgorithm): def __init__(self, N, maxiter=100, random_state=None): self.N = N @@ -55,4 +55,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "AutoSklearn %" % name + return "ParamSklearn %" % name diff --git a/AutoSklearn/components/preprocessing/tfidf.py b/ParamSklearn/components/preprocessing/tfidf.py similarity index 93% rename from AutoSklearn/components/preprocessing/tfidf.py rename to ParamSklearn/components/preprocessing/tfidf.py index 833f29457d..ea765186fd 100644 --- a/AutoSklearn/components/preprocessing/tfidf.py +++ b/ParamSklearn/components/preprocessing/tfidf.py @@ -2,12 +2,12 @@ Configuration -from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm +from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm import numpy as np -class TFIDF(object):#AutoSklearnPreprocessingAlgorithm): +class TFIDF(object):#ParamSklearnPreprocessingAlgorithm): def __init__(self, random_state=None): # This is implementation is for sparse data only! It will make inplace changes to the data! @@ -63,4 +63,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "AutoSklearn %" % name + return "ParamSklearn %" % name diff --git a/AutoSklearn/components/preprocessing/truncatedSVD.py b/ParamSklearn/components/preprocessing/truncatedSVD.py similarity index 91% rename from AutoSklearn/components/preprocessing/truncatedSVD.py rename to ParamSklearn/components/preprocessing/truncatedSVD.py index 1f437964b8..a45456b092 100644 --- a/AutoSklearn/components/preprocessing/truncatedSVD.py +++ b/ParamSklearn/components/preprocessing/truncatedSVD.py @@ -3,12 +3,12 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter -from ..preprocessor_base import AutoSklearnPreprocessingAlgorithm +from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm import numpy as np -class TruncatedSVD(AutoSklearnPreprocessingAlgorithm): +class TruncatedSVD(ParamSklearnPreprocessingAlgorithm): def __init__(self, target_dim, random_state=None): self.target_dim = int(target_dim) self.random_state = random_state @@ -55,4 +55,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "AutoSklearn %" % name + return "ParamSklearn %" % name diff --git a/AutoSklearn/components/preprocessor_base.py b/ParamSklearn/components/preprocessor_base.py similarity index 95% rename from AutoSklearn/components/preprocessor_base.py rename to ParamSklearn/components/preprocessor_base.py index c3d502ab6d..cb6fe6727f 100644 --- a/AutoSklearn/components/preprocessor_base.py +++ b/ParamSklearn/components/preprocessor_base.py @@ -1,9 +1,9 @@ -class AutoSklearnPreprocessingAlgorithm(object): +class ParamSklearnPreprocessingAlgorithm(object): """Provide an abstract interface for preprocessing algorithms in - AutoSklearn. + ParamSklearn. Make a subclass of this and put it into the directory - `AutoSklearn/components/preprocessing` to make it available.""" + `ParamSklearn/components/preprocessing` to make it available.""" def __init__(self): self.preprocessor = None @@ -108,4 +108,4 @@ def get_preprocessor(self): def __str__(self): name = self.get_properties()['name'] - return "AutoSklearn %" % name + return "ParamSklearn %" % name diff --git a/AutoSklearn/components/regression/__init__.py b/ParamSklearn/components/regression/__init__.py similarity index 83% rename from AutoSklearn/components/regression/__init__.py rename to ParamSklearn/components/regression/__init__.py index ea64ab5909..8a843f0232 100644 --- a/AutoSklearn/components/regression/__init__.py +++ b/ParamSklearn/components/regression/__init__.py @@ -5,7 +5,7 @@ import pkgutil import sys -from ..regression_base import AutoSklearnRegressionAlgorithm +from ..regression_base import ParamSklearnRegressionAlgorithm regressor_directory = os.path.split(__file__)[0] _regressors = {} @@ -17,7 +17,7 @@ module = module_loader.find_module(module_name).load_module(full_module_name) for member_name, obj in inspect.getmembers(module): - if inspect.isclass(obj) and AutoSklearnRegressionAlgorithm in obj.__bases__: + if inspect.isclass(obj) and ParamSklearnRegressionAlgorithm in obj.__bases__: # TODO test if the obj implements the interface # Keep in mind that this only instantiates the ensemble_wrapper, # but not the real target classifier diff --git a/AutoSklearn/components/regression/gaussian_process.py b/ParamSklearn/components/regression/gaussian_process.py similarity index 96% rename from AutoSklearn/components/regression/gaussian_process.py rename to ParamSklearn/components/regression/gaussian_process.py index 0473af5f18..1de9b0a60c 100644 --- a/AutoSklearn/components/regression/gaussian_process.py +++ b/ParamSklearn/components/regression/gaussian_process.py @@ -10,10 +10,10 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ..regression_base import AutoSklearnRegressionAlgorithm +from ..regression_base import ParamSklearnRegressionAlgorithm -class GaussianProcess(AutoSklearnRegressionAlgorithm): +class GaussianProcess(ParamSklearnRegressionAlgorithm): def __init__(self, nugget, thetaL, thetaU, normalize=False, copy_X=False, tol=0.001, optimizer='fmin_cobyla', random_state=None): self.nugget = float(nugget) diff --git a/AutoSklearn/components/regression/gradient_boosting.py b/ParamSklearn/components/regression/gradient_boosting.py similarity index 98% rename from AutoSklearn/components/regression/gradient_boosting.py rename to ParamSklearn/components/regression/gradient_boosting.py index 9af650450f..ddc1625f0f 100644 --- a/AutoSklearn/components/regression/gradient_boosting.py +++ b/ParamSklearn/components/regression/gradient_boosting.py @@ -7,10 +7,10 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ..regression_base import AutoSklearnRegressionAlgorithm +from ..regression_base import ParamSklearnRegressionAlgorithm -class GradientBoosting(AutoSklearnRegressionAlgorithm): +class GradientBoosting(ParamSklearnRegressionAlgorithm): def __init__(self, loss, learning_rate, subsample, min_samples_split, min_samples_leaf, max_depth, max_features, alpha=0.9, diff --git a/AutoSklearn/components/regression/random_forest.py b/ParamSklearn/components/regression/random_forest.py similarity index 97% rename from AutoSklearn/components/regression/random_forest.py rename to ParamSklearn/components/regression/random_forest.py index 96a396b03f..bca510a932 100644 --- a/AutoSklearn/components/regression/random_forest.py +++ b/ParamSklearn/components/regression/random_forest.py @@ -5,11 +5,11 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ..regression_base import AutoSklearnRegressionAlgorithm +from ..regression_base import ParamSklearnRegressionAlgorithm # get our own forests to replace the sklearn ones from ...implementations import forest -class RandomForest(AutoSklearnRegressionAlgorithm): +class RandomForest(ParamSklearnRegressionAlgorithm): def __init__(self, n_estimators, criterion, max_features, max_depth, min_samples_split, min_samples_leaf, bootstrap, diff --git a/AutoSklearn/components/regression/ridge_regression.py b/ParamSklearn/components/regression/ridge_regression.py similarity index 95% rename from AutoSklearn/components/regression/ridge_regression.py rename to ParamSklearn/components/regression/ridge_regression.py index b49e983ea6..029c89b826 100644 --- a/AutoSklearn/components/regression/ridge_regression.py +++ b/ParamSklearn/components/regression/ridge_regression.py @@ -6,10 +6,10 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ..regression_base import AutoSklearnRegressionAlgorithm +from ..regression_base import ParamSklearnRegressionAlgorithm -class RidgeRegression(AutoSklearnRegressionAlgorithm): +class RidgeRegression(ParamSklearnRegressionAlgorithm): def __init__(self, alpha, fit_intercept=False, normalize=False, copy_X=False, max_iter=None, tol=0.001, solver='auto', random_state=None): diff --git a/AutoSklearn/components/regression/support_vector_regression.py b/ParamSklearn/components/regression/support_vector_regression.py similarity index 97% rename from AutoSklearn/components/regression/support_vector_regression.py rename to ParamSklearn/components/regression/support_vector_regression.py index 3f21664b58..e803f7f75c 100644 --- a/AutoSklearn/components/regression/support_vector_regression.py +++ b/ParamSklearn/components/regression/support_vector_regression.py @@ -9,11 +9,11 @@ UnParametrizedHyperparameter -from ..regression_base import AutoSklearnRegressionAlgorithm +from ..regression_base import ParamSklearnRegressionAlgorithm # Something is wrong here... """ -class SupportVectorRegression(AutoSklearnRegressionAlgorithm): +class SupportVectorRegression(ParamSklearnRegressionAlgorithm): def __init__(self, kernel, C, epsilon, degree, coef0, tol, shrinking, gamma=0.0, probability=False, cache_size=2000, verbose=False, max_iter=-1, random_state=None diff --git a/AutoSklearn/components/regression_base.py b/ParamSklearn/components/regression_base.py similarity index 95% rename from AutoSklearn/components/regression_base.py rename to ParamSklearn/components/regression_base.py index 5bae0e91b8..f1cfee4486 100644 --- a/AutoSklearn/components/regression_base.py +++ b/ParamSklearn/components/regression_base.py @@ -1,9 +1,9 @@ -class AutoSklearnRegressionAlgorithm(object): +class ParamSklearnRegressionAlgorithm(object): """Provide an abstract interface for regression algorithms in - AutoSklearn. + ParamSklearn. Make a subclass of this and put it into the directory - `AutoSklearn/components/regression` to make it available.""" + `ParamSklearn/components/regression` to make it available.""" def __init__(self): self.estimator = None self.properties = None @@ -115,4 +115,4 @@ def get_estimator(self): def __str__(self): name = self.get_properties()['name'] - return "AutoSklearn %" % name + return "ParamSklearn %" % name diff --git a/AutoSklearn/implementations/MinMaxScaler.py b/ParamSklearn/implementations/MinMaxScaler.py similarity index 100% rename from AutoSklearn/implementations/MinMaxScaler.py rename to ParamSklearn/implementations/MinMaxScaler.py diff --git a/AutoSklearn/implementations/OneHotEncoder.py b/ParamSklearn/implementations/OneHotEncoder.py similarity index 100% rename from AutoSklearn/implementations/OneHotEncoder.py rename to ParamSklearn/implementations/OneHotEncoder.py diff --git a/AutoSklearn/implementations/SparseFiltering.py b/ParamSklearn/implementations/SparseFiltering.py similarity index 100% rename from AutoSklearn/implementations/SparseFiltering.py rename to ParamSklearn/implementations/SparseFiltering.py diff --git a/AutoSklearn/implementations/StandardScaler.py b/ParamSklearn/implementations/StandardScaler.py similarity index 100% rename from AutoSklearn/implementations/StandardScaler.py rename to ParamSklearn/implementations/StandardScaler.py diff --git a/AutoSklearn/implementations/__init__.py b/ParamSklearn/implementations/__init__.py similarity index 100% rename from AutoSklearn/implementations/__init__.py rename to ParamSklearn/implementations/__init__.py diff --git a/AutoSklearn/implementations/forest.py b/ParamSklearn/implementations/forest.py similarity index 100% rename from AutoSklearn/implementations/forest.py rename to ParamSklearn/implementations/forest.py diff --git a/AutoSklearn/implementations/util.py b/ParamSklearn/implementations/util.py similarity index 100% rename from AutoSklearn/implementations/util.py rename to ParamSklearn/implementations/util.py diff --git a/AutoSklearn/regression.py b/ParamSklearn/regression.py similarity index 93% rename from AutoSklearn/regression.py rename to ParamSklearn/regression.py index 1672c78703..cc24a3c545 100644 --- a/AutoSklearn/regression.py +++ b/ParamSklearn/regression.py @@ -4,7 +4,7 @@ import sklearn if sklearn.__version__ != "0.15.2": - raise ValueError("AutoSklearn supports only sklearn version 0.15.2, " + raise ValueError("ParamSklearn supports only sklearn version 0.15.2, " "you installed %s." % sklearn.__version__) from sklearn.base import RegressorMixin @@ -18,10 +18,10 @@ from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction from . import components as components -from .base import AutoSklearnBaseEstimator +from .base import ParamSklearnBaseEstimator -class AutoSklearnRegressor(RegressorMixin, AutoSklearnBaseEstimator): +class ParamSklearnRegressor(RegressorMixin, ParamSklearnBaseEstimator): """This class implements the regression task. It implements a pipeline, which includes one preprocessing step and one @@ -49,12 +49,12 @@ class AutoSklearnRegressor(RegressorMixin, AutoSklearnBaseEstimator): ---------- _estimator : The underlying scikit-learn regression model. This variable is assigned after a call to the - :meth:`AutoSklearn.autosklearn.AutoSklearnRegressor.fit` method. + :meth:`ParamSklearn.regression.ParamSklearnRegressor.fit` method. _preprocessor : The underlying scikit-learn preprocessing algorithm. This variable is only assigned if a preprocessor is specified and after a call to the - :meth:`AutoSklearn.autosklearn.AutoSklearnRegressor.fit` method. + :meth:`ParamSklearn.regression.ParamSklearnRegressor.fit` method. See also -------- @@ -124,7 +124,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, Returns ------- cs : HPOlibConfigSpace.configuration_space.Configuration - The configuration space describing the AutoSklearnClassifier. + The configuration space describing the ParamSklearnClassifier. """ if include_estimators is not None and exclude_estimators is not None: raise ValueError("The arguments include_estimators and " @@ -138,7 +138,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, dataset_properties = dict() # Compile a list of all estimator objects for this problem - available_regressors = AutoSklearnRegressor._get_estimator_components() + available_regressors = ParamSklearnRegressor._get_estimator_components() # We assume that there exists only a single regression task. which # is different to classification where we have multiclass, @@ -178,7 +178,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, preprocessors = dict() for name in available_preprocessors: - if name in AutoSklearnRegressor._pipeline: + if name in ParamSklearnRegressor._pipeline: preprocessors[name] = available_preprocessors[name] continue elif include_preprocessors is not None and \ @@ -203,7 +203,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, preprocessors[name] = available_preprocessors[name] # Get the configuration space - configuration_space = super(AutoSklearnRegressor, cls).\ + configuration_space = super(ParamSklearnRegressor, cls).\ get_hyperparameter_search_space( cls._get_estimator_hyperparameter_name(), regressor_default, regressors, preprocessors, dataset_properties, diff --git a/AutoSklearn/textclassification.py b/ParamSklearn/textclassification.py similarity index 89% rename from AutoSklearn/textclassification.py rename to ParamSklearn/textclassification.py index 617f1cdab0..1092354ad9 100644 --- a/AutoSklearn/textclassification.py +++ b/ParamSklearn/textclassification.py @@ -1,7 +1,7 @@ -from .classification import AutoSklearnClassifier +from .classification import ParamSklearnClassifier -class AutoSklearnTextClassifier(AutoSklearnClassifier): +class ParamSklearnTextClassifier(ParamSklearnClassifier): @classmethod def get_hyperparameter_search_space(cls, include_estimators=None, exclude_estimators=None, @@ -18,7 +18,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # @Stefan: you can exclude classifiers and preprocessing methods here # From here: http://blog.devzero.com/2013/01/28/how-to-override-a-class-method-in-python/ - cs = super(AutoSklearnTextClassifier, cls).\ + cs = super(ParamSklearnTextClassifier, cls).\ get_hyperparameter_search_space( include_estimators=include_estimators, exclude_estimators=exclude_estimators, diff --git a/AutoSklearn/util.py b/ParamSklearn/util.py similarity index 100% rename from AutoSklearn/util.py rename to ParamSklearn/util.py diff --git a/README.md b/README.md index 19ac7560ce..c623439769 100644 --- a/README.md +++ b/README.md @@ -2,8 +2,8 @@ pip install scikit-learn==0.15.2 pip install git+https://github.com/mfeurer/HPOlibConfigSpace#egg=HPOlibConfigSpace0.1dev - git clone https://bitbucket.org/mfeurer/autosklearn.git - cd autosklearn + git clone https://bitbucket.org/mfeurer/paramsklearn.git + cd paramsklearn python setup.py install python setup.py test @@ -11,7 +11,7 @@ Installation with `pip` pip install numpy scipy scikit-learn==0.15.2 numpydoc sphinx pip install git+https://github.com/mfeurer/HPOlibConfigSpace#egg=HPOlibConfigSpace0.1dev - pip install --editable git+https://bitbucket.org/mfeurer/autosklearn#egg=AutoSklearn + pip install --editable git+https://bitbucket.org/mfeurer/paramsklearn#egg=ParamSklearn To build the documentation you also need the packages `sphinx` and `numpydoc`. diff --git a/make.bat b/make.bat index cceba6a6dd..10e0b3175a 100644 --- a/make.bat +++ b/make.bat @@ -115,9 +115,9 @@ if "%1" == "qthelp" ( echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\AutoSklearn.qhcp + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\ParamSklearn.qhcp echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\AutoSklearn.ghc + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\ParamSklearn.ghc goto end ) diff --git a/misc/random_sampling.py b/misc/random_sampling.py index 24fd13c5f3..315a7d3adb 100644 --- a/misc/random_sampling.py +++ b/misc/random_sampling.py @@ -1,4 +1,4 @@ -from AutoSklearn.autosklearn import AutoSklearnClassifier +from ParamSklearn.classification import ParamSklearnClassifier from HPOlibConfigSpace.random_sampler import RandomSampler import sklearn.datasets import sklearn.metrics @@ -9,11 +9,11 @@ Y = iris.target indices = np.arange(X.shape[0]) np.random.shuffle(indices) -configuration_space = AutoSklearnClassifier.get_hyperparameter_search_space() +configuration_space = ParamSklearnClassifier.get_hyperparameter_search_space() sampler = RandomSampler(configuration_space, 1) for i in range(10000): configuration = sampler.sample_configuration() - auto = AutoSklearnClassifier(configuration) + auto = ParamSklearnClassifier(configuration) try: auto = auto.fit(X[indices[:100]], Y[indices[:100]]) except Exception as e: diff --git a/setup.py b/setup.py index 15eb7532e5..eb4074d6e7 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ import setuptools -setuptools.setup(name="AutoSklearn", +setuptools.setup(name="ParamSklearn", description="Scikit-Learn wrapper for automatic " "hyperparameter configuration.", version="0.1dev", @@ -17,4 +17,4 @@ license="BSD", platforms=['Linux'], classifiers=[], - url="github.com/mfeurer/autosklearn") + url="github.com/mfeurer/paramsklearn") diff --git a/source/api.rst b/source/api.rst index 7e3110ad57..7a99c6a0b2 100644 --- a/source/api.rst +++ b/source/api.rst @@ -8,12 +8,12 @@ APIs Main modules ============ -.. autoclass:: AutoSklearn.autosklearn.AutoSklearnClassifier +.. autoclass:: ParamSklearn.classification.ParamSklearnClassifier Extension Interfaces ==================== -.. autoclass:: AutoSklearn.components.classification_base.AutoSklearnClassificationAlgorithm +.. autoclass:: ParamSklearn.components.classification_base.ParamSklearnClassificationAlgorithm -.. autoclass:: AutoSklearn.components.preprocessor_base.AutoSklearnPreprocessingAlgorithm \ No newline at end of file +.. autoclass:: ParamSklearn.components.preprocessor_base.ParamSklearnPreprocessingAlgorithm \ No newline at end of file diff --git a/source/components.rst b/source/components.rst index d3f234a0c8..f83f890856 100644 --- a/source/components.rst +++ b/source/components.rst @@ -8,35 +8,35 @@ Available Components Classification ============== -A list of all classification algorithms considered in the AutoSklearn search space. +A list of all classification algorithms considered in the ParamSklearn search space. -.. autoclass:: AutoSklearn.components.classification.extra_trees.ExtraTreesClassifier +.. autoclass:: ParamSklearn.components.classification.extra_trees.ExtraTreesClassifier :members: -.. autoclass:: AutoSklearn.components.classification.gradient_boosting.GradientBoostingClassifier +.. autoclass:: ParamSklearn.components.classification.gradient_boosting.GradientBoostingClassifier :members: -.. autoclass:: AutoSklearn.components.classification.k_nearest_neighbors.KNearestNeighborsClassifier +.. autoclass:: ParamSklearn.components.classification.k_nearest_neighbors.KNearestNeighborsClassifier :members: -.. autoclass:: AutoSklearn.components.classification.liblinear.LibLinear_SVC +.. autoclass:: ParamSklearn.components.classification.liblinear.LibLinear_SVC :members: -.. autoclass:: AutoSklearn.components.classification.libsvm_svc.LibSVM_SVC +.. autoclass:: ParamSklearn.components.classification.libsvm_svc.LibSVM_SVC :members: -.. autoclass:: AutoSklearn.components.classification.random_forest.RandomForest +.. autoclass:: ParamSklearn.components.classification.random_forest.RandomForest :members: -.. autoclass:: AutoSklearn.components.classification.sgd.SGD +.. autoclass:: ParamSklearn.components.classification.sgd.SGD :members: Regression ========== -Currently there is no AutoSklearnRegressor. +Currently there is no ParamSklearnRegressor. Preprocessing ============= -.. autoclass:: AutoSklearn.components.preprocessing.pca.PCA +.. autoclass:: ParamSklearn.components.preprocessing.pca.PCA diff --git a/source/conf.py b/source/conf.py index 5e55d86ab3..edd9e03e94 100644 --- a/source/conf.py +++ b/source/conf.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# AutoSklearn documentation build configuration file, created by +# ParamSklearn documentation build configuration file, created by # sphinx-quickstart on Mon Oct 6 11:12:25 2014. # # This file is execfile()d with the current directory set to its @@ -56,7 +56,7 @@ master_doc = 'index' # General information about the project. -project = u'AutoSklearn' +project = u'ParamSklearn' copyright = u'2014, Matthias Feurer' # The version info for the project you're documenting, acts as replacement for @@ -189,7 +189,7 @@ #html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'AutoSklearndoc' +htmlhelp_basename = 'ParamSklearndoc' # -- Options for LaTeX output --------------------------------------------- @@ -209,7 +209,7 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - ('index', 'AutoSklearn.tex', u'AutoSklearn Documentation', + ('index', 'ParamSklearn.tex', u'ParamSklearn Documentation', u'Matthias Feurer', 'manual'), ] @@ -239,7 +239,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ('index', 'autosklearn', u'AutoSklearn Documentation', + ('index', 'ParamSklearn', u'ParamSklearn Documentation', [u'Matthias Feurer'], 1) ] @@ -253,8 +253,8 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'AutoSklearn', u'AutoSklearn Documentation', - u'Matthias Feurer', 'AutoSklearn', 'One line description of project.', + ('index', 'ParamSklearn', u'ParamSklearn Documentation', + u'Matthias Feurer', 'ParamSklearn', 'One line description of project.', 'Miscellaneous'), ] diff --git a/source/extending_AutoSklearn.rst b/source/extending_AutoSklearn.rst deleted file mode 100644 index 5469691413..0000000000 --- a/source/extending_AutoSklearn.rst +++ /dev/null @@ -1,4 +0,0 @@ -Extending AutoSklearn -********************* - -.. automodule:: AutoSklearn.components diff --git a/source/extending_ParamSklearn.rst b/source/extending_ParamSklearn.rst new file mode 100644 index 0000000000..4b1123bf49 --- /dev/null +++ b/source/extending_ParamSklearn.rst @@ -0,0 +1,4 @@ +Extending ParamSklearn +********************** + +.. automodule:: ParamSklearn.components diff --git a/source/first_steps.rst b/source/first_steps.rst index d7fac34546..471a999d3a 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -1,12 +1,12 @@ -First Steps with AutoSklearn -**************************** +First Steps with ParamSklearn +***************************** This example demonstrates how to get the whole configuration space covered by -AutoSklearn, feed it to the random search algorithm implemented by the +ParamSklearn, feed it to the random search algorithm implemented by the HPOlibConfigSpace package and then train a classifier with a random configuration on the iris dataset. - >>> from AutoSklearn.classification import AutoSklearnClassifier + >>> from ParamSklearn.classification import ParamSklearnClassifier >>> from HPOlibConfigSpace.random_sampler import RandomSampler >>> import sklearn.datasets >>> import sklearn.metrics @@ -17,11 +17,11 @@ configuration on the iris dataset. >>> indices = np.arange(X.shape[0]) >>> np.random.seed(1) >>> np.random.shuffle(indices) - >>> configuration_space = AutoSklearnClassifier.get_hyperparameter_search_space() + >>> configuration_space = ParamSklearnClassifier.get_hyperparameter_search_space() >>> sampler = RandomSampler(configuration_space, 5) >>> configuration = sampler.sample_configuration() - >>> auto = AutoSklearnClassifier(configuration, random_state=1) - >>> auto = auto.fit(X[indices[:100]], Y[indices[:100]]) - >>> predictions = auto.predict(X[indices[100:]]) + >>> cls = ParamSklearnClassifier(configuration, random_state=1) + >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) + >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) 0.81999999999999995 diff --git a/source/index.rst b/source/index.rst index 27718c75ef..400a08a65b 100644 --- a/source/index.rst +++ b/source/index.rst @@ -1,10 +1,10 @@ -.. AutoSklearn documentation master file, created by +.. ParamSklearn documentation master file, created by sphinx-quickstart on Mon Oct 6 11:12:25 2014. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. -Welcome to AutoSklearn's documentation! -======================================= +Welcome to ParamSklearn's documentation! +======================================== Contents: @@ -14,7 +14,7 @@ Contents: introduction installation first_steps - extending_AutoSklearn + extending_ParamSklearn Indices and Tables diff --git a/source/installation.rst b/source/installation.rst index 70e4decf11..9c8eaa0d42 100644 --- a/source/installation.rst +++ b/source/installation.rst @@ -1,4 +1,4 @@ -Install AutoSklearn -******************* +Install ParamSklearn +******************** Please see the file `README.md`. \ No newline at end of file diff --git a/source/introduction.rst b/source/introduction.rst index 6b64d8ba43..43a62256ad 100644 --- a/source/introduction.rst +++ b/source/introduction.rst @@ -1,17 +1,17 @@ -Introduction to AutoSklearn -*************************** +Introduction to ParamSklearn +**************************** -What is AutoSklearn? -==================== +What is ParamSklearn? +===================== -.. automodule:: AutoSklearn +.. automodule:: ParamSklearn Get involved ============ License ======= -We chose to license AutoSklearn the same way as scikit-learn. It is available under the open source and commercially usable 3-clause BSD license. +We chose to license ParamSklearn the same way as scikit-learn. It is available under the open source and commercially usable 3-clause BSD license. Copyright (c) 2014, Matthias Feurer All rights reserved. diff --git a/tests/components/classification/test_extra_trees.py b/tests/components/classification/test_extra_trees.py index 98999577c3..056148db4f 100644 --- a/tests/components/classification/test_extra_trees.py +++ b/tests/components/classification/test_extra_trees.py @@ -1,8 +1,8 @@ import unittest -from AutoSklearn.components.classification.extra_trees import \ +from ParamSklearn.components.classification.extra_trees import \ ExtraTreesClassifier -from AutoSklearn.util import _test_classifier +from ParamSklearn.util import _test_classifier import sklearn.metrics diff --git a/tests/components/classification/test_gradient_boosting.py b/tests/components/classification/test_gradient_boosting.py index fbf5ea83c9..4b733944b9 100644 --- a/tests/components/classification/test_gradient_boosting.py +++ b/tests/components/classification/test_gradient_boosting.py @@ -1,8 +1,8 @@ import unittest -from AutoSklearn.components.classification.gradient_boosting import \ +from ParamSklearn.components.classification.gradient_boosting import \ GradientBoostingClassifier -from AutoSklearn.util import _test_classifier +from ParamSklearn.util import _test_classifier import sklearn.metrics diff --git a/tests/components/classification/test_k_nearest_neighbor.py b/tests/components/classification/test_k_nearest_neighbor.py index 5699cc497c..47c924c6e3 100644 --- a/tests/components/classification/test_k_nearest_neighbor.py +++ b/tests/components/classification/test_k_nearest_neighbor.py @@ -1,8 +1,8 @@ import unittest -from AutoSklearn.components.classification.k_nearest_neighbors import \ +from ParamSklearn.components.classification.k_nearest_neighbors import \ KNearestNeighborsClassifier -from AutoSklearn.util import _test_classifier +from ParamSklearn.util import _test_classifier import sklearn.metrics diff --git a/tests/components/classification/test_liblinear.py b/tests/components/classification/test_liblinear.py index 3f732412ac..7242f946ab 100644 --- a/tests/components/classification/test_liblinear.py +++ b/tests/components/classification/test_liblinear.py @@ -1,7 +1,7 @@ import unittest -from AutoSklearn.components.classification.liblinear import LibLinear_SVC -from AutoSklearn.util import _test_classifier +from ParamSklearn.components.classification.liblinear import LibLinear_SVC +from ParamSklearn.util import _test_classifier class LibLinearComponentTest(unittest.TestCase): diff --git a/tests/components/classification/test_libsvm_svc.py b/tests/components/classification/test_libsvm_svc.py index 483cbb4e19..0f936cddbe 100644 --- a/tests/components/classification/test_libsvm_svc.py +++ b/tests/components/classification/test_libsvm_svc.py @@ -1,7 +1,7 @@ import unittest -from AutoSklearn.components.classification.libsvm_svc import LibSVM_SVC -from AutoSklearn.util import _test_classifier +from ParamSklearn.components.classification.libsvm_svc import LibSVM_SVC +from ParamSklearn.util import _test_classifier import sklearn.metrics diff --git a/tests/components/classification/test_random_forest.py b/tests/components/classification/test_random_forest.py index d1a7fb12dd..63d7f11c3f 100644 --- a/tests/components/classification/test_random_forest.py +++ b/tests/components/classification/test_random_forest.py @@ -1,7 +1,7 @@ import unittest -from AutoSklearn.components.classification.random_forest import RandomForest -from AutoSklearn.util import _test_classifier +from ParamSklearn.components.classification.random_forest import RandomForest +from ParamSklearn.util import _test_classifier import sklearn.metrics diff --git a/tests/components/classification/test_sgd.py b/tests/components/classification/test_sgd.py index 9b3aacade7..be81c9ae62 100644 --- a/tests/components/classification/test_sgd.py +++ b/tests/components/classification/test_sgd.py @@ -1,7 +1,7 @@ import unittest -from AutoSklearn.components.classification.sgd import SGD -from AutoSklearn.util import _test_classifier +from ParamSklearn.components.classification.sgd import SGD +from ParamSklearn.util import _test_classifier import sklearn.metrics diff --git a/tests/components/preprocessing/test_imputation.py b/tests/components/preprocessing/test_imputation.py index c2cb9707e6..532535cd1a 100644 --- a/tests/components/preprocessing/test_imputation.py +++ b/tests/components/preprocessing/test_imputation.py @@ -2,8 +2,8 @@ from scipy import sparse -from AutoSklearn.components.preprocessing.imputation import Imputation -from AutoSklearn.util import _test_preprocessing +from ParamSklearn.components.preprocessing.imputation import Imputation +from ParamSklearn.util import _test_preprocessing class ImputationTest(unittest.TestCase): diff --git a/tests/components/preprocessing/test_kitchen_sinks.py b/tests/components/preprocessing/test_kitchen_sinks.py index ea441a41f1..5fd5629c75 100644 --- a/tests/components/preprocessing/test_kitchen_sinks.py +++ b/tests/components/preprocessing/test_kitchen_sinks.py @@ -1,7 +1,7 @@ import unittest -from AutoSklearn.components.preprocessing.kitchen_sinks import RandomKitchenSinks -from AutoSklearn.util import _test_preprocessing +from ParamSklearn.components.preprocessing.kitchen_sinks import RandomKitchenSinks +from ParamSklearn.util import _test_preprocessing class KitchenSinkComponent(unittest.TestCase): diff --git a/tests/components/preprocessing/test_pca.py b/tests/components/preprocessing/test_pca.py index 67fb51b452..092447eac7 100644 --- a/tests/components/preprocessing/test_pca.py +++ b/tests/components/preprocessing/test_pca.py @@ -1,7 +1,7 @@ import unittest -from AutoSklearn.components.preprocessing.pca import PCA -from AutoSklearn.util import _test_preprocessing +from ParamSklearn.components.preprocessing.pca import PCA +from ParamSklearn.util import _test_preprocessing class PCAComponentTest(unittest.TestCase): diff --git a/tests/components/preprocessing/test_scaling.py b/tests/components/preprocessing/test_scaling.py index 6858f594e8..cd05dafde3 100644 --- a/tests/components/preprocessing/test_scaling.py +++ b/tests/components/preprocessing/test_scaling.py @@ -3,8 +3,8 @@ import numpy as np import sklearn.datasets -from AutoSklearn.components.preprocessing.rescaling import Rescaling -from AutoSklearn.util import _test_preprocessing +from ParamSklearn.components.preprocessing.rescaling import Rescaling +from ParamSklearn.util import _test_preprocessing class ScalingComponentTest(unittest.TestCase): diff --git a/tests/components/preprocessing/test_select_percentile_classification.py b/tests/components/preprocessing/test_select_percentile_classification.py index 69a7b58c3f..7a75093830 100644 --- a/tests/components/preprocessing/test_select_percentile_classification.py +++ b/tests/components/preprocessing/test_select_percentile_classification.py @@ -2,8 +2,8 @@ import scipy.sparse -from AutoSklearn.components.preprocessing.select_percentile_classification import SelectPercentileClassification -from AutoSklearn.util import _test_preprocessing +from ParamSklearn.components.preprocessing.select_percentile_classification import SelectPercentileClassification +from ParamSklearn.util import _test_preprocessing class SelectPercentileClassificationTest(unittest.TestCase): diff --git a/tests/components/preprocessing/test_select_percentile_regression.py b/tests/components/preprocessing/test_select_percentile_regression.py index 46531e692a..1b29349823 100644 --- a/tests/components/preprocessing/test_select_percentile_regression.py +++ b/tests/components/preprocessing/test_select_percentile_regression.py @@ -1,7 +1,7 @@ import unittest -from AutoSklearn.components.preprocessing.select_percentile_regression import SelectPercentileRegression -from AutoSklearn.util import _test_preprocessing +from ParamSklearn.components.preprocessing.select_percentile_regression import SelectPercentileRegression +from ParamSklearn.util import _test_preprocessing class SelectPercentileRegressionTest(unittest.TestCase): diff --git a/tests/components/preprocessing/test_sparse_filtering.py b/tests/components/preprocessing/test_sparse_filtering.py index d461321205..ae78943ad8 100644 --- a/tests/components/preprocessing/test_sparse_filtering.py +++ b/tests/components/preprocessing/test_sparse_filtering.py @@ -1,7 +1,7 @@ import unittest -from AutoSklearn.components.preprocessing.sparse_filtering import SparseFiltering -from AutoSklearn.util import _test_preprocessing +from ParamSklearn.components.preprocessing.sparse_filtering import SparseFiltering +from ParamSklearn.util import _test_preprocessing class SparseFilteringComponentTest(unittest.TestCase): diff --git a/tests/components/regression/test_gaussian_process.py b/tests/components/regression/test_gaussian_process.py index 9ce6f57ccd..f08a549c20 100644 --- a/tests/components/regression/test_gaussian_process.py +++ b/tests/components/regression/test_gaussian_process.py @@ -1,7 +1,7 @@ import unittest -from AutoSklearn.components.regression.gaussian_process import GaussianProcess -from AutoSklearn.util import _test_regressor +from ParamSklearn.components.regression.gaussian_process import GaussianProcess +from ParamSklearn.util import _test_regressor import sklearn.metrics diff --git a/tests/components/regression/test_gradient_boosting.py b/tests/components/regression/test_gradient_boosting.py index 1c9af229d8..17dc88fddf 100644 --- a/tests/components/regression/test_gradient_boosting.py +++ b/tests/components/regression/test_gradient_boosting.py @@ -1,7 +1,7 @@ import unittest -from AutoSklearn.components.regression.gradient_boosting import GradientBoosting -from AutoSklearn.util import _test_regressor +from ParamSklearn.components.regression.gradient_boosting import GradientBoosting +from ParamSklearn.util import _test_regressor import sklearn.metrics diff --git a/tests/components/regression/test_random_forests.py b/tests/components/regression/test_random_forests.py index b2b8372e7a..36bee53643 100644 --- a/tests/components/regression/test_random_forests.py +++ b/tests/components/regression/test_random_forests.py @@ -1,7 +1,7 @@ import unittest -from AutoSklearn.components.regression.random_forest import RandomForest -from AutoSklearn.util import _test_regressor +from ParamSklearn.components.regression.random_forest import RandomForest +from ParamSklearn.util import _test_regressor import sklearn.metrics diff --git a/tests/components/regression/test_ridge_regression.py b/tests/components/regression/test_ridge_regression.py index b2a85e19d8..061696f0cd 100644 --- a/tests/components/regression/test_ridge_regression.py +++ b/tests/components/regression/test_ridge_regression.py @@ -1,8 +1,8 @@ import unittest -from AutoSklearn.components.regression.ridge_regression import RidgeRegression -from AutoSklearn.components.preprocessing.kitchen_sinks import RandomKitchenSinks -from AutoSklearn.util import _test_regressor, get_dataset +from ParamSklearn.components.regression.ridge_regression import RidgeRegression +from ParamSklearn.components.preprocessing.kitchen_sinks import RandomKitchenSinks +from ParamSklearn.util import _test_regressor, get_dataset import sklearn.metrics diff --git a/tests/components/regression/test_support_vector_regression.py b/tests/components/regression/test_support_vector_regression.py index 582de1d291..9d20282eb2 100644 --- a/tests/components/regression/test_support_vector_regression.py +++ b/tests/components/regression/test_support_vector_regression.py @@ -1,7 +1,7 @@ import unittest """ -from AutoSklearn.components.regression.support_vector_regression import SupportVectorRegression -from AutoSklearn.util import _test_regressor +from ParamSklearn.components.regression.support_vector_regression import SupportVectorRegression +from ParamSklearn.util import _test_regressor import sklearn.metrics diff --git a/tests/implementations/test_OneHotEncoder.py b/tests/implementations/test_OneHotEncoder.py index cdcec7e937..ab8b698bd9 100644 --- a/tests/implementations/test_OneHotEncoder.py +++ b/tests/implementations/test_OneHotEncoder.py @@ -3,7 +3,7 @@ import numpy as np import scipy.sparse -from AutoSklearn.implementations.OneHotEncoder import OneHotEncoder +from ParamSklearn.implementations.OneHotEncoder import OneHotEncoder dense1 = [[1, 5, 9], [1, 3, 9]] diff --git a/tests/implementations/test_minmaxscaler.py b/tests/implementations/test_minmaxscaler.py index 0323d26989..3beb8811bd 100644 --- a/tests/implementations/test_minmaxscaler.py +++ b/tests/implementations/test_minmaxscaler.py @@ -4,8 +4,8 @@ from scipy import sparse from sklearn.utils.testing import assert_array_almost_equal -from AutoSklearn.util import get_dataset -from AutoSklearn.implementations.MinMaxScaler import MinMaxScaler +from ParamSklearn.util import get_dataset +from ParamSklearn.implementations.MinMaxScaler import MinMaxScaler class MinMaxScalerTest(unittest.TestCase): diff --git a/tests/implementations/test_sparse_filtering.py b/tests/implementations/test_sparse_filtering.py index 47bc91bc64..42d504b53f 100644 --- a/tests/implementations/test_sparse_filtering.py +++ b/tests/implementations/test_sparse_filtering.py @@ -2,7 +2,7 @@ import os import numpy as np -from AutoSklearn.implementations.SparseFiltering import SparseFiltering +from ParamSklearn.implementations.SparseFiltering import SparseFiltering class TestSparseFiltering(unittest.TestCase): diff --git a/tests/implementations/test_standard_scaler.py b/tests/implementations/test_standard_scaler.py index 6d963f2aba..09f9d7fbd7 100644 --- a/tests/implementations/test_standard_scaler.py +++ b/tests/implementations/test_standard_scaler.py @@ -6,8 +6,8 @@ from sklearn.utils.testing import assert_array_almost_equal from sklearn.preprocessing.data import scale -from AutoSklearn.implementations.StandardScaler import StandardScaler -from AutoSklearn.util import get_dataset +from ParamSklearn.implementations.StandardScaler import StandardScaler +from ParamSklearn.util import get_dataset matrix1 = [[0, 1, 2], [0, 1, 2], diff --git a/tests/implementations/test_util.py b/tests/implementations/test_util.py index 196beb1fd2..8b688e7f74 100644 --- a/tests/implementations/test_util.py +++ b/tests/implementations/test_util.py @@ -3,7 +3,7 @@ import numpy as np from sklearn.utils.testing import assert_array_almost_equal -from AutoSklearn.implementations.util import softmax +from ParamSklearn.implementations.util import softmax class UtilTest(unittest.TestCase): def test_softmax_binary(self): diff --git a/tests/test_classification.py b/tests/test_classification.py index a5580b97dc..938321b0b8 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -13,37 +13,37 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from AutoSklearn.classification import AutoSklearnClassifier -from AutoSklearn.components.classification_base import AutoSklearnClassificationAlgorithm -from AutoSklearn.components.preprocessor_base import AutoSklearnPreprocessingAlgorithm -import AutoSklearn.components.classification as classification_components -import AutoSklearn.components.preprocessing as preprocessing_components -from AutoSklearn.util import get_dataset - -class TestAutoSKlearnClassifier(unittest.TestCase): - # TODO: test for both possible ways to initialize AutoSklearn +from ParamSklearn.classification import ParamSklearnClassifier +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +import ParamSklearn.components.classification as classification_components +import ParamSklearn.components.preprocessing as preprocessing_components +from ParamSklearn.util import get_dataset + +class TestParamSklearnClassifier(unittest.TestCase): + # TODO: test for both possible ways to initialize ParamSklearn # parameters and other... def test_find_classifiers(self): classifiers = classification_components._classifiers self.assertGreaterEqual(len(classifiers), 1) for key in classifiers: - self.assertIn(AutoSklearnClassificationAlgorithm, + self.assertIn(ParamSklearnClassificationAlgorithm, classifiers[key].__bases__) def test_find_preprocessors(self): preprocessors = preprocessing_components._preprocessors self.assertGreaterEqual(len(preprocessors), 1) for key in preprocessors: - self.assertIn(AutoSklearnPreprocessingAlgorithm, + self.assertIn(ParamSklearnPreprocessingAlgorithm, preprocessors[key].__bases__) def test_default_configuration(self): for i in range(2): - cs = AutoSklearnClassifier.get_hyperparameter_search_space() + cs = ParamSklearnClassifier.get_hyperparameter_search_space() default = cs.get_default_configuration() X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris') - auto = AutoSklearnClassifier(default) + auto = ParamSklearnClassifier(default) auto = auto.fit(X_train, Y_train) predictions = auto.predict(X_test) self.assertAlmostEqual(0.95999999999999996, @@ -51,7 +51,7 @@ def test_default_configuration(self): scores = auto.predict_proba(X_test) def test_get_hyperparameter_search_space(self): - cs = AutoSklearnClassifier.get_hyperparameter_search_space() + cs = ParamSklearnClassifier.get_hyperparameter_search_space() self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() @@ -61,50 +61,50 @@ def test_get_hyperparameter_search_space(self): self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): - cs = AutoSklearnClassifier.get_hyperparameter_search_space( + cs = ParamSklearnClassifier.get_hyperparameter_search_space( include_estimators=['libsvm_svc']) self.assertEqual(cs.get_hyperparameter('classifier'), CategoricalHyperparameter('classifier', ['libsvm_svc'])) - cs = AutoSklearnClassifier.get_hyperparameter_search_space( + cs = ParamSklearnClassifier.get_hyperparameter_search_space( exclude_estimators=['libsvm_svc']) self.assertNotIn('libsvm_svc', str(cs)) - cs = AutoSklearnClassifier.get_hyperparameter_search_space( + cs = ParamSklearnClassifier.get_hyperparameter_search_space( include_preprocessors=['pca']) self.assertEqual(cs.get_hyperparameter('preprocessor'), CategoricalHyperparameter('preprocessor', ["None", 'pca'])) - cs = AutoSklearnClassifier.get_hyperparameter_search_space( + cs = ParamSklearnClassifier.get_hyperparameter_search_space( exclude_preprocessors=['pca']) self.assertNotIn('pca', str(cs)) def test_get_hyperparameter_search_space_dataset_properties(self): - full_cs = AutoSklearnClassifier.get_hyperparameter_search_space() - cs_mc = AutoSklearnClassifier.get_hyperparameter_search_space( + full_cs = ParamSklearnClassifier.get_hyperparameter_search_space() + cs_mc = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'multiclass': True}) self.assertEqual(full_cs, cs_mc) - cs_ml = AutoSklearnClassifier.get_hyperparameter_search_space( + cs_ml = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'multilabel': True}) self.assertNotIn('k_nearest_neighbors', str(cs_ml)) self.assertNotIn('liblinear', str(cs_ml)) self.assertNotIn('libsvm_svc', str(cs_ml)) self.assertNotIn('sgd', str(cs_ml)) - cs_sp = AutoSklearnClassifier.get_hyperparameter_search_space( + cs_sp = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'sparse': True}) self.assertNotIn('extra_trees', str(cs_sp)) self.assertNotIn('gradient_boosting', str(cs_sp)) self.assertNotIn('random_forest', str(cs_sp)) - cs_mc_ml = AutoSklearnClassifier.get_hyperparameter_search_space( + cs_mc_ml = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'multilabel': True, 'multiclass': True}) self.assertEqual(cs_ml, cs_mc_ml) self.assertRaisesRegexp(ValueError, "No classifier to build a configuration space " - "for...", AutoSklearnClassifier. + "for...", ParamSklearnClassifier. get_hyperparameter_search_space, dataset_properties={'multilabel': True, 'multiclass': True, diff --git a/tests/test_doctests.py b/tests/test_doctests.py index fe513dd2f7..8082930b76 100644 --- a/tests/test_doctests.py +++ b/tests/test_doctests.py @@ -2,12 +2,12 @@ import os import unittest -import AutoSklearn +import ParamSklearn class DocumentationTest(unittest.TestCase): def test_first_steps(self): - filename = os.path.dirname(AutoSklearn.__file__) + filename = os.path.dirname(ParamSklearn.__file__) filename = os.path.join(filename, "..", "source", "first_steps.rst") failed, run = doctest.testfile(filename, module_relative=False) self.assertEqual(0, failed) \ No newline at end of file diff --git a/tests/test_regression.py b/tests/test_regression.py index a565c4ff59..2b1bc4109e 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -13,36 +13,36 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from AutoSklearn.regression import AutoSklearnRegressor -from AutoSklearn.components.regression_base import AutoSklearnRegressionAlgorithm -from AutoSklearn.components.preprocessor_base import AutoSklearnPreprocessingAlgorithm -import AutoSklearn.components.regression as regression_components -import AutoSklearn.components.preprocessing as preprocessing_components -from AutoSklearn.util import get_dataset +from ParamSklearn.regression import ParamSklearnRegressor +from ParamSklearn.components.regression_base import ParamSklearnRegressionAlgorithm +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +import ParamSklearn.components.regression as regression_components +import ParamSklearn.components.preprocessing as preprocessing_components +from ParamSklearn.util import get_dataset -class TestAutoSKlearnRegressor(unittest.TestCase): +class TestParamSKlearnRegressor(unittest.TestCase): def test_find_regressors(self): regressors = regression_components._regressors self.assertGreaterEqual(len(regressors), 1) for key in regressors: - self.assertIn(AutoSklearnRegressionAlgorithm, + self.assertIn(ParamSklearnRegressionAlgorithm, regressors[key].__bases__) def test_find_preprocessors(self): preprocessors = preprocessing_components._preprocessors self.assertGreaterEqual(len(preprocessors), 1) for key in preprocessors: - self.assertIn(AutoSklearnPreprocessingAlgorithm, + self.assertIn(ParamSklearnPreprocessingAlgorithm, preprocessors[key].__bases__) def test_default_configuration(self): for i in range(2): - cs = AutoSklearnRegressor.get_hyperparameter_search_space() + cs = ParamSklearnRegressor.get_hyperparameter_search_space() default = cs.get_default_configuration() X_train, Y_train, X_test, Y_test = get_dataset(dataset='diabetes') - auto = AutoSklearnRegressor(default) + auto = ParamSklearnRegressor(default) auto = auto.fit(X_train, Y_train) predictions = auto.predict(copy.deepcopy(X_test)) # The lower the worse @@ -52,7 +52,7 @@ def test_default_configuration(self): self.assertEqual(model_score, r2_score) def test_get_hyperparameter_search_space(self): - cs = AutoSklearnRegressor.get_hyperparameter_search_space() + cs = ParamSklearnRegressor.get_hyperparameter_search_space() self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() @@ -60,22 +60,22 @@ def test_get_hyperparameter_search_space(self): self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): - cs = AutoSklearnRegressor.get_hyperparameter_search_space( + cs = ParamSklearnRegressor.get_hyperparameter_search_space( include_estimators=['random_forest']) self.assertEqual(cs.get_hyperparameter('regressor'), CategoricalHyperparameter('regressor', ['random_forest'])) # TODO add this test when more than one regressor is present - cs = AutoSklearnRegressor.get_hyperparameter_search_space( + cs = ParamSklearnRegressor.get_hyperparameter_search_space( exclude_estimators=['random_forest']) self.assertNotIn('random_forest', str(cs)) - cs = AutoSklearnRegressor.get_hyperparameter_search_space( + cs = ParamSklearnRegressor.get_hyperparameter_search_space( include_preprocessors=['pca']) self.assertEqual(cs.get_hyperparameter('preprocessor'), CategoricalHyperparameter('preprocessor', ["None", 'pca'])) - cs = AutoSklearnRegressor.get_hyperparameter_search_space( + cs = ParamSklearnRegressor.get_hyperparameter_search_space( exclude_preprocessors=['pca']) self.assertNotIn('pca', str(cs)) @@ -86,28 +86,28 @@ def test_get_hyperparameter_search_space_dataset_properties(self): # test is somewhat stupid pass """ - full_cs = AutoSklearnRegressor.get_hyperparameter_search_space() - cs_mc = AutoSklearnRegressor.get_hyperparameter_search_space() + full_cs = ParamSklearnRegressor.get_hyperparameter_search_space() + cs_mc = ParamSklearnRegressor.get_hyperparameter_search_space() self.assertEqual(full_cs, cs_mc) - cs_ml = AutoSklearnRegressor.get_hyperparameter_search_space() + cs_ml = ParamSklearnRegressor.get_hyperparameter_search_space() self.assertNotIn('k_nearest_neighbors', str(cs_ml)) self.assertNotIn('liblinear', str(cs_ml)) self.assertNotIn('libsvm_svc', str(cs_ml)) self.assertNotIn('sgd', str(cs_ml)) - cs_sp = AutoSklearnRegressor.get_hyperparameter_search_space( + cs_sp = ParamSklearnRegressor.get_hyperparameter_search_space( sparse=True) self.assertNotIn('extra_trees', str(cs_sp)) self.assertNotIn('gradient_boosting', str(cs_sp)) self.assertNotIn('random_forest', str(cs_sp)) - cs_mc_ml = AutoSklearnRegressor.get_hyperparameter_search_space() + cs_mc_ml = ParamSklearnRegressor.get_hyperparameter_search_space() self.assertEqual(cs_ml, cs_mc_ml) self.assertRaisesRegexp(ValueError, "No regressor to build a configuration space " - "for...", AutoSklearnRegressor. + "for...", ParamSklearnRegressor. get_hyperparameter_search_space, multiclass=True, multilabel=True, sparse=True) """ diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index 17bff4ed16..95caea833f 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -2,12 +2,12 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from AutoSklearn.textclassification import AutoSklearnTextClassifier +from ParamSklearn.textclassification import ParamSklearnTextClassifier class TextClassificationTest(unittest.TestCase): def test_get_hyperparameter_search_space(self): - cs = AutoSklearnTextClassifier.get_hyperparameter_search_space() + cs = ParamSklearnTextClassifier.get_hyperparameter_search_space() self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() From be26987d7f2d8f445591f62b7af3ad861a015b51 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 19 Feb 2015 11:20:21 +0100 Subject: [PATCH 128/352] Update documentation --- source/api.rst | 4 +++- source/components.rst | 14 +++++++++++++- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/source/api.rst b/source/api.rst index 7a99c6a0b2..23b72523e2 100644 --- a/source/api.rst +++ b/source/api.rst @@ -15,5 +15,7 @@ Extension Interfaces ==================== .. autoclass:: ParamSklearn.components.classification_base.ParamSklearnClassificationAlgorithm + +.. autoclass:: ParamSklearn.components.regression_base.ParamSklearnRegressionAlgorithm -.. autoclass:: ParamSklearn.components.preprocessor_base.ParamSklearnPreprocessingAlgorithm \ No newline at end of file +.. autoclass:: ParamSklearn.components.preprocessor_base.ParamSklearnPreprocessingAlgorithm diff --git a/source/components.rst b/source/components.rst index f83f890856..9f873c203f 100644 --- a/source/components.rst +++ b/source/components.rst @@ -34,7 +34,19 @@ A list of all classification algorithms considered in the ParamSklearn search sp Regression ========== -Currently there is no ParamSklearnRegressor. +A list of all regression algorithms considered in the ParamSklearn search space. + +.. autoclass:: ParamSklearn.components.regression.gaussian_process.GaussianProcess + :members: + +.. autoclass:: ParamSklearn.components.regression.gradient_boosting.GradientBoosting + :members: + +.. autoclass:: ParamSklearn.components.regression.random_forest.RandomForest + :members: + +.. autoclass:: ParamSklearn.components.regression.ridge_regression.RidgeRegression + Preprocessing ============= From 79b8bf6602922781d52ec8b203ba4ada8266cfa4 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 27 Feb 2015 14:10:24 +0100 Subject: [PATCH 129/352] Tests OneHotEncoder on sparse matrices --- ParamSklearn/implementations/OneHotEncoder.py | 127 +++++++++++++----- tests/implementations/test_OneHotEncoder.py | 110 +++++++++++---- 2 files changed, 179 insertions(+), 58 deletions(-) diff --git a/ParamSklearn/implementations/OneHotEncoder.py b/ParamSklearn/implementations/OneHotEncoder.py index 43d7d8bc2a..66055d5c8b 100644 --- a/ParamSklearn/implementations/OneHotEncoder.py +++ b/ParamSklearn/implementations/OneHotEncoder.py @@ -36,11 +36,7 @@ def _transform_selected(X, transform, selected="all", copy=True): X = safe_asarray(X, copy=copy, force_all_finite=False) return transform(X) - X = atleast2d_or_csc(X, copy=copy, force_all_finite=False) - - if len(selected) == 0: - return X - + X = check_arrays(X, allow_nans=True)[0] n_features = X.shape[1] ind = np.arange(n_features) sel = np.zeros(n_features, dtype=bool) @@ -48,6 +44,18 @@ def _transform_selected(X, transform, selected="all", copy=True): not_sel = np.logical_not(sel) n_selected = np.sum(sel) + # Add 1 to all categorical colums to avoid loosing them due to slicing + subtract = False + if sparse.isspmatrix_csr(X): + X.data += 1 + subtract = True + X = atleast2d_or_csc(X, copy=copy, force_all_finite=False) + if subtract: + X.data -= 1 + + if len(selected) == 0: + return X + if n_selected == 0: # No features selected. return X @@ -55,7 +63,20 @@ def _transform_selected(X, transform, selected="all", copy=True): # All features selected. return transform(X) else: - X_sel = transform(X[:, ind[sel]]) + # Add 1 to all categorical columns to avoid loosing them due to slicing + if sparse.issparse(X): + for idx in range(n_features): + if idx in ind[sel]: + X.data[X.indptr[idx]:X.indptr[idx + 1]] += 1 + X_ = X[:, ind[sel]] + for idx in range(n_features): + if idx in ind[sel]: + X.data[X.indptr[idx]:X.indptr[idx + 1]] -= 1 + X_.data -= 1 + else: + X_ = X[:, ind[sel]] + + X_sel = transform(X_) X_not_sel = X[:, ind[not_sel]] if sparse.issparse(X_sel) or sparse.issparse(X_not_sel): @@ -159,47 +180,68 @@ def fit(self, X, y=None): return self def _fit_transform(self, X): - """Assumes X contains only categorical features.""" + + # Add 1 to all categorical colums to avoid loosing them due to slicing + subtract = False + if sparse.isspmatrix_csr(X): + X.data += 1 + subtract = True + X = check_arrays(X, sparse_format="csc", allow_nans=True)[0] + if subtract: + X.data -= 1 + n_samples, n_features = X.shape - uniques = [np.unique(X[:,i], False, True, False) - for i in range(n_features)] - n_values = [0] + # By replacing NaNs (which means a column full on NaNs in the + # original data matrix) with a 1, we add a column full of zeros to + # the array + if sparse.isspmatrix_csc(X): + n_values = [0] + for idx in range(n_features): + if X.indptr[idx] == X.indptr[idx+1]: + values_for_idx = 1 + else: + values_for_idx = np.nanmax( + X.data[X.indptr[idx]:X.indptr[idx + 1]]) + 1 + n_values.append(values_for_idx if + np.isfinite(values_for_idx) else 1) + row_indices = X.indices + else: + n_values = np.hstack([[0], np.nanmax(X, axis=0) + 1]) + n_values[~np.isfinite(n_values)] = 1 + row_indices = np.tile(np.arange(n_samples, dtype=np.int32), + n_features) + + total_num_values = np.nansum(n_values) column_indices = [] data = [] feature_indices = [] - for idx, values_ in enumerate(uniques): - unique_elements, inverse = values_ - - # Number of unique elements in that column (without np.NaN) - n_uniques = np.sum(np.isfinite(unique_elements)) + for idx in range(X.shape[1]): + if sparse.isspmatrix_csc(X): + values_ = X.getcol(idx).data + else: + values_ = X[:, idx] - n_values.append(n_uniques) - offset = np.sum(n_values[:-1]) - - column_indices_idx = [offset if index >= n_uniques - else index + offset - for index in inverse] - data_idx = [0 if index >= n_uniques else 1 for index in inverse] - feature_indices_idx = {unique: index + offset - for index, unique in enumerate(unique_elements) - if np.isfinite(unique)} + offset = np.nansum(n_values[:idx+1]) + column_indices_idx = [offset + value if np.isfinite(value) + else offset for value in values_] + data_idx = [1 if np.isfinite(value) else 0 for value in values_] + feature_indices_idx = {value: value + offset + for value in values_ + if np.isfinite(value)} column_indices.extend(column_indices_idx) data.extend(data_idx) feature_indices.append(feature_indices_idx) - row_indices = np.tile(np.arange(n_samples, dtype=np.int32), - n_features) - self.feature_indices_ = feature_indices self.n_values = n_values + # tocsr() removes zeros in the data which represent NaNs out = sparse.coo_matrix((data, (row_indices, column_indices)), - shape=(n_samples, np.sum(n_values)), + shape=(n_samples, total_num_values), dtype=self.dtype).tocsr() - return out if self.sparse else out.toarray() def fit_transform(self, X, y=None): @@ -213,7 +255,14 @@ def fit_transform(self, X, y=None): def _transform(self, X): """Assumes X contains only categorical features.""" - X = check_arrays(X, sparse_format='csc', allow_nans=True)[0] + # Add 1 to all categorical colums to avoid loosing them due to slicing + subtract = False + if sparse.isspmatrix_csr(X): + X.data += 1 + subtract = True + X = check_arrays(X, sparse_format="csc", allow_nans=True)[0] + if subtract: + X.data -= 1 n_samples, n_features = X.shape indices = self.feature_indices_ @@ -222,19 +271,27 @@ def _transform(self, X): " Expected %d, got %d." % (len(indices), n_features)) - row_indices = np.tile(np.arange(n_samples, dtype=np.int32), - n_features) + if sparse.isspmatrix_csc(X): + row_indices = X.indices + else: + row_indices = np.tile(np.arange(n_samples, dtype=np.int32), + n_features) data = [] column_indices = [] for idx, feature in enumerate(range(n_features)): + if sparse.isspmatrix_csc(X): + values_ = X.getcol(idx).data + else: + values_ = X[:, idx] + offset = np.sum(self.n_values[:idx+1]) feature_indices_idx = self.feature_indices_[idx] column_indices_idx = [feature_indices_idx.get(x, offset) - for x in X[:,idx]] + for x in values_] data_idx = [1 if feature_indices_idx.get(x) is not None else 0 - for x in X[:, idx]] + for x in values_] column_indices.extend(column_indices_idx) data.extend(data_idx) diff --git a/tests/implementations/test_OneHotEncoder.py b/tests/implementations/test_OneHotEncoder.py index ab8b698bd9..23db1230d8 100644 --- a/tests/implementations/test_OneHotEncoder.py +++ b/tests/implementations/test_OneHotEncoder.py @@ -2,29 +2,56 @@ import numpy as np import scipy.sparse +from sklearn.utils.testing import assert_array_almost_equal from ParamSklearn.implementations.OneHotEncoder import OneHotEncoder -dense1 = [[1, 5, 9], - [1, 3, 9]] +dense1 = [[0, 1, 0], + [0, 0, 0]] dense1_1h = [[1, 0, 1, 1], [1, 1, 0, 1]] -dense2 = [[1, np.NaN, 9], - [np.NaN, 3, 9], - [2, 1, 7]] -dense2_1h = [[1, 0, 0, 0, 0, 1], - [0, 0, 0, 1, 0, 1], - [0, 1, 1, 0, 1, 0]] -dense2_partial_1h = [[1., 0., 0., 0., 9.], - [0., 0., 0., 1., 9.], - [0., 1., 1., 0., 7.]] +dense2 = [[0, np.NaN, 0], + [np.NaN, 0, 2], + [1, 1, 1]] +dense2_1h = [[1, 0, 0, 0, 1, 0, 0], + [0, 0, 1, 0, 0, 0, 1], + [0, 1, 0, 1, 0, 1, 0]] +dense2_partial_1h = [[1., 0., 0., 0., 0.], + [0., 0., 1., 0., 2.], + [0., 1., 0., 1., 1.]] -with_string_1h = [[1, 0, 0, 5, 9], - [0, 1, 0, 3, 7], - [0, 0, 1, 2, 5], - [0, 0, 0, 3, 1], - [1, 0, 0, 1, 1]] +# All NaN slice +dense3 = [[0, 1, np.NaN], + [1, 0, np.NaN]] +dense3_1h = [[1, 0, 0, 1, 0], + [0, 1, 1, 0, 0]] + +sparse1 = scipy.sparse.csc_matrix(([2, 1, 0, 0, 1, 2], + ((1, 4, 5, 2, 3, 5), + (0, 0, 0, 1, 1, 1))), shape=(6, 2)) +sparse1_1h = scipy.sparse.csc_matrix(([1, 1, 1, 1, 1, 1], + ((5, 4, 1, 2, 3, 5), + (0, 1, 2, 3, 4, 5))), shape=(6, 6)) +sparse1_paratial_1h = scipy.sparse.csc_matrix(([1, 1, 1, 0, 1, 2], + ((5, 4, 1, 2, 3, 5), + (0, 1, 2, 3, 3, 3))), + shape=(6, 4)) + +# All zeros slice +sparse2 = scipy.sparse.csc_matrix(([2, 1, 0, 0, 0, 0], + ((1, 4, 5, 2, 3, 5), + (0, 0, 0, 1, 1, 1))), shape=(6, 2)) +sparse2_1h = scipy.sparse.csc_matrix(([1, 1, 1, 1, 1, 1], + ((5, 4, 1, 2, 3, 5), + (0, 1, 2, 3, 3, 3))), shape=(6, 4)) + +sparse2_csr = scipy.sparse.csr_matrix(([2, 1, 0, 0, 0, 0], + ((1, 4, 5, 2, 3, 5), + (0, 0, 0, 1, 1, 1))), shape=(6, 2)) +sparse2_csr_1h = scipy.sparse.csr_matrix(([1, 1, 1, 1, 1, 1], + ((5, 4, 1, 2, 3, 5), + (0, 1, 2, 3, 3, 3))), shape=(6, 4)) class OneHotEncoderTest(unittest.TestCase): @@ -42,18 +69,55 @@ def test_dense2_with_non_sparse_components(self): self.fit_then_transform_dense(dense2_partial_1h, dense2, categorical_features=[True, True, False]) + def test_dense3(self): + self.fit_then_transform(dense3_1h, dense3) + self.fit_then_transform_dense(dense3_1h, dense3) + + def test_sparse1(self): + self.fit_then_transform(sparse1_1h.todense(), sparse1) + self.fit_then_transform_dense(sparse1_1h.todense(), sparse1) + + def test_sparse1_with_non_sparse_components(self): + self.fit_then_transform(sparse1_paratial_1h.todense(), sparse1, + categorical_features=[True, False]) + # This test does not apply here. The sparse matrix will be cut into a + # continouos and a categorical part, after one hot encoding only the + # categorical part is an array, the continuous part will still be a + # sparse matrix. Therefore, the OHE will only return a sparse matrix + #self.fit_then_transform_dense(sparse1_paratial_1h.todense(), sparse1, + # categorical_features=[True, False]) + + def test_sparse2(self): + self.fit_then_transform(sparse2_1h.todense(), sparse2) + self.fit_then_transform_dense(sparse2_1h.todense(), sparse2) + + def test_sparse2_csr(self): + self.fit_then_transform(sparse2_csr_1h.todense(), sparse2_csr) + self.fit_then_transform_dense(sparse2_csr_1h.todense(), sparse2_csr) + def fit_then_transform(self, expected, input, categorical_features='all'): ohe = OneHotEncoder(categorical_features=categorical_features) - ohe.fit(input) - transformation = ohe.transform(input) + transformation = ohe.fit_transform(input) + self.assertIsInstance(transformation, scipy.sparse.csr_matrix) + assert_array_almost_equal(expected, transformation.todense()) + + ohe2 = OneHotEncoder(categorical_features=categorical_features) + ohe2.fit(input) + transformation = ohe2.transform(input) self.assertIsInstance(transformation, scipy.sparse.csr_matrix) - transformation = transformation.todense() - self.assertTrue((expected == transformation).all()) + assert_array_almost_equal(expected, transformation.todense()) def fit_then_transform_dense(self, expected, input, categorical_features='all'): ohe = OneHotEncoder(categorical_features=categorical_features, sparse=False) - ohe.fit(input) - transformation = ohe.transform(input) + transformation = ohe.fit_transform(input) self.assertIsInstance(transformation, np.ndarray) - self.assertTrue((expected == transformation).all()) + assert_array_almost_equal(expected, transformation) + + ohe2 = OneHotEncoder(categorical_features=categorical_features, + sparse=False) + ohe2.fit(input) + transformation = ohe2.transform(input) + self.assertIsInstance(transformation, np.ndarray) + assert_array_almost_equal(expected, transformation) + From 61c90c6dde9f30c74c6ca30bc1d92a1794e218e9 Mon Sep 17 00:00:00 2001 From: Stefan Falkner Date: Mon, 2 Mar 2015 11:02:08 +0100 Subject: [PATCH 130/352] Added the three naive bayes classifier. --- .../components/classification/bernoulli_nb.py | 71 +++++++++++++++++++ .../components/classification/gaussian_nb.py | 57 +++++++++++++++ .../classification/multinomial_nb.py | 71 +++++++++++++++++++ 3 files changed, 199 insertions(+) create mode 100644 ParamSklearn/components/classification/bernoulli_nb.py create mode 100644 ParamSklearn/components/classification/gaussian_nb.py create mode 100644 ParamSklearn/components/classification/multinomial_nb.py diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/ParamSklearn/components/classification/bernoulli_nb.py new file mode 100644 index 0000000000..bd06749e32 --- /dev/null +++ b/ParamSklearn/components/classification/bernoulli_nb.py @@ -0,0 +1,71 @@ +import numpy as np +import sklearn.naive_bayes + +from HPOlibConfigSpace.conditions import EqualsCondition + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from ..classification_base import ParamSklearnClassificationAlgorithm + + +class BernoulliNB(ParamSklearnClassificationAlgorithm): + + def __init__(self, alpha, fit_prior, random_state=None, verbose=0): + + self.alpha = alpha + self.fit_prior = fit_prior + + self.random_state = random_state + self.verbose = int(verbose) + self.estimator = None + + def fit(self, X, Y): + self.estimator = sklearn.naive_bayes.MultinomialNB( alpha = self.alpha, fit_prior = self.fit_prior) + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(): + return {'shortname': 'MultinomialNB', + 'name': 'Multinomial Naive Bayes classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + # sklearn website says: ... BernoulliNB is designed for binary/boolean features. + 'handles_numerical_features': False, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_multiclass': False, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'preferred_dtype': np.bool} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + # the smoothing parameter is a non-negative float + # I will limit it to 1000 and put it on a logarithmic scale. (SF) + # Please adjust that, if you know a proper range, this is just a guess. + alpha = UniformFloatHyperparameter(name="alpha", lower=0, upper=1000, default=1, log=True) + + fit_prior = CategoricalHyperparameter( name="fit_prior", choices=[True, False], default=True) + + cs.add_hyperparameter(alpha) + cs.add_hyperparameter(fit_prior) + + return cs + diff --git a/ParamSklearn/components/classification/gaussian_nb.py b/ParamSklearn/components/classification/gaussian_nb.py new file mode 100644 index 0000000000..38b6973439 --- /dev/null +++ b/ParamSklearn/components/classification/gaussian_nb.py @@ -0,0 +1,57 @@ +import numpy as np +import sklearn.naive_bayes + +from HPOlibConfigSpace.conditions import EqualsCondition + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from ..classification_base import ParamSklearnClassificationAlgorithm + + +class GaussianNB(ParamSklearnClassificationAlgorithm): + + def __init__(self, random_state=None, verbose=0): + + self.random_state = random_state + self.verbose = int(verbose) + self.estimator = None + + def fit(self, X, Y): + num_features = X.shape[1] + self.estimator = sklearn.naive_bayes.GaussianNB() + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(): + return {'shortname': 'GaussianNB', + 'name': 'Gaussian Naive Bayes classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + return cs + diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py new file mode 100644 index 0000000000..aab31a4dd5 --- /dev/null +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -0,0 +1,71 @@ +import numpy as np +import sklearn.naive_bayes + +from HPOlibConfigSpace.conditions import EqualsCondition + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from ..classification_base import ParamSklearnClassificationAlgorithm + + +class MultinomialNB(ParamSklearnClassificationAlgorithm): + + def __init__(self, alpha, fit_prior, random_state=None, verbose=0): + + self.alpha = alpha + self.fit_prior = fit_prior + + self.random_state = random_state + self.verbose = int(verbose) + self.estimator = None + + def fit(self, X, Y): + self.estimator = sklearn.naive_bayes.MultinomialNB( alpha = self.alpha, fit_prior = self.fit_prior) + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(): + return {'shortname': 'MultinomialNB', + 'name': 'Multinomial Naive Bayes classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + # sklearn website says: The multinomial distribution normally requires integer feature counts. However, in practice, fractional counts such as tf-idf may also work. + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + # the smoothing parameter is a non-negative float + # I will limit it to 1000 and put it on a logarithmic scale. (SF) + # Please adjust that, if you know a proper range, this is just a guess. + alpha = UniformFloatHyperparameter(name="alpha", lower=0, upper=1000, default=1, log=True) + + fit_prior = CategoricalHyperparameter( name="fit_prior", choices=[True, False], default=True) + + cs.add_hyperparameter(alpha) + cs.add_hyperparameter(fit_prior) + + return cs + From 9f5067a4684815b51edd88f88be11722e41f0a49 Mon Sep 17 00:00:00 2001 From: Stefan Falkner Date: Mon, 2 Mar 2015 13:46:47 +0100 Subject: [PATCH 131/352] added bagged naive bayes classifier --- .../classification/bagged_gaussian_nb.py | 75 ++++++++++++++++ .../classification/bagged_multinomial_nb.py | 87 +++++++++++++++++++ .../classification/multinomial_nb.py | 4 +- 3 files changed, 164 insertions(+), 2 deletions(-) create mode 100644 ParamSklearn/components/classification/bagged_gaussian_nb.py create mode 100644 ParamSklearn/components/classification/bagged_multinomial_nb.py diff --git a/ParamSklearn/components/classification/bagged_gaussian_nb.py b/ParamSklearn/components/classification/bagged_gaussian_nb.py new file mode 100644 index 0000000000..5891e7e744 --- /dev/null +++ b/ParamSklearn/components/classification/bagged_gaussian_nb.py @@ -0,0 +1,75 @@ +import numpy as np +import sklearn.ensemble +import sklearn.naive_bayes + +from HPOlibConfigSpace.conditions import EqualsCondition + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from ..classification_base import ParamSklearnClassificationAlgorithm + + +class BaggedGaussianNB(ParamSklearnClassificationAlgorithm): + + def __init__(self, n_estimators, max_samples, max_features, random_state=None, verbose=0): + + self.n_estimators = n_estimators + self.max_samples = max_samples + self.random_state = random_state + self.verbose = int(verbose) + self.estimator = None + + def fit(self, X, Y): + self.estimator = sklearn.ensemble.BaggingClassifier(base_estimator=sklearn.naive_bayes.GaussianNB(), n_estimators = self.n_estimators, max_samples = self.max_samples, max_features = self.max_features) + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(): + return {'shortname': 'BaggedGaussianNB', + 'name': 'Bagging of Gaussian Naive Bayes classifiers', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + + # The three parameters of the bagging ensemble are set to constants for now (SF) + + #UniformIntegerHyperparameter('n_estimators', lower=10, upper = 100) + n_estimators = Constant('n_estimators', 100) + + #max_samples = UniformFloatHyperparameter('max_samples', lower = 0.5, upper=1.0) + max_samples = Constant('max_samples' ,1.0) # caution: has to be float! + + #max_features = UniformFloatHyperparameter('max_features', lower = 0.5, upper=1.0) + max_features = Constant('max_features', 1.0) # caution: has to be float! + + cs = ConfigurationSpace() + cs.add_hyperparameter(n_estimators) + cs.add_hyperparameter(max_samples) + cs.add_hyperparameter(max_features) + + return cs + diff --git a/ParamSklearn/components/classification/bagged_multinomial_nb.py b/ParamSklearn/components/classification/bagged_multinomial_nb.py new file mode 100644 index 0000000000..74f03f03da --- /dev/null +++ b/ParamSklearn/components/classification/bagged_multinomial_nb.py @@ -0,0 +1,87 @@ +import numpy as np +import sklearn.naive_bayes + +from HPOlibConfigSpace.conditions import EqualsCondition + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from ..classification_base import ParamSklearnClassificationAlgorithm + + +class BaggedMultinomialNB(ParamSklearnClassificationAlgorithm): + + def __init__(self, alpha, fit_prior, random_state=None, verbose=0): + + self.alpha = alpha + self.fit_prior = fit_prior + + self.random_state = random_state + self.verbose = int(verbose) + self.estimator = None + + def fit(self, X, Y): + self.estimator = sklearn.naive_bayes.MultinomialNB( alpha = self.alpha, fit_prior = self.fit_prior) + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(): + return {'shortname': 'MultinomialNB', + 'name': 'Multinomial Naive Bayes classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + # sklearn website says: The multinomial distribution normally requires integer feature counts. However, in practice, fractional counts such as tf-idf may also work. + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + + # The three parameters of the bagging ensamble are set to constants for now (SF) + + #UniformIntegerHyperparameter('n_estimators', lower=10, upper = 100) + n_estimators = Constant('n_estimators', 100) + + #max_samples = UniformFloatHyperparameter('max_samples', lower = 0.5, upper=1.0) + max_samples = Constant('max_samples' ,1.0) # caution: has to be float! + + #max_features = UniformFloatHyperparameter('max_features', lower = 0.5, upper=1.0) + max_features = Constant('max_features', 1.0) # caution: has to be float! + + + cs = ConfigurationSpace() + + # the smoothing parameter is a non-negative float + # I will limit it to 100 and put it on a logarithmic scale. (SF) + # Please adjust that, if you know a proper range, this is just a guess. + alpha = UniformFloatHyperparameter(name="alpha", lower=1e-2, upper=100, default=1, log=True) + + fit_prior = CategoricalHyperparameter( name="fit_prior", choices=[True, False], default=True) + + cs.add_hyperparameter(alpha) + cs.add_hyperparameter(fit_prior) + cs.add_hyperparameter(n_estimators) + cs.add_hyperparameter(max_samples) + cs.add_hyperparameter(max_features) + + return cs + diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py index aab31a4dd5..5cc84ba0ad 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -58,9 +58,9 @@ def get_hyperparameter_search_space(dataset_properties=None): cs = ConfigurationSpace() # the smoothing parameter is a non-negative float - # I will limit it to 1000 and put it on a logarithmic scale. (SF) + # I will limit it to 100 and put it on a logarithmic scale. (SF) # Please adjust that, if you know a proper range, this is just a guess. - alpha = UniformFloatHyperparameter(name="alpha", lower=0, upper=1000, default=1, log=True) + alpha = UniformFloatHyperparameter(name="alpha", lower=1e-2, upper=100, default=1, log=True) fit_prior = CategoricalHyperparameter( name="fit_prior", choices=[True, False], default=True) From 1549acdf82f2602dcee6b4986ce53342da76e10c Mon Sep 17 00:00:00 2001 From: Stefan Falkner Date: Wed, 4 Mar 2015 10:59:43 +0100 Subject: [PATCH 132/352] fixed negative value on log scale bug in beroullinb.py --- ParamSklearn/components/classification/bernoulli_nb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/ParamSklearn/components/classification/bernoulli_nb.py index bd06749e32..d0b2064889 100644 --- a/ParamSklearn/components/classification/bernoulli_nb.py +++ b/ParamSklearn/components/classification/bernoulli_nb.py @@ -60,7 +60,7 @@ def get_hyperparameter_search_space(dataset_properties=None): # the smoothing parameter is a non-negative float # I will limit it to 1000 and put it on a logarithmic scale. (SF) # Please adjust that, if you know a proper range, this is just a guess. - alpha = UniformFloatHyperparameter(name="alpha", lower=0, upper=1000, default=1, log=True) + alpha = UniformFloatHyperparameter(name="alpha", lower=1e-2, upper=100, default=1, log=True) fit_prior = CategoricalHyperparameter( name="fit_prior", choices=[True, False], default=True) From 953dcea66e1af6cd3b32e78925d0495a6c1a0e1c Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Wed, 4 Mar 2015 11:05:06 +0100 Subject: [PATCH 133/352] adjust #hyperparameter to fix unittest --- tests/test_classification.py | 2 +- tests/test_textclassification.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_classification.py b/tests/test_classification.py index 938321b0b8..d649a7c479 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -55,7 +55,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(70, len(hyperparameters)) + self.assertEqual(82, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 4, len(conditions)) diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index 95caea833f..1786592ccd 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(69, len(hyperparameters)) + self.assertEqual(81, len(hyperparameters)) # The three parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) From b7fbc6b451fe70260e3a37cc1e485549bf280d53 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Wed, 4 Mar 2015 11:06:05 +0100 Subject: [PATCH 134/352] adjust to new repo name --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 6427e91061..261f8211f6 100644 --- a/.gitignore +++ b/.gitignore @@ -3,5 +3,5 @@ .idea dist/ build/ -AutoSklearn.egg-info +ParamSklearn.egg-info From 57320f8be0b58db8e059d4f8cb87e3559e7d4c24 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 4 Mar 2015 16:54:10 +0100 Subject: [PATCH 135/352] Naive Bayes: forbid with preprocessors which exhibit negative values --- ParamSklearn/classification.py | 24 +++++++++++ .../classification/bagged_gaussian_nb.py | 20 +++++----- .../classification/bagged_multinomial_nb.py | 40 +++++++++++-------- .../components/classification/bernoulli_nb.py | 17 ++++---- .../classification/multinomial_nb.py | 7 +++- 5 files changed, 70 insertions(+), 38 deletions(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 8b28fb106f..c5d581995a 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -194,6 +194,30 @@ def get_hyperparameter_search_space(cls, include_estimators=None, except: pass + # Multinomial NB does not work with negative values -> so don't use + # it with standardization, features learning, pca + classifiers_ = ["multinomial_nb", "bagged_multinomial_nb", + "bernoulli_nb"] + feature_learning_ = ["kitchen_sinks", "sparse_filtering", "pca"] + for c in classifiers_: + try: + configuration_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "rescaling:strategy"), "standard"), + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "classifier"), c))) + except: + pass + for c, f in product(classifiers_, feature_learning_): + try: + configuration_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "preprocessor"), f), + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "classifier"), c))) + except: + pass + return configuration_space @staticmethod diff --git a/ParamSklearn/components/classification/bagged_gaussian_nb.py b/ParamSklearn/components/classification/bagged_gaussian_nb.py index 5891e7e744..c69f293038 100644 --- a/ParamSklearn/components/classification/bagged_gaussian_nb.py +++ b/ParamSklearn/components/classification/bagged_gaussian_nb.py @@ -14,16 +14,20 @@ class BaggedGaussianNB(ParamSklearnClassificationAlgorithm): - def __init__(self, n_estimators, max_samples, max_features, random_state=None, verbose=0): - + def __init__(self, n_estimators, max_samples, max_features, + random_state=None, verbose=0): self.n_estimators = n_estimators self.max_samples = max_samples + self.max_features = max_features self.random_state = random_state self.verbose = int(verbose) self.estimator = None def fit(self, X, Y): - self.estimator = sklearn.ensemble.BaggingClassifier(base_estimator=sklearn.naive_bayes.GaussianNB(), n_estimators = self.n_estimators, max_samples = self.max_samples, max_features = self.max_features) + self.estimator = sklearn.ensemble.BaggingClassifier( + base_estimator=sklearn.naive_bayes.GaussianNB(), + n_estimators=self.n_estimators, max_samples=self.max_samples, + max_features=self.max_features) self.estimator.fit(X, Y) return self @@ -54,16 +58,10 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - - # The three parameters of the bagging ensemble are set to constants for now (SF) - - #UniformIntegerHyperparameter('n_estimators', lower=10, upper = 100) + # The three parameters of the bagging ensemble are set to + # constants for now (SF) n_estimators = Constant('n_estimators', 100) - - #max_samples = UniformFloatHyperparameter('max_samples', lower = 0.5, upper=1.0) max_samples = Constant('max_samples' ,1.0) # caution: has to be float! - - #max_features = UniformFloatHyperparameter('max_features', lower = 0.5, upper=1.0) max_features = Constant('max_features', 1.0) # caution: has to be float! cs = ConfigurationSpace() diff --git a/ParamSklearn/components/classification/bagged_multinomial_nb.py b/ParamSklearn/components/classification/bagged_multinomial_nb.py index 74f03f03da..e08ec789fa 100644 --- a/ParamSklearn/components/classification/bagged_multinomial_nb.py +++ b/ParamSklearn/components/classification/bagged_multinomial_nb.py @@ -1,5 +1,6 @@ import numpy as np import sklearn.naive_bayes +import sklearn.ensemble from HPOlibConfigSpace.conditions import EqualsCondition @@ -13,17 +14,25 @@ class BaggedMultinomialNB(ParamSklearnClassificationAlgorithm): - def __init__(self, alpha, fit_prior, random_state=None, verbose=0): - + def __init__(self, alpha, fit_prior, n_estimators, max_samples, + max_features, random_state=None, verbose=0): self.alpha = alpha self.fit_prior = fit_prior + self.n_estimators = n_estimators + self.max_samples = max_samples + self.max_features = max_features + self.random_state = random_state self.verbose = int(verbose) self.estimator = None def fit(self, X, Y): - self.estimator = sklearn.naive_bayes.MultinomialNB( alpha = self.alpha, fit_prior = self.fit_prior) + self.estimator = sklearn.ensemble.BaggingClassifier( + base_estimator=sklearn.naive_bayes.MultinomialNB( + alpha=self.alpha, fit_prior=self.fit_prior), + n_estimators=self.n_estimators, max_samples=self.max_samples, + max_features=self.max_features) self.estimator.fit(X, Y) return self @@ -43,7 +52,9 @@ def get_properties(): 'name': 'Multinomial Naive Bayes classifier', 'handles_missing_values': False, 'handles_nominal_values': False, - # sklearn website says: The multinomial distribution normally requires integer feature counts. However, in practice, fractional counts such as tf-idf may also work. + # sklearn website says: The multinomial distribution normally + # requires integer feature counts. However, in practice, + # fractional counts such as tf-idf may also work. 'handles_numerical_features': True, 'prefers_data_scaled': False, 'prefers_data_normalized': False, @@ -55,27 +66,22 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - - # The three parameters of the bagging ensamble are set to constants for now (SF) - - #UniformIntegerHyperparameter('n_estimators', lower=10, upper = 100) + # The three parameters of the bagging ensamble are set to constants + # for now (SF) n_estimators = Constant('n_estimators', 100) - - #max_samples = UniformFloatHyperparameter('max_samples', lower = 0.5, upper=1.0) max_samples = Constant('max_samples' ,1.0) # caution: has to be float! - - #max_features = UniformFloatHyperparameter('max_features', lower = 0.5, upper=1.0) max_features = Constant('max_features', 1.0) # caution: has to be float! - - + cs = ConfigurationSpace() # the smoothing parameter is a non-negative float # I will limit it to 100 and put it on a logarithmic scale. (SF) # Please adjust that, if you know a proper range, this is just a guess. - alpha = UniformFloatHyperparameter(name="alpha", lower=1e-2, upper=100, default=1, log=True) - - fit_prior = CategoricalHyperparameter( name="fit_prior", choices=[True, False], default=True) + alpha = UniformFloatHyperparameter(name="alpha", lower=1e-2, upper=100, + default=1, log=True) + fit_prior = CategoricalHyperparameter( name="fit_prior", + choices=[True, False], + default=True) cs.add_hyperparameter(alpha) cs.add_hyperparameter(fit_prior) diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/ParamSklearn/components/classification/bernoulli_nb.py index d0b2064889..95aa4b6921 100644 --- a/ParamSklearn/components/classification/bernoulli_nb.py +++ b/ParamSklearn/components/classification/bernoulli_nb.py @@ -12,9 +12,7 @@ class BernoulliNB(ParamSklearnClassificationAlgorithm): - def __init__(self, alpha, fit_prior, random_state=None, verbose=0): - self.alpha = alpha self.fit_prior = fit_prior @@ -22,9 +20,9 @@ def __init__(self, alpha, fit_prior, random_state=None, verbose=0): self.verbose = int(verbose) self.estimator = None - def fit(self, X, Y): + def fit(self, X, y): self.estimator = sklearn.naive_bayes.MultinomialNB( alpha = self.alpha, fit_prior = self.fit_prior) - self.estimator.fit(X, Y) + self.estimator.fit(X, y) return self def predict(self, X): @@ -43,7 +41,8 @@ def get_properties(): 'name': 'Multinomial Naive Bayes classifier', 'handles_missing_values': False, 'handles_nominal_values': False, - # sklearn website says: ... BernoulliNB is designed for binary/boolean features. + # sklearn website says: ... BernoulliNB is designed for + # binary/boolean features. 'handles_numerical_features': False, 'prefers_data_scaled': False, 'prefers_data_normalized': False, @@ -60,12 +59,14 @@ def get_hyperparameter_search_space(dataset_properties=None): # the smoothing parameter is a non-negative float # I will limit it to 1000 and put it on a logarithmic scale. (SF) # Please adjust that, if you know a proper range, this is just a guess. - alpha = UniformFloatHyperparameter(name="alpha", lower=1e-2, upper=100, default=1, log=True) + alpha = UniformFloatHyperparameter(name="alpha", lower=1e-2, upper=100, + default=1, log=True) - fit_prior = CategoricalHyperparameter( name="fit_prior", choices=[True, False], default=True) + fit_prior = CategoricalHyperparameter(name="fit_prior", + choices=[True, False], + default=True) cs.add_hyperparameter(alpha) cs.add_hyperparameter(fit_prior) return cs - diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py index 5cc84ba0ad..e891c81fbc 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -23,7 +23,8 @@ def __init__(self, alpha, fit_prior, random_state=None, verbose=0): self.estimator = None def fit(self, X, Y): - self.estimator = sklearn.naive_bayes.MultinomialNB( alpha = self.alpha, fit_prior = self.fit_prior) + self.estimator = sklearn.naive_bayes.MultinomialNB(alpha=self.alpha, + fit_prior=self.fit_prior) self.estimator.fit(X, Y) return self @@ -43,7 +44,9 @@ def get_properties(): 'name': 'Multinomial Naive Bayes classifier', 'handles_missing_values': False, 'handles_nominal_values': False, - # sklearn website says: The multinomial distribution normally requires integer feature counts. However, in practice, fractional counts such as tf-idf may also work. + # sklearn website says: The multinomial distribution normally + # requires integer feature counts. However, in practice, + # fractional counts such as tf-idf may also work. 'handles_numerical_features': True, 'prefers_data_scaled': False, 'prefers_data_normalized': False, From 378e828ea556c1d06d68a85b9cb344f87833a471 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 4 Mar 2015 17:02:01 +0100 Subject: [PATCH 136/352] Adapt tests to new NB classifiers --- source/first_steps.rst | 2 +- tests/test_classification.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/source/first_steps.rst b/source/first_steps.rst index 471a999d3a..e4a9a930eb 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -24,4 +24,4 @@ configuration on the iris dataset. >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.81999999999999995 + 0.83999999999999997 diff --git a/tests/test_classification.py b/tests/test_classification.py index d649a7c479..ca84c2a5d9 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -80,10 +80,9 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): self.assertNotIn('pca', str(cs)) def test_get_hyperparameter_search_space_dataset_properties(self): - full_cs = ParamSklearnClassifier.get_hyperparameter_search_space() cs_mc = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'multiclass': True}) - self.assertEqual(full_cs, cs_mc) + self.assertNotIn('bernoulli_nb', str(cs_mc)) cs_ml = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'multilabel': True}) From d4c5bdbad3340d65fe9b728c3e06752ee0d653f1 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 6 Mar 2015 11:20:10 +0100 Subject: [PATCH 137/352] minor --- ParamSklearn/util.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index 2e2b63ee4f..ce9ba412ae 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -70,12 +70,12 @@ def get_dataset(dataset='iris', make_sparse=False): return X_train, Y_train, X_test, Y_test -def _test_classifier(Classifier, dataset='iris'): +def _test_classifier(classifier, dataset='iris'): X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, make_sparse=False) - configuration_space = Classifier.get_hyperparameter_search_space() + configuration_space = classifier.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() - classifier = Classifier(random_state=1, + classifier = classifier(random_state=1, **{hp.hyperparameter.name: hp.value for hp in default.values.values()}) predictor = classifier.fit(X_train, Y_train) From b3939a47ef4256ca2a28d44ba02264d04cd74ec6 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 6 Mar 2015 11:20:53 +0100 Subject: [PATCH 138/352] add adaboost for classification --- .../components/classification/adaboost.py | 88 +++++++++++++++++++ .../classification/test_adaboost.py | 24 +++++ tests/test_classification.py | 2 +- tests/test_textclassification.py | 2 +- 4 files changed, 114 insertions(+), 2 deletions(-) create mode 100644 tests/components/classification/test_adaboost.py diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index e69de29bb2..46c9a7d17b 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -0,0 +1,88 @@ +import numpy as np +import sklearn.ensemble + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, Constant + +from ..classification_base import ParamSklearnClassificationAlgorithm + + +class AdaboostClassifier(ParamSklearnClassificationAlgorithm): + + def __init__(self, n_estimators, learning_rate, algorithm='SAMME.R', + base_estimator=None, random_state=None): + self.n_estimators = int(n_estimators) + self.learning_rate = float(learning_rate) + + if algorithm not in ('SAMME.R', "SAMME"): + raise ValueError("Illegal 'algorithm': %s" % algorithm) + self.algorithm = algorithm + self.random_state = random_state + + if base_estimator is None: + self.base_estimator = base_estimator + elif base_estimator == "None": + self.base_estimator = None + else: + raise ValueError("Illegal ") + + self.estimator = None + + def fit(self, X, Y): + self.estimator = sklearn.ensemble.AdaBoostClassifier( + base_estimator=self.base_estimator, + n_estimators=self.n_estimators, + learning_rate=self.learning_rate, + algorithm=self.algorithm, + random_state=self.random_state + ) + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(): + return {'shortname': 'AB', + 'name': 'AdaBoost Classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': False, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': True, + # TODO find out what is best used here! + # But rather fortran or C-contiguous? + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + learning_rate = UniformFloatHyperparameter( + name="learning_rate", lower=0.0001, upper=1, default=0.1, log=True) + algorithm = Constant(name="algorithm", value="SAMME.R") + base_estimator = Constant(name="base_estimator", value="None") + + n_estimators = UniformIntegerHyperparameter( + name="n_estimators", lower=5, upper=50, default=10, log=False) + + cs = ConfigurationSpace() + cs.add_hyperparameter(n_estimators) + cs.add_hyperparameter(learning_rate) + cs.add_hyperparameter(base_estimator) + cs.add_hyperparameter(algorithm) + + return cs + diff --git a/tests/components/classification/test_adaboost.py b/tests/components/classification/test_adaboost.py new file mode 100644 index 0000000000..0497681945 --- /dev/null +++ b/tests/components/classification/test_adaboost.py @@ -0,0 +1,24 @@ +import unittest + +from ParamSklearn.components.classification.adaboost import \ + AdaboostClassifier +from ParamSklearn.util import _test_classifier + +import sklearn.metrics + + +class AdaBoostComponentTest(unittest.TestCase): + def test_default_configuration_iris(self): + for i in range(10): + predictions, targets = \ + _test_classifier(AdaboostClassifier) + self.assertAlmostEqual(0.93999999999999995, + sklearn.metrics.accuracy_score(predictions, targets)) + + def test_default_configuration_digits(self): + for i in range(10): + predictions, targets = \ + _test_classifier(classifier=AdaboostClassifier, + dataset='digits') + self.assertAlmostEqual(0.48791985857395404, + sklearn.metrics.accuracy_score(predictions, targets)) diff --git a/tests/test_classification.py b/tests/test_classification.py index ca84c2a5d9..c41d9e2345 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -55,7 +55,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(82, len(hyperparameters)) + self.assertEqual(86, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 4, len(conditions)) diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index 1786592ccd..de55673b23 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(81, len(hyperparameters)) + self.assertEqual(85, len(hyperparameters)) # The three parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) From ff90dc51c9dd9fca07ce467b970338d43da30e0c Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 6 Mar 2015 13:54:37 +0100 Subject: [PATCH 139/352] add input/output key; reorder inputs --- .../components/classification/adaboost.py | 5 ++++- .../classification/bagged_gaussian_nb.py | 15 +++++++-------- .../classification/bagged_multinomial_nb.py | 10 +++++----- .../components/classification/bernoulli_nb.py | 10 +++++----- .../components/classification/extra_trees.py | 10 +++++++--- .../components/classification/gaussian_nb.py | 10 ++++------ .../classification/gradient_boosting.py | 10 +++++----- .../classification/k_nearest_neighbors.py | 5 ++++- .../components/classification/liblinear.py | 11 +++++++---- .../components/classification/libsvm_svc.py | 9 ++++++--- .../components/classification/multinomial_nb.py | 10 +++++----- .../components/classification/random_forest.py | 8 ++++++-- ParamSklearn/components/classification/sgd.py | 11 +++++++---- ParamSklearn/util.py | 6 ++++++ 14 files changed, 78 insertions(+), 52 deletions(-) diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index 46c9a7d17b..6c65373256 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -5,7 +5,8 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, Constant -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS class AdaboostClassifier(ParamSklearnClassificationAlgorithm): @@ -64,6 +65,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, + 'input': (SPARSE, DENSE), + 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/bagged_gaussian_nb.py b/ParamSklearn/components/classification/bagged_gaussian_nb.py index c69f293038..bfa9b5e7f2 100644 --- a/ParamSklearn/components/classification/bagged_gaussian_nb.py +++ b/ParamSklearn/components/classification/bagged_gaussian_nb.py @@ -2,14 +2,11 @@ import sklearn.ensemble import sklearn.naive_bayes -from HPOlibConfigSpace.conditions import EqualsCondition - from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant +from HPOlibConfigSpace.hyperparameters import Constant -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class BaggedGaussianNB(ParamSklearnClassificationAlgorithm): @@ -54,6 +51,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, 'preferred_dtype': np.float32} @staticmethod @@ -61,8 +60,8 @@ def get_hyperparameter_search_space(dataset_properties=None): # The three parameters of the bagging ensemble are set to # constants for now (SF) n_estimators = Constant('n_estimators', 100) - max_samples = Constant('max_samples' ,1.0) # caution: has to be float! - max_features = Constant('max_features', 1.0) # caution: has to be float! + max_samples = Constant('max_samples', 1.0) # caution: has to be float + max_features = Constant('max_features', 1.0) # caution: has to be float cs = ConfigurationSpace() cs.add_hyperparameter(n_estimators) diff --git a/ParamSklearn/components/classification/bagged_multinomial_nb.py b/ParamSklearn/components/classification/bagged_multinomial_nb.py index e08ec789fa..35580a007d 100644 --- a/ParamSklearn/components/classification/bagged_multinomial_nb.py +++ b/ParamSklearn/components/classification/bagged_multinomial_nb.py @@ -2,14 +2,12 @@ import sklearn.naive_bayes import sklearn.ensemble -from HPOlibConfigSpace.conditions import EqualsCondition - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant + CategoricalHyperparameter, Constant -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class BaggedMultinomialNB(ParamSklearnClassificationAlgorithm): @@ -62,6 +60,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, 'preferred_dtype': np.float32} @staticmethod diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/ParamSklearn/components/classification/bernoulli_nb.py index 95aa4b6921..7d88166c7a 100644 --- a/ParamSklearn/components/classification/bernoulli_nb.py +++ b/ParamSklearn/components/classification/bernoulli_nb.py @@ -1,14 +1,12 @@ import numpy as np import sklearn.naive_bayes -from HPOlibConfigSpace.conditions import EqualsCondition - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant + CategoricalHyperparameter -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class BernoulliNB(ParamSklearnClassificationAlgorithm): @@ -50,6 +48,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, 'preferred_dtype': np.bool} @staticmethod diff --git a/ParamSklearn/components/classification/extra_trees.py b/ParamSklearn/components/classification/extra_trees.py index dc7a081bcc..9f964b16e2 100644 --- a/ParamSklearn/components/classification/extra_trees.py +++ b/ParamSklearn/components/classification/extra_trees.py @@ -4,11 +4,13 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from HPOlibConfigSpace.conditions import EqualsCondition -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS + # get our own forests to replace the sklearn ones -from ...implementations import forest +from ParamSklearn.implementations import forest + class ExtraTreesClassifier(ParamSklearnClassificationAlgorithm): @@ -104,6 +106,8 @@ def get_properties(): 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/gaussian_nb.py b/ParamSklearn/components/classification/gaussian_nb.py index 38b6973439..0e7a104cd8 100644 --- a/ParamSklearn/components/classification/gaussian_nb.py +++ b/ParamSklearn/components/classification/gaussian_nb.py @@ -1,14 +1,10 @@ import numpy as np import sklearn.naive_bayes -from HPOlibConfigSpace.conditions import EqualsCondition - from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class GaussianNB(ParamSklearnClassificationAlgorithm): @@ -48,6 +44,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, 'preferred_dtype': np.float32} @staticmethod diff --git a/ParamSklearn/components/classification/gradient_boosting.py b/ParamSklearn/components/classification/gradient_boosting.py index 169321d9f8..2ac6ffd18c 100644 --- a/ParamSklearn/components/classification/gradient_boosting.py +++ b/ParamSklearn/components/classification/gradient_boosting.py @@ -1,14 +1,12 @@ import numpy as np import sklearn.ensemble -from HPOlibConfigSpace.conditions import EqualsCondition - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant + UniformIntegerHyperparameter, UnParametrizedHyperparameter, Constant -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class GradientBoostingClassifier(ParamSklearnClassificationAlgorithm): @@ -109,6 +107,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index b2aa9405d5..ba71f4cd15 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -5,7 +5,8 @@ Constant, UnParametrizedHyperparameter, UniformIntegerHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class KNearestNeighborsClassifier(ParamSklearnClassificationAlgorithm): @@ -57,6 +58,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, + 'input': (DENSE, ), + 'output': PREDICTIONS, # TODO find out what is best used here! 'preferred_dtype' : None} diff --git a/ParamSklearn/components/classification/liblinear.py b/ParamSklearn/components/classification/liblinear.py index 36293a41c1..1eaf94c901 100644 --- a/ParamSklearn/components/classification/liblinear.py +++ b/ParamSklearn/components/classification/liblinear.py @@ -1,4 +1,3 @@ -import numpy as np import sklearn.svm from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -7,8 +6,10 @@ from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ ForbiddenAndConjunction -from ..classification_base import ParamSklearnClassificationAlgorithm -from ...implementations.util import softmax +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.implementations.util import softmax +from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS + class LibLinear_SVC(ParamSklearnClassificationAlgorithm): # Liblinear is not deterministic as it uses a RNG inside @@ -77,8 +78,10 @@ def get_properties(): # TODO find out of this is right! # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use 'handles_sparse': True, + 'input': (SPARSE, DENSE), + 'output': PREDICTIONS, # TODO find out what is best used here! - 'preferred_dtype' : None} + 'preferred_dtype': None} @staticmethod def get_hyperparameter_search_space(dataset_properties=None): diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index 84eb07da5a..85036ea919 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -1,13 +1,14 @@ import sklearn.svm from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.conditions import EqualsCondition, OrConjunction, \ - InCondition +from HPOlibConfigSpace.conditions import EqualsCondition, InCondition from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS + class LibSVM_SVC(ParamSklearnClassificationAlgorithm): def __init__(self, C, kernel, gamma, shrinking, tol, class_weight, max_iter, @@ -82,6 +83,8 @@ def get_properties(): # TODO find out of this is right! # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use 'handles_sparse': True, + 'input': (DENSE, SPARSE), + 'output': PREDICTIONS, # TODO find out what is best used here! # C-continouos and double precision... 'preferred_dtype': None} diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py index e891c81fbc..97151616ee 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -1,14 +1,12 @@ import numpy as np import sklearn.naive_bayes -from HPOlibConfigSpace.conditions import EqualsCondition - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant + CategoricalHyperparameter -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class MultinomialNB(ParamSklearnClassificationAlgorithm): @@ -54,6 +52,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, 'preferred_dtype': np.float32} @staticmethod diff --git a/ParamSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py index bc65f244ac..65a5a01a4a 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -5,9 +5,11 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS # get our own forests to replace the sklearn ones -from ...implementations import forest +from ParamSklearn.implementations import forest + class RandomForest(ParamSklearnClassificationAlgorithm): def __init__(self, n_estimators, criterion, max_features, @@ -93,6 +95,8 @@ def get_properties(): 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/sgd.py b/ParamSklearn/components/classification/sgd.py index c064d13530..da0dd6b3d5 100644 --- a/ParamSklearn/components/classification/sgd.py +++ b/ParamSklearn/components/classification/sgd.py @@ -1,14 +1,15 @@ -import numpy as np from sklearn.linear_model.stochastic_gradient import SGDClassifier from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter, UnParametrizedHyperparameter, \ UniformIntegerHyperparameter -from HPOlibConfigSpace.conditions import EqualsCondition, OrConjunction +from HPOlibConfigSpace.conditions import EqualsCondition + +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.implementations.util import softmax -from ..classification_base import ParamSklearnClassificationAlgorithm -from ...implementations.util import softmax class SGD(ParamSklearnClassificationAlgorithm): def __init__(self, loss, penalty, alpha, fit_intercept, n_iter, @@ -85,6 +86,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, + 'input': (DENSE, SPARSE), + 'output': PREDICTIONS, # TODO find out what is best used here! 'preferred_dtype' : None} diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index ce9ba412ae..c61011f18e 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -10,6 +10,12 @@ import sklearn.datasets +SPARSE = 'SPARSE' +DENSE = 'DENSE' +PREDICTIONS = 'PREDICTIONS' +INPUT = 'INPUT' + + def find_sklearn_classes(class_): classifiers = set() all_subdirectories = [] From a3360163883fcfbb5d48cca4269544888b65e49c Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 6 Mar 2015 13:55:11 +0100 Subject: [PATCH 140/352] add input/output key; resort imports --- ParamSklearn/components/preprocessing/imputation.py | 7 +++++-- .../components/preprocessing/kitchen_sinks.py | 7 +++++-- ParamSklearn/components/preprocessing/pca.py | 10 ++++++---- ParamSklearn/components/preprocessing/rescaling.py | 11 +++++++---- .../select_percentile_classification.py | 9 ++++++--- .../preprocessing/select_percentile_regression.py | 9 ++++++--- .../components/preprocessing/sparse_filtering.py | 13 ++++++++----- .../components/preprocessing/truncatedSVD.py | 11 +++++++---- 8 files changed, 50 insertions(+), 27 deletions(-) diff --git a/ParamSklearn/components/preprocessing/imputation.py b/ParamSklearn/components/preprocessing/imputation.py index 4c2efc113f..fafd08b2e1 100644 --- a/ParamSklearn/components/preprocessing/imputation.py +++ b/ParamSklearn/components/preprocessing/imputation.py @@ -3,7 +3,8 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import DENSE, SPARSE, INPUT class Imputation(ParamSklearnPreprocessingAlgorithm): @@ -39,6 +40,8 @@ def get_properties(): # TODO find out of this is right! 'handles_sparse': True, 'handles_dense': True, + 'input': (DENSE, SPARSE), + 'output': INPUT, 'preferred_dtype': None} @staticmethod @@ -52,4 +55,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/kitchen_sinks.py b/ParamSklearn/components/preprocessing/kitchen_sinks.py index b4c4397648..9f4c968a02 100644 --- a/ParamSklearn/components/preprocessing/kitchen_sinks.py +++ b/ParamSklearn/components/preprocessing/kitchen_sinks.py @@ -4,7 +4,8 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import SPARSE, DENSE, INPUT class RandomKitchenSinks(ParamSklearnPreprocessingAlgorithm): @@ -46,6 +47,8 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': True, + 'input': (SPARSE, DENSE), + 'ouput': INPUT, 'preferred_dtype': None} @staticmethod @@ -61,5 +64,5 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/pca.py b/ParamSklearn/components/preprocessing/pca.py index 895b234d8b..5e21acddbb 100644 --- a/ParamSklearn/components/preprocessing/pca.py +++ b/ParamSklearn/components/preprocessing/pca.py @@ -1,11 +1,11 @@ import sklearn.decomposition -from HPOlibConfigSpace.configuration_space import ConfigurationSpace, \ - Configuration +from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import DENSE class PCA(ParamSklearnPreprocessingAlgorithm): @@ -59,6 +59,8 @@ def get_properties(): 'is_deterministic': False, 'handles_sparse': False, 'handles_dense': True, + 'input': (DENSE, ), + 'output': DENSE, # TODO find out what is best used here! 'preferred_dtype': None} @@ -75,4 +77,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/rescaling.py b/ParamSklearn/components/preprocessing/rescaling.py index f723f49092..586fbc0759 100644 --- a/ParamSklearn/components/preprocessing/rescaling.py +++ b/ParamSklearn/components/preprocessing/rescaling.py @@ -1,9 +1,10 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from ...implementations.StandardScaler import StandardScaler -from ...implementations.MinMaxScaler import MinMaxScaler -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.implementations.StandardScaler import StandardScaler +from ParamSklearn.implementations.MinMaxScaler import MinMaxScaler +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import DENSE, SPARSE, INPUT class Rescaling(ParamSklearnPreprocessingAlgorithm): @@ -43,6 +44,8 @@ def get_properties(): # TODO find out of this is right! 'handles_sparse': True, 'handles_dense': True, + 'input': (SPARSE, DENSE), + 'output': INPUT, # Add something here... 'preferred_dtype': None} @@ -57,4 +60,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/select_percentile_classification.py b/ParamSklearn/components/preprocessing/select_percentile_classification.py index a1d317a5d3..a3a5873ccc 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_classification.py +++ b/ParamSklearn/components/preprocessing/select_percentile_classification.py @@ -3,8 +3,9 @@ import sklearn.feature_selection -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm -from select_percentile import SelectPercentileBase +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.preprocessing.select_percentile import SelectPercentileBase +from ParamSklearn.util import DENSE, SPARSE, INPUT class SelectPercentileClassification(SelectPercentileBase, @@ -44,6 +45,8 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': True, + 'input': (SPARSE, DENSE), + 'output': INPUT, 'preferred_dtype': None} @staticmethod @@ -67,5 +70,5 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/select_percentile_regression.py b/ParamSklearn/components/preprocessing/select_percentile_regression.py index 9844d813f5..9668da6fdd 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_regression.py +++ b/ParamSklearn/components/preprocessing/select_percentile_regression.py @@ -3,8 +3,9 @@ import sklearn.feature_selection -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm -from select_percentile import SelectPercentileBase +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.preprocessing.select_percentile import SelectPercentileBase +from ParamSklearn.util import DENSE class SelectPercentileRegression(SelectPercentileBase, @@ -42,6 +43,8 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'handles_dense': True, + 'input': (DENSE, ), + 'output': DENSE, 'preferred_dtype': None} @staticmethod @@ -59,5 +62,5 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/sparse_filtering.py b/ParamSklearn/components/preprocessing/sparse_filtering.py index 184bd4a1a8..026dcacfd1 100644 --- a/ParamSklearn/components/preprocessing/sparse_filtering.py +++ b/ParamSklearn/components/preprocessing/sparse_filtering.py @@ -1,9 +1,10 @@ -from HPOlibConfigSpace.configuration_space import ConfigurationSpace, \ - Configuration +from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm -from ...implementations.SparseFiltering import SparseFiltering as SparseFilteringImpl +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.implementations.SparseFiltering import SparseFiltering as SparseFilteringImpl +from ParamSklearn.util import DENSE + class SparseFiltering(ParamSklearnPreprocessingAlgorithm): @@ -38,6 +39,8 @@ def get_properties(): 'is_deterministic': False, 'handles_sparse': False, 'handles_dense': True, + 'input': (DENSE, ), + 'output': DENSE, 'preferred_dtype': None} @@ -55,4 +58,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/truncatedSVD.py b/ParamSklearn/components/preprocessing/truncatedSVD.py index a45456b092..9e70e5871b 100644 --- a/ParamSklearn/components/preprocessing/truncatedSVD.py +++ b/ParamSklearn/components/preprocessing/truncatedSVD.py @@ -1,11 +1,12 @@ +import numpy as np + import sklearn.decomposition from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm -import numpy as np - +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import SPARSE, DENSE class TruncatedSVD(ParamSklearnPreprocessingAlgorithm): @@ -43,6 +44,8 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': False, + 'input': (SPARSE, ), + 'output': DENSE, 'preferred_dtype': np.float32} @staticmethod @@ -55,4 +58,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name From 506bbce1da3ba05064d5f54b0c5751a9f69f6ece Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 6 Mar 2015 14:04:23 +0100 Subject: [PATCH 141/352] add unittest to assert keys in preperty dict --- tests/test_classification.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/tests/test_classification.py b/tests/test_classification.py index c41d9e2345..50225cc23b 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -1,8 +1,5 @@ __author__ = 'feurerm' -import copy -import numpy as np -import StringIO import unittest import sklearn.datasets @@ -18,12 +15,27 @@ from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm import ParamSklearn.components.classification as classification_components import ParamSklearn.components.preprocessing as preprocessing_components -from ParamSklearn.util import get_dataset +from ParamSklearn.util import get_dataset, DENSE, SPARSE, PREDICTIONS + class TestParamSklearnClassifier(unittest.TestCase): # TODO: test for both possible ways to initialize ParamSklearn # parameters and other... + def test_io_dict(self): + classifiers = classification_components._classifiers + for c in classifiers: + self.assertIn('input', c.get_properties()) + self.assertIn('output', c.get_properties()) + inp = c.get_properties()['input'] + output = c.get_properties()['output'] + + self.assertIsInstance(inp, tuple) + self.assertIsInstance(output, str) + for i in inp: + self.assertIn(i, (SPARSE, DENSE)) + self.assertEqual(output, PREDICTIONS) + def test_find_classifiers(self): classifiers = classification_components._classifiers self.assertGreaterEqual(len(classifiers), 1) From fb8b974bd4fad46b4af41c67c4adf261f4902918 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 6 Mar 2015 16:44:45 +0100 Subject: [PATCH 142/352] minor --- .../components/preprocessing/select_percentile_classification.py | 1 - .../components/preprocessing/select_percentile_regression.py | 1 - 2 files changed, 2 deletions(-) diff --git a/ParamSklearn/components/preprocessing/select_percentile_classification.py b/ParamSklearn/components/preprocessing/select_percentile_classification.py index a3a5873ccc..7ab6d58a23 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_classification.py +++ b/ParamSklearn/components/preprocessing/select_percentile_classification.py @@ -28,7 +28,6 @@ def __init__(self, percentile, score_func="chi2", random_state=None): raise ValueError("score_func must be in ('chi2, 'f_classif'), " "but is: %s" % score_func) - @staticmethod def get_properties(): return {'shortname': 'SPC', diff --git a/ParamSklearn/components/preprocessing/select_percentile_regression.py b/ParamSklearn/components/preprocessing/select_percentile_regression.py index 9668da6fdd..0ac5e668ce 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_regression.py +++ b/ParamSklearn/components/preprocessing/select_percentile_regression.py @@ -26,7 +26,6 @@ def __init__(self, percentile, score_func="f_classif", random_state=None): else: raise ValueError("Don't know this scoring function: %s" % score_func) - @staticmethod def get_properties(): return {'shortname': 'SPR', From 59cb50b1e11c3919ba5ccd063dad5efebd7f4c07 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 6 Mar 2015 16:44:53 +0100 Subject: [PATCH 143/352] fix typo --- ParamSklearn/components/preprocessing/kitchen_sinks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/components/preprocessing/kitchen_sinks.py b/ParamSklearn/components/preprocessing/kitchen_sinks.py index 9f4c968a02..ebab4bd265 100644 --- a/ParamSklearn/components/preprocessing/kitchen_sinks.py +++ b/ParamSklearn/components/preprocessing/kitchen_sinks.py @@ -48,7 +48,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': True, 'input': (SPARSE, DENSE), - 'ouput': INPUT, + 'output': INPUT, 'preferred_dtype': None} @staticmethod From eb8df6c867f5d016559d737812dd240b280512c0 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 9 Mar 2015 13:57:59 +0100 Subject: [PATCH 144/352] add input/output and generic searchspace generation --- ParamSklearn/classification.py | 267 ++++++++++++++++++++++++++++++++- tests/test_classification.py | 8 +- 2 files changed, 268 insertions(+), 7 deletions(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index c5d581995a..43be0e69d9 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -1,11 +1,15 @@ +from collections import OrderedDict from itertools import product +import numpy as np + from sklearn.base import ClassifierMixin from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction -from . import components as components -from .base import ParamSklearnBaseEstimator +from ParamSklearn import components as components +from ParamSklearn.base import ParamSklearnBaseEstimator +from ParamSklearn.util import SPARSE, DENSE, INPUT class ParamSklearnClassifier(ClassifierMixin, ParamSklearnBaseEstimator): @@ -72,6 +76,83 @@ def predict_proba(self, X): return self._pipeline.steps[-1][-1].predict_proba(Xt) + @classmethod + def create_match_array(cls, preprocessors, classifiers, sparse): + # Now select combinations that work + # We build a binary matrix, where a 1 indicates, that a combination + # work on this dataset based in the dataset and the input/output formats + # A 'zero'-row (column) is an unusable preprocessor (classifier) + # A single zero results in an forbidden condition + preprocessors_list = preprocessors.keys() + classifiers_list = classifiers.keys() + matches = np.zeros([len(preprocessors), len(classifiers)]) + for pidx, p in enumerate(preprocessors_list): + p_out = preprocessors[p].get_properties()['output'] + for cidx, c in enumerate(classifiers_list): + c_in = classifiers[c].get_properties()['input'] + if p_out == INPUT: + # Preprocessor does not change the format + if (sparse and SPARSE in c_in) or \ + (not sparse and DENSE in c_in): + # Classifier input = Dataset format + matches[pidx, cidx] = 1 + continue + else: + # These won't work + pass + elif p_out == DENSE and DENSE in c_in: + matches[pidx, cidx] = 1 + continue + elif p_out == SPARSE and SPARSE in c_in: + matches[pidx, cidx] = 1 + continue + else: + # These won't work + pass + return matches, preprocessors_list, classifiers_list + + @classmethod + def remove_non_matches(cls, matches, preprocessors_list, classifiers_list): + # We might delete some rows/columns + l = len(preprocessors_list) + for pidx, p in enumerate(preprocessors_list): + # We use the reverse idx as it stays correct + # when we start removing rows + reverse_idx = -l + pidx + if (matches[pidx, :] == 0).all(): + # unusable preprocessor, delete row + matches = np.delete(matches, reverse_idx, axis=0) + #del preprocessors[p] + del preprocessors_list[reverse_idx] + l = len(classifiers_list) + for cidx, c in enumerate(classifiers_list): + # We use the reverse idx as it stays correct + # when we start removing cols + reverse_idx = -l + cidx + if (matches[:, cidx] == 0).all(): + # unusable preprocessor, delete row + matches = np.delete(matches, reverse_idx, axis=1) + #del classifiers[c] + del classifiers_list[reverse_idx] + return matches, preprocessors_list, classifiers_list + + @classmethod + def add_forbidden_clauses(cls, configuration_space, preprocessors_list, classifiers_list, matches): + for pdx, p in enumerate(preprocessors_list): + if np.sum(matches[pdx, :]) == matches.shape[1]: + continue + for cdx, c in enumerate(classifiers_list): + if matches[pdx, cdx] == 0: + try: + configuration_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "classifier"), c), + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "preprocessor"), p))) + except: + pass + return configuration_space + @classmethod def get_hyperparameter_search_space(cls, include_estimators=None, exclude_estimators=None, @@ -88,11 +169,190 @@ def get_hyperparameter_search_space(cls, include_estimators=None, "exclude_preprocessors cannot be used together.") if dataset_properties is None or not isinstance(dataset_properties, dict): - dataset_properties = dict() + dataset_properties = {} + if 'sparse' not in dataset_properties: + # This dataset is probaby dense + dataset_properties['sparse'] = False + + # Compile a list of legal preprocessors for this problem + available_preprocessors = \ + components.preprocessing_components._preprocessors + preprocessors = OrderedDict() + for name in available_preprocessors: + if name in cls._get_pipeline(): + # We don't want these preprocessors, as they are always included + # preprocessors[name] = available_preprocessors[name] + continue + elif include_preprocessors is not None and \ + name not in include_preprocessors: + continue + elif exclude_preprocessors is not None and \ + name in exclude_preprocessors: + continue + + if available_preprocessors[name]. \ + get_properties()['handles_classification'] is False: + continue + if dataset_properties.get('multiclass') is True and \ + available_preprocessors[name].get_properties()[ + 'handles_multiclass'] is False: + continue + if dataset_properties.get('multilabel') is True and \ + available_preprocessors[name].get_properties()[ + 'handles_multilabel'] is False: + continue + if dataset_properties.get('sparse') is True and \ + SPARSE not in available_preprocessors[name].get_properties()['input']: + continue + elif dataset_properties.get('sparse') is False and \ + DENSE not in available_preprocessors[name].get_properties()['input']: + continue + + preprocessors[name] = available_preprocessors[name] # Compile a list of all estimator objects for this problem available_classifiers = ParamSklearnClassifier._get_estimator_components() + # Remove unwanted classifiers + classifiers = OrderedDict() + for name in available_classifiers: + if include_estimators is not None and name not in include_estimators: + continue + elif exclude_estimators is not None and name in exclude_estimators: + continue + + if dataset_properties.get('multiclass') is True and \ + available_classifiers[name].get_properties()[ + 'handles_multiclass'] is False: + continue + if dataset_properties.get('multilabel') is True and \ + available_classifiers[name].get_properties()[ + 'handles_multilabel'] is False: + continue + classifiers[name] = available_classifiers[name] + if len(classifiers) == 0: + raise ValueError("No classifier to build a configuration space " + "for...") + + matches, preprocessors_list, classifiers_list = ParamSklearnClassifier.\ + create_match_array(preprocessors=preprocessors, + classifiers=classifiers, + sparse=dataset_properties.get('sparse')) + + # Now we have only legal preprocessors/classifiers we combine them + # Simple sanity checks + assert np.sum(matches) != 0, "No valid preprocessor/classifier " \ + "combination found, this might be a bug" + assert np.sum(matches) <= (matches.shape[0] * matches.shape[1]), \ + "'matches' is not binary; %s <= %d, [%d*%d]" % \ + (str(np.sum(matches)), matches.shape[0]*matches.shape[1], + matches.shape[0], matches.shape[1]) + + if np.sum(matches) < (matches.shape[0] * matches.shape[1]): + matches, preprocessors_list, classifiers_list = ParamSklearnClassifier.\ + remove_non_matches(matches=matches, + preprocessors_list=preprocessors_list, + classifiers_list=classifiers_list) + for p in preprocessors.keys(): + if p not in preprocessors_list: + del preprocessors[p] + for c in classifiers.keys(): + if c not in classifiers_list: + del classifiers[c] + + # Sanity checks + assert len(preprocessors_list) == matches.shape[0], \ + "Preprocessor deleting went wrong" + assert len(classifiers_list) == matches.shape[1], \ + "Classifier deleting went wrong" + assert [c in classifiers_list for c in classifiers] + assert [p in preprocessors_list for p in preprocessors] + + # Now add always present preprocessors + for name in available_preprocessors: + if name in cls._get_pipeline(): + preprocessors[name] = available_preprocessors[name] + + # Hardcode the defaults based on some educated guesses + classifier_defaults = ['random_forest', 'liblinear', 'sgd', + 'libsvm_svc'] + classifier_default = None + for cd_ in classifier_defaults: + if cd_ in classifiers: + classifier_default = cd_ + break + if classifier_default is None: + classifier_default = classifiers.keys()[0] + + # Get the configuration space + configuration_space = super(ParamSklearnClassifier, cls)\ + .get_hyperparameter_search_space( + cls._get_estimator_hyperparameter_name(), + classifier_default, classifiers, preprocessors, dataset_properties, + cls._get_pipeline()) + + # And now add forbidden parameter configurations + # According to matches + configuration_space = ParamSklearnClassifier.add_forbidden_clauses( + configuration_space=configuration_space, + preprocessors_list=preprocessors_list, + classifiers_list=classifiers_list, matches=matches) + + # which would take too long + # Combinations of tree-based models with feature learning: + classifiers_ = ["extra_trees", "gradient_boosting", + "k_nearest_neighbors", "libsvm_svc", "random_forest"] + feature_learning_ = ["kitchen_sinks", "sparse_filtering"] + + for c, f in product(classifiers_, feature_learning_): + try: + configuration_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "classifier"), c), + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "preprocessor"), f))) + except: + pass + + # Won't work + # Multinomial NB does not work with negative values, don't use + # it with standardization, features learning, pca + classifiers_ = ["multinomial_nb", "bagged_multinomial_nb", + "bernoulli_nb"] + feature_learning_ = ["kitchen_sinks", "sparse_filtering", "pca"] + for c in classifiers_: + if c not in classifiers_list: + continue + try: + configuration_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "rescaling:strategy"), "standard"), + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "classifier"), c))) + except: + pass + + for c, f in product(classifiers_, feature_learning_): + if c not in classifiers_list: + continue + if f not in preprocessors_list: + continue + try: + configuration_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "preprocessor"), f), + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "classifier"), c))) + except: + pass + + return configuration_space + + + """ + # Compile a list of all estimator objects for this problem + available_classifiers = ParamSklearnClassifier._get_estimator_components() + classifiers = dict() for name in available_classifiers: if include_estimators is not None and \ @@ -219,6 +479,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, pass return configuration_space + """ @staticmethod def _get_estimator_hyperparameter_name(): diff --git a/tests/test_classification.py b/tests/test_classification.py index 50225cc23b..6eb9aa59b1 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -25,10 +25,10 @@ class TestParamSklearnClassifier(unittest.TestCase): def test_io_dict(self): classifiers = classification_components._classifiers for c in classifiers: - self.assertIn('input', c.get_properties()) - self.assertIn('output', c.get_properties()) - inp = c.get_properties()['input'] - output = c.get_properties()['output'] + self.assertIn('input', classifiers[c].get_properties()) + self.assertIn('output', classifiers[c].get_properties()) + inp = classifiers[c].get_properties()['input'] + output = classifiers[c].get_properties()['output'] self.assertIsInstance(inp, tuple) self.assertIsInstance(output, str) From ae7d19aee70b9fb1259f52a49e0926d9b03588a2 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 9 Mar 2015 16:20:59 +0100 Subject: [PATCH 145/352] adjust property dicts, all components should have the same keys --- ParamSklearn/components/classification/adaboost.py | 2 ++ ParamSklearn/components/classification/bagged_gaussian_nb.py | 2 ++ ParamSklearn/components/classification/bagged_multinomial_nb.py | 2 ++ ParamSklearn/components/classification/bernoulli_nb.py | 2 ++ ParamSklearn/components/classification/extra_trees.py | 2 ++ ParamSklearn/components/classification/gaussian_nb.py | 2 ++ ParamSklearn/components/classification/gradient_boosting.py | 2 ++ ParamSklearn/components/classification/k_nearest_neighbors.py | 2 ++ ParamSklearn/components/classification/liblinear.py | 2 ++ ParamSklearn/components/classification/libsvm_svc.py | 2 ++ ParamSklearn/components/classification/multinomial_nb.py | 2 ++ ParamSklearn/components/classification/random_forest.py | 2 ++ ParamSklearn/components/classification/sgd.py | 2 ++ 13 files changed, 26 insertions(+) diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index 6c65373256..0675182b57 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -61,6 +61,8 @@ def get_properties(): 'prefers_data_scaled': False, # TODO find out if this is good because of sparcity... 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/bagged_gaussian_nb.py b/ParamSklearn/components/classification/bagged_gaussian_nb.py index bfa9b5e7f2..63dbb77408 100644 --- a/ParamSklearn/components/classification/bagged_gaussian_nb.py +++ b/ParamSklearn/components/classification/bagged_gaussian_nb.py @@ -47,6 +47,8 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': False, 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/bagged_multinomial_nb.py b/ParamSklearn/components/classification/bagged_multinomial_nb.py index 35580a007d..fed6c67ade 100644 --- a/ParamSklearn/components/classification/bagged_multinomial_nb.py +++ b/ParamSklearn/components/classification/bagged_multinomial_nb.py @@ -56,6 +56,8 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': False, 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/ParamSklearn/components/classification/bernoulli_nb.py index 7d88166c7a..7a3698f900 100644 --- a/ParamSklearn/components/classification/bernoulli_nb.py +++ b/ParamSklearn/components/classification/bernoulli_nb.py @@ -44,6 +44,8 @@ def get_properties(): 'handles_numerical_features': False, 'prefers_data_scaled': False, 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': False, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/extra_trees.py b/ParamSklearn/components/classification/extra_trees.py index 9f964b16e2..c4cf44a5af 100644 --- a/ParamSklearn/components/classification/extra_trees.py +++ b/ParamSklearn/components/classification/extra_trees.py @@ -102,6 +102,8 @@ def get_properties(): 'prefers_data_scaled': False, # TODO find out if this is good because of sparcity... 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': True, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/gaussian_nb.py b/ParamSklearn/components/classification/gaussian_nb.py index 0e7a104cd8..771adf0945 100644 --- a/ParamSklearn/components/classification/gaussian_nb.py +++ b/ParamSklearn/components/classification/gaussian_nb.py @@ -40,6 +40,8 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': False, 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/gradient_boosting.py b/ParamSklearn/components/classification/gradient_boosting.py index 2ac6ffd18c..61d120836c 100644 --- a/ParamSklearn/components/classification/gradient_boosting.py +++ b/ParamSklearn/components/classification/gradient_boosting.py @@ -103,6 +103,8 @@ def get_properties(): 'prefers_data_scaled': False, # TODO find out if this is good because of sparcity... 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index ba71f4cd15..e0c656e918 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -54,6 +54,8 @@ def get_properties(): 'prefers_data_scaled': True, # Find out if this is good because of sparsity 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/liblinear.py b/ParamSklearn/components/classification/liblinear.py index 1eaf94c901..c53ac60c67 100644 --- a/ParamSklearn/components/classification/liblinear.py +++ b/ParamSklearn/components/classification/liblinear.py @@ -72,6 +72,8 @@ def get_properties(): 'prefers_data_scaled': True, # Find out if this is good because of sparsity 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': False, diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index 85036ea919..bcdc2382ec 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -77,6 +77,8 @@ def get_properties(): 'prefers_data_scaled': True, # TODO find out if this is good because of sparsity... 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py index 97151616ee..65cabb790f 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -48,6 +48,8 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': False, 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py index 65a5a01a4a..7c45259c94 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -91,6 +91,8 @@ def get_properties(): 'prefers_data_scaled': False, # TODO find out if this is good because of sparcity... 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': True, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/sgd.py b/ParamSklearn/components/classification/sgd.py index da0dd6b3d5..d1aecf3547 100644 --- a/ParamSklearn/components/classification/sgd.py +++ b/ParamSklearn/components/classification/sgd.py @@ -82,6 +82,8 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': True, 'prefers_data_normalized': True, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, From 451e444d75abacd42cac4bac1cf479429955fbf3 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 9 Mar 2015 16:21:35 +0100 Subject: [PATCH 146/352] add a dummy preprocessor, that is always part of the cs --- .../preprocessing/NoPreprocessing.py | 50 +++++++++++++++++++ .../preprocessing/test_NoPreprocessing.py | 20 ++++++++ 2 files changed, 70 insertions(+) create mode 100644 ParamSklearn/components/preprocessing/NoPreprocessing.py create mode 100644 tests/components/preprocessing/test_NoPreprocessing.py diff --git a/ParamSklearn/components/preprocessing/NoPreprocessing.py b/ParamSklearn/components/preprocessing/NoPreprocessing.py new file mode 100644 index 0000000000..31d721d29c --- /dev/null +++ b/ParamSklearn/components/preprocessing/NoPreprocessing.py @@ -0,0 +1,50 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace + +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import SPARSE, DENSE, INPUT + + +class NoPreprocessing(ParamSklearnPreprocessingAlgorithm): + + def __init__(self, random_state): + """ This preprocessors does not change the data """ + self.preprocessor = None + + def fit(self, X, Y): + self.preprocessor = 0 + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return X + + @staticmethod + def get_properties(): + return {'shortname': 'no', + 'name': 'NoPreprocessing', + 'handles_missing_values': True, + 'handles_nominal_values': True, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE), + 'output': INPUT, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "ParamSklearn %s" % name + diff --git a/tests/components/preprocessing/test_NoPreprocessing.py b/tests/components/preprocessing/test_NoPreprocessing.py new file mode 100644 index 0000000000..4831d7e9a3 --- /dev/null +++ b/tests/components/preprocessing/test_NoPreprocessing.py @@ -0,0 +1,20 @@ +import numpy as np +import unittest + +from ParamSklearn.components.preprocessing.NoPreprocessing import NoPreprocessing +from ParamSklearn.util import _test_preprocessing + + +class NoneComponentTest(unittest.TestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(NoPreprocessing) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], original.shape[1]) + self.assertFalse((transformation == 0).all()) + self.assertEqual(np.sum(original), np.sum(transformation)) + self.assertEqual(np.min(original), np.min(transformation)) + self.assertEqual(np.max(original), np.max(transformation)) + self.assertEqual(np.std(original), np.std(transformation)) + self.assertEqual(np.mean(original), np.mean(transformation)) + + From 6d94904644eaa639bdd7a948595ba08a502825d3 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 9 Mar 2015 18:46:13 +0100 Subject: [PATCH 147/352] Fix configuration spaces --- .../classification/bagged_multinomial_nb.py | 15 ++++++++++----- .../components/classification/bernoulli_nb.py | 11 ++++++++--- .../components/classification/multinomial_nb.py | 15 +++++++++++---- 3 files changed, 29 insertions(+), 12 deletions(-) diff --git a/ParamSklearn/components/classification/bagged_multinomial_nb.py b/ParamSklearn/components/classification/bagged_multinomial_nb.py index e08ec789fa..2c9c7d9fb4 100644 --- a/ParamSklearn/components/classification/bagged_multinomial_nb.py +++ b/ParamSklearn/components/classification/bagged_multinomial_nb.py @@ -17,7 +17,12 @@ class BaggedMultinomialNB(ParamSklearnClassificationAlgorithm): def __init__(self, alpha, fit_prior, n_estimators, max_samples, max_features, random_state=None, verbose=0): self.alpha = alpha - self.fit_prior = fit_prior + if fit_prior.lower() == "true": + self.fit_prior = True + elif fit_prior.lower() == "false": + self.fit_prior = False + else: + self.fit_prior = fit_prior self.n_estimators = n_estimators self.max_samples = max_samples @@ -69,7 +74,7 @@ def get_hyperparameter_search_space(dataset_properties=None): # The three parameters of the bagging ensamble are set to constants # for now (SF) n_estimators = Constant('n_estimators', 100) - max_samples = Constant('max_samples' ,1.0) # caution: has to be float! + max_samples = Constant('max_samples', 1.0) # caution: has to be float! max_features = Constant('max_features', 1.0) # caution: has to be float! cs = ConfigurationSpace() @@ -79,9 +84,9 @@ def get_hyperparameter_search_space(dataset_properties=None): # Please adjust that, if you know a proper range, this is just a guess. alpha = UniformFloatHyperparameter(name="alpha", lower=1e-2, upper=100, default=1, log=True) - fit_prior = CategoricalHyperparameter( name="fit_prior", - choices=[True, False], - default=True) + fit_prior = CategoricalHyperparameter(name="fit_prior", + choices=["True", "False"], + default="True") cs.add_hyperparameter(alpha) cs.add_hyperparameter(fit_prior) diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/ParamSklearn/components/classification/bernoulli_nb.py index 95aa4b6921..ea49703551 100644 --- a/ParamSklearn/components/classification/bernoulli_nb.py +++ b/ParamSklearn/components/classification/bernoulli_nb.py @@ -14,7 +14,12 @@ class BernoulliNB(ParamSklearnClassificationAlgorithm): def __init__(self, alpha, fit_prior, random_state=None, verbose=0): self.alpha = alpha - self.fit_prior = fit_prior + if fit_prior.lower() == "true": + self.fit_prior = True + elif fit_prior.lower() == "false": + self.fit_prior = False + else: + self.fit_prior = fit_prior self.random_state = random_state self.verbose = int(verbose) @@ -63,8 +68,8 @@ def get_hyperparameter_search_space(dataset_properties=None): default=1, log=True) fit_prior = CategoricalHyperparameter(name="fit_prior", - choices=[True, False], - default=True) + choices=["True", "False"], + default="True") cs.add_hyperparameter(alpha) cs.add_hyperparameter(fit_prior) diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py index e891c81fbc..ebda12122a 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -14,9 +14,13 @@ class MultinomialNB(ParamSklearnClassificationAlgorithm): def __init__(self, alpha, fit_prior, random_state=None, verbose=0): - self.alpha = alpha - self.fit_prior = fit_prior + if fit_prior.lower() == "true": + self.fit_prior = True + elif fit_prior.lower() == "false": + self.fit_prior = False + else: + self.fit_prior = fit_prior self.random_state = random_state self.verbose = int(verbose) @@ -63,9 +67,12 @@ def get_hyperparameter_search_space(dataset_properties=None): # the smoothing parameter is a non-negative float # I will limit it to 100 and put it on a logarithmic scale. (SF) # Please adjust that, if you know a proper range, this is just a guess. - alpha = UniformFloatHyperparameter(name="alpha", lower=1e-2, upper=100, default=1, log=True) + alpha = UniformFloatHyperparameter(name="alpha", lower=1e-2, upper=100, + default=1, log=True) - fit_prior = CategoricalHyperparameter( name="fit_prior", choices=[True, False], default=True) + fit_prior = CategoricalHyperparameter( name="fit_prior", + choices=["True", "False"], + default="True") cs.add_hyperparameter(alpha) cs.add_hyperparameter(fit_prior) From 43023451e2599aea44a07a9a1ffa3a4153294f1f Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 10 Mar 2015 09:46:11 +0100 Subject: [PATCH 148/352] fiy typo --- .../preprocessing/{NoPreprocessing.py => no_peprocessing.py} | 0 tests/components/preprocessing/test_NoPreprocessing.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename ParamSklearn/components/preprocessing/{NoPreprocessing.py => no_peprocessing.py} (100%) diff --git a/ParamSklearn/components/preprocessing/NoPreprocessing.py b/ParamSklearn/components/preprocessing/no_peprocessing.py similarity index 100% rename from ParamSklearn/components/preprocessing/NoPreprocessing.py rename to ParamSklearn/components/preprocessing/no_peprocessing.py diff --git a/tests/components/preprocessing/test_NoPreprocessing.py b/tests/components/preprocessing/test_NoPreprocessing.py index 4831d7e9a3..5bd6232279 100644 --- a/tests/components/preprocessing/test_NoPreprocessing.py +++ b/tests/components/preprocessing/test_NoPreprocessing.py @@ -1,7 +1,7 @@ import numpy as np import unittest -from ParamSklearn.components.preprocessing.NoPreprocessing import NoPreprocessing +from ParamSklearn.components.preprocessing.no_peprocessing import NoPreprocessing from ParamSklearn.util import _test_preprocessing From be311cabef6fe1da99bbd28f5339c1da254a7962 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 10 Mar 2015 09:51:24 +0100 Subject: [PATCH 149/352] add checks for propertiy keys and remove no longer valid checks such as checking for an error when building a searchspace for a sparse multilabel/multiclass dataset --- tests/test_classification.py | 35 +++++++++++++++++++++-------------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/tests/test_classification.py b/tests/test_classification.py index 6eb9aa59b1..7cb05733f1 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -25,16 +25,22 @@ class TestParamSklearnClassifier(unittest.TestCase): def test_io_dict(self): classifiers = classification_components._classifiers for c in classifiers: - self.assertIn('input', classifiers[c].get_properties()) - self.assertIn('output', classifiers[c].get_properties()) - inp = classifiers[c].get_properties()['input'] - output = classifiers[c].get_properties()['output'] + props = classifiers[c].get_properties() + self.assertIn('input', props) + self.assertIn('output', props) + inp = props['input'] + output = props['output'] self.assertIsInstance(inp, tuple) self.assertIsInstance(output, str) for i in inp: self.assertIn(i, (SPARSE, DENSE)) self.assertEqual(output, PREDICTIONS) + self.assertIn('handles_regression', props) + self.assertFalse(props['handles_regression']) + self.assertIn('handles_classification', props) + self.assertIn('handles_multiclass', props) + self.assertIn('handles_multilabel', props) def test_find_classifiers(self): classifiers = classification_components._classifiers @@ -105,21 +111,22 @@ def test_get_hyperparameter_search_space_dataset_properties(self): cs_sp = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'sparse': True}) - self.assertNotIn('extra_trees', str(cs_sp)) - self.assertNotIn('gradient_boosting', str(cs_sp)) - self.assertNotIn('random_forest', str(cs_sp)) + self.assertIn('extra_trees', str(cs_sp)) + self.assertIn('gradient_boosting', str(cs_sp)) + self.assertIn('random_forest', str(cs_sp)) cs_mc_ml = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'multilabel': True, 'multiclass': True}) self.assertEqual(cs_ml, cs_mc_ml) - self.assertRaisesRegexp(ValueError, - "No classifier to build a configuration space " - "for...", ParamSklearnClassifier. - get_hyperparameter_search_space, - dataset_properties={'multilabel': True, - 'multiclass': True, - 'sparse': True}) + # We now have a preprocessing method that handles this case + #self.assertRaisesRegexp(ValueError, + # "No classifier to build a configuration space " + # "for...", ParamSklearnClassifier. + # get_hyperparameter_search_space, + # dataset_properties={'multilabel': True, + # 'multiclass': True, + # 'sparse': True}) @unittest.skip("test_check_random_state Not yet Implemented") def test_check_random_state(self): From f9209a6dfe1d5444d07c1e92e026d3da97bc3961 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 10 Mar 2015 09:53:00 +0100 Subject: [PATCH 150/352] rebuild classification searchspace with transforming preprocessing methods such as truncated svd, which turns sparse data to dense --- ParamSklearn/classification.py | 366 ++++++++--------------- tests/test_classification_searchspace.py | 137 +++++++++ 2 files changed, 258 insertions(+), 245 deletions(-) create mode 100644 tests/test_classification_searchspace.py diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 43be0e69d9..6fd78fca00 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -5,7 +5,8 @@ from sklearn.base import ClassifierMixin -from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause +from HPOlibConfigSpace.forbidden import ForbiddenAndConjunction from ParamSklearn import components as components from ParamSklearn.base import ParamSklearnBaseEstimator @@ -77,7 +78,7 @@ def predict_proba(self, X): return self._pipeline.steps[-1][-1].predict_proba(Xt) @classmethod - def create_match_array(cls, preprocessors, classifiers, sparse): + def get_match_array(cls, preprocessors, classifiers, sparse): # Now select combinations that work # We build a binary matrix, where a 1 indicates, that a combination # work on this dataset based in the dataset and the input/output formats @@ -87,7 +88,14 @@ def create_match_array(cls, preprocessors, classifiers, sparse): classifiers_list = classifiers.keys() matches = np.zeros([len(preprocessors), len(classifiers)]) for pidx, p in enumerate(preprocessors_list): + p_in = preprocessors[p].get_properties()['input'] p_out = preprocessors[p].get_properties()['output'] + if p in cls._get_pipeline(): + continue + elif sparse and SPARSE not in p_in: + continue + elif not sparse and DENSE not in p_in: + continue for cidx, c in enumerate(classifiers_list): c_in = classifiers[c].get_properties()['input'] if p_out == INPUT: @@ -99,7 +107,7 @@ def create_match_array(cls, preprocessors, classifiers, sparse): continue else: # These won't work - pass + continue elif p_out == DENSE and DENSE in c_in: matches[pidx, cidx] = 1 continue @@ -108,50 +116,84 @@ def create_match_array(cls, preprocessors, classifiers, sparse): continue else: # These won't work - pass - return matches, preprocessors_list, classifiers_list + continue + return matches @classmethod - def remove_non_matches(cls, matches, preprocessors_list, classifiers_list): - # We might delete some rows/columns - l = len(preprocessors_list) - for pidx, p in enumerate(preprocessors_list): - # We use the reverse idx as it stays correct - # when we start removing rows - reverse_idx = -l + pidx - if (matches[pidx, :] == 0).all(): - # unusable preprocessor, delete row - matches = np.delete(matches, reverse_idx, axis=0) - #del preprocessors[p] - del preprocessors_list[reverse_idx] - l = len(classifiers_list) - for cidx, c in enumerate(classifiers_list): - # We use the reverse idx as it stays correct - # when we start removing cols - reverse_idx = -l + cidx - if (matches[:, cidx] == 0).all(): - # unusable preprocessor, delete row - matches = np.delete(matches, reverse_idx, axis=1) - #del classifiers[c] - del classifiers_list[reverse_idx] - return matches, preprocessors_list, classifiers_list + def _get_idx_to_keep(cls, m): + # Returns all rows and cols where matches contains not only zeros + keep_row = [idx for idx in range(m.shape[0]) if np.sum(m[idx, :]) != 0] + keep_col = [idx for idx in range(m.shape[1]) if np.sum(m[:, idx]) != 0] + return keep_col, keep_row + + @classmethod + def sanitize_arrays(cls, m, preprocessors_list, classifiers_list, + preprocessors, classifiers): + assert len(preprocessors_list) == len(preprocessors.keys()) + assert len(classifiers_list) == len(classifiers.keys()) + assert isinstance(m, np.ndarray) + # remove components that are not usable for this problem + keep_col, keep_row = ParamSklearnClassifier._get_idx_to_keep(m) + + m = m[keep_row, :] + m = m[:, keep_col] + preproc_list = [preprocessors_list[p] for p in keep_row] + class_list = [classifiers_list[p] for p in keep_col] + + # Make sure they don't exist anymore + del preprocessors_list + del classifiers_list + + new_class = dict() + for c in class_list: + new_class[c] = classifiers[c] + new_preproc = dict() + for p in preproc_list: + new_preproc[p] = preprocessors[p] + + # Make sure they don't exist anymore + del preprocessors + del classifiers + + return m, preproc_list, class_list, new_preproc, new_class @classmethod - def add_forbidden_clauses(cls, configuration_space, preprocessors_list, classifiers_list, matches): - for pdx, p in enumerate(preprocessors_list): + def add_forbidden(cls, conf_space, preproc_list, class_list, matches): + for pdx, p in enumerate(preproc_list): if np.sum(matches[pdx, :]) == matches.shape[1]: continue - for cdx, c in enumerate(classifiers_list): + for cdx, c in enumerate(class_list): if matches[pdx, cdx] == 0: try: - configuration_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(configuration_space.get_hyperparameter( + conf_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(conf_space.get_hyperparameter( "classifier"), c), - ForbiddenEqualsClause(configuration_space.get_hyperparameter( + ForbiddenEqualsClause(conf_space.get_hyperparameter( "preprocessor"), p))) except: pass - return configuration_space + return conf_space + + @classmethod + def get_available_components(cls, available_comp, data_prop, inc, exc): + components = OrderedDict() + for name in available_comp: + if inc is not None and name not in inc: + continue + elif exc is not None and name in exc: + continue + + entry = available_comp[name] + if entry.get_properties()['handles_classification'] is False: + continue + if data_prop.get('multiclass') is True and entry.get_properties()['handles_multiclass'] is False: + continue + if data_prop.get('multilabel') is True and available_comp[name].get_properties()['handles_multilabel'] is False: + continue + components[name] = entry + + return components + @classmethod def get_hyperparameter_search_space(cls, include_estimators=None, @@ -175,92 +217,53 @@ def get_hyperparameter_search_space(cls, include_estimators=None, dataset_properties['sparse'] = False # Compile a list of legal preprocessors for this problem - available_preprocessors = \ - components.preprocessing_components._preprocessors - preprocessors = OrderedDict() - for name in available_preprocessors: - if name in cls._get_pipeline(): - # We don't want these preprocessors, as they are always included - # preprocessors[name] = available_preprocessors[name] - continue - elif include_preprocessors is not None and \ - name not in include_preprocessors: - continue - elif exclude_preprocessors is not None and \ - name in exclude_preprocessors: - continue - - if available_preprocessors[name]. \ - get_properties()['handles_classification'] is False: - continue - if dataset_properties.get('multiclass') is True and \ - available_preprocessors[name].get_properties()[ - 'handles_multiclass'] is False: - continue - if dataset_properties.get('multilabel') is True and \ - available_preprocessors[name].get_properties()[ - 'handles_multilabel'] is False: - continue - if dataset_properties.get('sparse') is True and \ - SPARSE not in available_preprocessors[name].get_properties()['input']: - continue - elif dataset_properties.get('sparse') is False and \ - DENSE not in available_preprocessors[name].get_properties()['input']: - continue - - preprocessors[name] = available_preprocessors[name] + available_preprocessors = components.preprocessing_components._preprocessors + preprocessors = ParamSklearnClassifier.get_available_components( + available_comp=available_preprocessors, + data_prop=dataset_properties, + inc=include_preprocessors, + exc=exclude_preprocessors) # Compile a list of all estimator objects for this problem available_classifiers = ParamSklearnClassifier._get_estimator_components() + classifiers = ParamSklearnClassifier.get_available_components( + available_comp=available_classifiers, + data_prop=dataset_properties, + inc=include_estimators, + exc=exclude_estimators) - # Remove unwanted classifiers - classifiers = OrderedDict() - for name in available_classifiers: - if include_estimators is not None and name not in include_estimators: - continue - elif exclude_estimators is not None and name in exclude_estimators: - continue - - if dataset_properties.get('multiclass') is True and \ - available_classifiers[name].get_properties()[ - 'handles_multiclass'] is False: - continue - if dataset_properties.get('multilabel') is True and \ - available_classifiers[name].get_properties()[ - 'handles_multilabel'] is False: - continue - classifiers[name] = available_classifiers[name] if len(classifiers) == 0: - raise ValueError("No classifier to build a configuration space " - "for...") + raise ValueError("No classifiers found") + if len(preprocessors) == 0: + raise ValueError("No preprocessors found, please add NoPreprocessing") - matches, preprocessors_list, classifiers_list = ParamSklearnClassifier.\ - create_match_array(preprocessors=preprocessors, - classifiers=classifiers, - sparse=dataset_properties.get('sparse')) + preprocessors_list = preprocessors.keys() + classifiers_list = classifiers.keys() + matches = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, + classifiers=classifiers, + sparse=dataset_properties.get('sparse')) # Now we have only legal preprocessors/classifiers we combine them # Simple sanity checks assert np.sum(matches) != 0, "No valid preprocessor/classifier " \ - "combination found, this might be a bug" + "combination found, probably a bug" assert np.sum(matches) <= (matches.shape[0] * matches.shape[1]), \ "'matches' is not binary; %s <= %d, [%d*%d]" % \ (str(np.sum(matches)), matches.shape[0]*matches.shape[1], matches.shape[0], matches.shape[1]) if np.sum(matches) < (matches.shape[0] * matches.shape[1]): - matches, preprocessors_list, classifiers_list = ParamSklearnClassifier.\ - remove_non_matches(matches=matches, - preprocessors_list=preprocessors_list, - classifiers_list=classifiers_list) - for p in preprocessors.keys(): - if p not in preprocessors_list: - del preprocessors[p] - for c in classifiers.keys(): - if c not in classifiers_list: - del classifiers[c] + matches, preprocessors_list, classifiers_list, preprocessors_list, classifiers = \ + ParamSklearnClassifier.sanitize_arrays(m=matches, + preprocessors_list=preprocessors_list, + classifiers_list=classifiers_list, + preprocessors=preprocessors, + classifiers=classifiers) # Sanity checks + assert len(preprocessors_list) > 0, "No valid preprocessors found" + assert len(classifiers_list) > 0, "No valid classifiers found" + assert len(preprocessors_list) == matches.shape[0], \ "Preprocessor deleting went wrong" assert len(classifiers_list) == matches.shape[1], \ @@ -285,18 +288,20 @@ def get_hyperparameter_search_space(cls, include_estimators=None, classifier_default = classifiers.keys()[0] # Get the configuration space - configuration_space = super(ParamSklearnClassifier, cls)\ - .get_hyperparameter_search_space( - cls._get_estimator_hyperparameter_name(), - classifier_default, classifiers, preprocessors, dataset_properties, - cls._get_pipeline()) + configuration_space = super(ParamSklearnClassifier, cls).\ + get_hyperparameter_search_space(estimator_name=cls._get_estimator_hyperparameter_name(), + default_estimator=classifier_default, + estimator_components=classifiers, + preprocessor_components=preprocessors, + dataset_properties=dataset_properties, + always_active=cls._get_pipeline()) # And now add forbidden parameter configurations # According to matches - configuration_space = ParamSklearnClassifier.add_forbidden_clauses( - configuration_space=configuration_space, - preprocessors_list=preprocessors_list, - classifiers_list=classifiers_list, matches=matches) + configuration_space = ParamSklearnClassifier.add_forbidden( + conf_space=configuration_space, + preproc_list=preprocessors_list, + class_list=classifiers_list, matches=matches) # which would take too long # Combinations of tree-based models with feature learning: @@ -305,6 +310,10 @@ def get_hyperparameter_search_space(cls, include_estimators=None, feature_learning_ = ["kitchen_sinks", "sparse_filtering"] for c, f in product(classifiers_, feature_learning_): + if c not in classifiers_list: + continue + if f not in preprocessors_list: + continue try: configuration_space.add_forbidden_clause(ForbiddenAndConjunction( ForbiddenEqualsClause(configuration_space.get_hyperparameter( @@ -348,139 +357,6 @@ def get_hyperparameter_search_space(cls, include_estimators=None, return configuration_space - - """ - # Compile a list of all estimator objects for this problem - available_classifiers = ParamSklearnClassifier._get_estimator_components() - - classifiers = dict() - for name in available_classifiers: - if include_estimators is not None and \ - name not in include_estimators: - continue - elif exclude_estimators is not None and \ - name in exclude_estimators: - continue - - if dataset_properties.get('multiclass') is True and \ - available_classifiers[name].get_properties()[ - 'handles_multiclass'] is False: - continue - if dataset_properties.get('multilabel') is True and \ - available_classifiers[name].get_properties()[ - 'handles_multilabel'] is False: - continue - if dataset_properties.get('sparse') is True and \ - available_classifiers[name].get_properties()[ - 'handles_sparse'] is False: - continue - classifiers[name] = available_classifiers[name] - - if len(classifiers) == 0: - raise ValueError("No classifier to build a configuration space " - "for...") - - # Hardcode the defaults based on some educated guesses - classifier_defaults = ['random_forest', 'liblinear', 'sgd', - 'libsvm_svc'] - classifier_default = None - for cd_ in classifier_defaults: - if cd_ in classifiers: - classifier_default = cd_ - break - if classifier_default is None: - classifier_default = classifiers.keys()[0] - - # Compile a list of preprocessor for this problem - available_preprocessors = \ - components.preprocessing_components._preprocessors - - preprocessors = dict() - for name in available_preprocessors: - if name in cls._get_pipeline(): - preprocessors[name] = available_preprocessors[name] - continue - elif include_preprocessors is not None and \ - name not in include_preprocessors: - continue - elif exclude_preprocessors is not None and \ - name in exclude_preprocessors: - continue - - if available_preprocessors[name]. \ - get_properties()['handles_classification'] is False: - continue - if dataset_properties.get('multiclass') is True and \ - available_preprocessors[name].get_properties()[ - 'handles_multiclass'] is False: - continue - if dataset_properties.get('multilabel') is True and \ - available_preprocessors[name].get_properties()[ - 'handles_multilabel'] is False: - continue - if dataset_properties.get('sparse') is True and \ - available_preprocessors[name].get_properties()[ - 'handles_sparse'] is False: - continue - elif dataset_properties.get('sparse') is False and \ - available_preprocessors[name].get_properties()[ - 'handles_dense'] is False: - continue - - preprocessors[name] = available_preprocessors[name] - - # Get the configuration space - configuration_space = super(ParamSklearnClassifier, cls)\ - .get_hyperparameter_search_space( - cls._get_estimator_hyperparameter_name(), - classifier_default, classifiers, preprocessors, dataset_properties, - cls._get_pipeline()) - - # And now add forbidden parameter configurations which would take too - # long - - # Combinations of tree-based models with feature learning: - classifiers_ = ["extra_trees", "gradient_boosting", - "k_nearest_neighbors", "libsvm_svc", "random_forest"] - feature_learning_ = ["kitchen_sinks", "sparse_filtering"] - - for c, f in product(classifiers_, feature_learning_): - try: - configuration_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "classifier"), c), - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "preprocessor"), f))) - except: - pass - - # Multinomial NB does not work with negative values -> so don't use - # it with standardization, features learning, pca - classifiers_ = ["multinomial_nb", "bagged_multinomial_nb", - "bernoulli_nb"] - feature_learning_ = ["kitchen_sinks", "sparse_filtering", "pca"] - for c in classifiers_: - try: - configuration_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "rescaling:strategy"), "standard"), - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "classifier"), c))) - except: - pass - for c, f in product(classifiers_, feature_learning_): - try: - configuration_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "preprocessor"), f), - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "classifier"), c))) - except: - pass - - return configuration_space - """ - @staticmethod def _get_estimator_hyperparameter_name(): return "classifier" diff --git a/tests/test_classification_searchspace.py b/tests/test_classification_searchspace.py new file mode 100644 index 0000000000..5f5e0c3c32 --- /dev/null +++ b/tests/test_classification_searchspace.py @@ -0,0 +1,137 @@ +from collections import OrderedDict + +import unittest +import numpy + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter + +from ParamSklearn.components.classification.random_forest import RandomForest +from ParamSklearn.components.classification.liblinear import LibLinear_SVC + +from ParamSklearn.components.preprocessing.pca import PCA +from ParamSklearn.components.preprocessing.truncatedSVD import TruncatedSVD +from ParamSklearn.components.preprocessing.no_peprocessing import NoPreprocessing + +from ParamSklearn.classification import ParamSklearnClassifier + +class TestCreateClassificationSearchspace(unittest.TestCase): + + def test_get_match_array(self): + # preproc is empty + preprocessors = OrderedDict() + preprocessors["pca"] = PCA # dense + classifiers = OrderedDict() + classifiers["random_forest"] = RandomForest + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=True) + self.assertEqual(numpy.sum(m), 0) + + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=False) + self.assertEqual(m, [[1]]) + + preprocessors['TSVD'] = TruncatedSVD # sparse + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=True) + self.assertEqual(m[0], [0]) # pca + self.assertEqual(m[1], [1]) # svd + + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=False) + self.assertEqual(m[0], [1]) # pca + self.assertEqual(m[1], [0]) # svd + + preprocessors['none'] = NoPreprocessing # sparse + dense + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=True) + self.assertEqual(m[0, :], [0]) # pca + self.assertEqual(m[1, :], [1]) # tsvd + self.assertEqual(m[2, :], [0]) # none + + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=False) + self.assertEqual(m[0, :], [1]) # pca + self.assertEqual(m[1, :], [0]) # tsvd + self.assertEqual(m[2, :], [1]) # none + + classifiers['libsvm'] = LibLinear_SVC + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=False) + self.assertListEqual(list(m[0, :]), [1, 1]) # pca + self.assertListEqual(list(m[1, :]), [0, 0]) # tsvd + self.assertListEqual(list(m[2, :]), [1, 1]) # none + + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=True) + self.assertListEqual(list(m[0, :]), [0, 0]) # pca + self.assertListEqual(list(m[1, :]), [1, 1]) # tsvd + self.assertListEqual(list(m[2, :]), [0, 1]) # none + + def test_get_idx_to_keep(self): + m = numpy.zeros([3, 4]) + col, row = ParamSklearnClassifier._get_idx_to_keep(m) + self.assertListEqual(col, []) + self.assertListEqual(row, []) + + m = numpy.zeros([100, 50]) + c_keep = set() + r_keep = set() + for i in range(20): + col_idx = numpy.random.randint(low=0, high=50, size=1)[0] + c_keep.add(col_idx) + row_idx = numpy.random.randint(low=0, high=100, size=1)[0] + r_keep.add(row_idx) + m[row_idx, col_idx] = 1 + col, row = ParamSklearnClassifier._get_idx_to_keep(m) + self.assertListEqual(col, sorted(c_keep)) + self.assertListEqual(row, sorted(r_keep)) + [self.assertTrue(c < m.shape[1]) for c in c_keep] + [self.assertTrue(r < m.shape[0]) for r in r_keep] + + + def test_sanitize_arrays(self): + m = numpy.zeros([2, 3]) + preprocessors_list = ['pa', 'pb'] + preprocessors = OrderedDict([['pa', 1], ['pb', 2]]) + classifier_list = ['ca', 'cb', 'cc'] + classifiers = OrderedDict([['ca', 1], ['cb', 2], ['cc', 3]]) + + # all zeros -> empty + new_m, new_preprocessors_list, new_classifier_list, new_preproc, new_class = ParamSklearnClassifier.sanitize_arrays(m=m, preprocessors=preprocessors, preprocessors_list=preprocessors_list, classifiers=classifiers, classifiers_list=classifier_list) + self.assertEqual(len(new_m), 0) + self.assertTrue(len(new_classifier_list) == len(new_preprocessors_list) == 0) + self.assertTrue(len(new_preproc) == len(new_class) == 0) + + for i in range(20): + m = numpy.zeros([2, 3]) + class_idx = numpy.random.randint(low=0, high=m.shape[1], size=1)[0] + pre_idx = numpy.random.randint(low=0, high=m.shape[0], size=1)[0] + m[pre_idx, class_idx] = 1 + new_m, new_preprocessors_list, new_classifier_list, new_preproc, new_class = ParamSklearnClassifier.sanitize_arrays(m=m, preprocessors=preprocessors, preprocessors_list=preprocessors_list, classifiers=classifiers, classifiers_list=classifier_list) + self.assertIn(preprocessors_list[pre_idx], new_preprocessors_list) + self.assertIn(preprocessors_list[pre_idx], preprocessors) + self.assertIn(classifier_list[class_idx], new_classifier_list) + self.assertIn(classifier_list[class_idx], classifiers) + self.assertTrue(new_m.shape[0] == new_m.shape[1] == 1) + + m = numpy.array([[1, 0, 0], [0, 1, 0]]) + new_m, new_preprocessors_list, new_classifier_list, new_preproc, new_class = ParamSklearnClassifier.sanitize_arrays(m=m, preprocessors=preprocessors, preprocessors_list=preprocessors_list, classifiers=classifiers, classifiers_list=classifier_list) + self.assertListEqual(preprocessors_list, new_preprocessors_list) + [self.assertIn(p, preprocessors) for p in preprocessors_list] + self.assertListEqual(classifier_list[:-1], new_classifier_list) + [self.assertIn(c, classifiers) for c in new_classifier_list] + self.assertTrue(m.shape[0], new_m.shape[0]) + self.assertTrue(m.shape[1], new_m.shape[1]) + + def test_add_forbidden(self): + m = numpy.ones([2, 3]) + preprocessors_list = ['pa', 'pb'] + classifier_list = ['ca', 'cb', 'cc'] + cs = ConfigurationSpace() + preprocessor = CategoricalHyperparameter(name='preprocessor', choices=preprocessors_list) + classifier = CategoricalHyperparameter(name='classifier', choices=classifier_list) + cs.add_hyperparameter(preprocessor) + cs.add_hyperparameter(classifier) + new_cs = ParamSklearnClassifier.add_forbidden(conf_space=cs, preproc_list=preprocessors_list, class_list=classifier_list, matches=m) + self.assertEqual(len(new_cs.forbidden_clauses), 0) + self.assertIsInstance(new_cs, ConfigurationSpace) + + m[0, 0] = 0 + new_cs = ParamSklearnClassifier.add_forbidden(conf_space=cs, preproc_list=preprocessors_list, class_list=classifier_list, matches=m) + self.assertEqual(len(new_cs.forbidden_clauses), 1) + self.assertEqual(new_cs.forbidden_clauses[0].components[0].value, 'ca') + self.assertEqual(new_cs.forbidden_clauses[0].components[1].value, 'pa') + self.assertIsInstance(new_cs, ConfigurationSpace) \ No newline at end of file From 219ff059103b85f449ab35c1d6fe1e04564f9fa2 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 6 Mar 2015 13:54:37 +0100 Subject: [PATCH 151/352] add input/output key; reorder inputs --- .../components/classification/adaboost.py | 5 ++++- .../classification/bagged_gaussian_nb.py | 15 +++++++-------- .../classification/bagged_multinomial_nb.py | 10 +++++----- .../components/classification/bernoulli_nb.py | 10 +++++----- .../components/classification/extra_trees.py | 10 +++++++--- .../components/classification/gaussian_nb.py | 10 ++++------ .../classification/gradient_boosting.py | 10 +++++----- .../classification/k_nearest_neighbors.py | 5 ++++- .../components/classification/liblinear.py | 11 +++++++---- .../components/classification/libsvm_svc.py | 9 ++++++--- .../components/classification/multinomial_nb.py | 10 +++++----- .../components/classification/random_forest.py | 8 ++++++-- ParamSklearn/components/classification/sgd.py | 11 +++++++---- ParamSklearn/util.py | 6 ++++++ 14 files changed, 78 insertions(+), 52 deletions(-) diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index 46c9a7d17b..6c65373256 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -5,7 +5,8 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, Constant -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS class AdaboostClassifier(ParamSklearnClassificationAlgorithm): @@ -64,6 +65,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, + 'input': (SPARSE, DENSE), + 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/bagged_gaussian_nb.py b/ParamSklearn/components/classification/bagged_gaussian_nb.py index c69f293038..bfa9b5e7f2 100644 --- a/ParamSklearn/components/classification/bagged_gaussian_nb.py +++ b/ParamSklearn/components/classification/bagged_gaussian_nb.py @@ -2,14 +2,11 @@ import sklearn.ensemble import sklearn.naive_bayes -from HPOlibConfigSpace.conditions import EqualsCondition - from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant +from HPOlibConfigSpace.hyperparameters import Constant -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class BaggedGaussianNB(ParamSklearnClassificationAlgorithm): @@ -54,6 +51,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, 'preferred_dtype': np.float32} @staticmethod @@ -61,8 +60,8 @@ def get_hyperparameter_search_space(dataset_properties=None): # The three parameters of the bagging ensemble are set to # constants for now (SF) n_estimators = Constant('n_estimators', 100) - max_samples = Constant('max_samples' ,1.0) # caution: has to be float! - max_features = Constant('max_features', 1.0) # caution: has to be float! + max_samples = Constant('max_samples', 1.0) # caution: has to be float + max_features = Constant('max_features', 1.0) # caution: has to be float cs = ConfigurationSpace() cs.add_hyperparameter(n_estimators) diff --git a/ParamSklearn/components/classification/bagged_multinomial_nb.py b/ParamSklearn/components/classification/bagged_multinomial_nb.py index 2c9c7d9fb4..bef285c67c 100644 --- a/ParamSklearn/components/classification/bagged_multinomial_nb.py +++ b/ParamSklearn/components/classification/bagged_multinomial_nb.py @@ -2,14 +2,12 @@ import sklearn.naive_bayes import sklearn.ensemble -from HPOlibConfigSpace.conditions import EqualsCondition - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant + CategoricalHyperparameter, Constant -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class BaggedMultinomialNB(ParamSklearnClassificationAlgorithm): @@ -67,6 +65,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, 'preferred_dtype': np.float32} @staticmethod diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/ParamSklearn/components/classification/bernoulli_nb.py index ea49703551..8702ca7091 100644 --- a/ParamSklearn/components/classification/bernoulli_nb.py +++ b/ParamSklearn/components/classification/bernoulli_nb.py @@ -1,14 +1,12 @@ import numpy as np import sklearn.naive_bayes -from HPOlibConfigSpace.conditions import EqualsCondition - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant + CategoricalHyperparameter -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class BernoulliNB(ParamSklearnClassificationAlgorithm): @@ -55,6 +53,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, 'preferred_dtype': np.bool} @staticmethod diff --git a/ParamSklearn/components/classification/extra_trees.py b/ParamSklearn/components/classification/extra_trees.py index dc7a081bcc..9f964b16e2 100644 --- a/ParamSklearn/components/classification/extra_trees.py +++ b/ParamSklearn/components/classification/extra_trees.py @@ -4,11 +4,13 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from HPOlibConfigSpace.conditions import EqualsCondition -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS + # get our own forests to replace the sklearn ones -from ...implementations import forest +from ParamSklearn.implementations import forest + class ExtraTreesClassifier(ParamSklearnClassificationAlgorithm): @@ -104,6 +106,8 @@ def get_properties(): 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/gaussian_nb.py b/ParamSklearn/components/classification/gaussian_nb.py index 38b6973439..0e7a104cd8 100644 --- a/ParamSklearn/components/classification/gaussian_nb.py +++ b/ParamSklearn/components/classification/gaussian_nb.py @@ -1,14 +1,10 @@ import numpy as np import sklearn.naive_bayes -from HPOlibConfigSpace.conditions import EqualsCondition - from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class GaussianNB(ParamSklearnClassificationAlgorithm): @@ -48,6 +44,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, 'preferred_dtype': np.float32} @staticmethod diff --git a/ParamSklearn/components/classification/gradient_boosting.py b/ParamSklearn/components/classification/gradient_boosting.py index 169321d9f8..2ac6ffd18c 100644 --- a/ParamSklearn/components/classification/gradient_boosting.py +++ b/ParamSklearn/components/classification/gradient_boosting.py @@ -1,14 +1,12 @@ import numpy as np import sklearn.ensemble -from HPOlibConfigSpace.conditions import EqualsCondition - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant + UniformIntegerHyperparameter, UnParametrizedHyperparameter, Constant -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class GradientBoostingClassifier(ParamSklearnClassificationAlgorithm): @@ -109,6 +107,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index b2aa9405d5..ba71f4cd15 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -5,7 +5,8 @@ Constant, UnParametrizedHyperparameter, UniformIntegerHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class KNearestNeighborsClassifier(ParamSklearnClassificationAlgorithm): @@ -57,6 +58,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, + 'input': (DENSE, ), + 'output': PREDICTIONS, # TODO find out what is best used here! 'preferred_dtype' : None} diff --git a/ParamSklearn/components/classification/liblinear.py b/ParamSklearn/components/classification/liblinear.py index 36293a41c1..1eaf94c901 100644 --- a/ParamSklearn/components/classification/liblinear.py +++ b/ParamSklearn/components/classification/liblinear.py @@ -1,4 +1,3 @@ -import numpy as np import sklearn.svm from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -7,8 +6,10 @@ from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ ForbiddenAndConjunction -from ..classification_base import ParamSklearnClassificationAlgorithm -from ...implementations.util import softmax +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.implementations.util import softmax +from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS + class LibLinear_SVC(ParamSklearnClassificationAlgorithm): # Liblinear is not deterministic as it uses a RNG inside @@ -77,8 +78,10 @@ def get_properties(): # TODO find out of this is right! # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use 'handles_sparse': True, + 'input': (SPARSE, DENSE), + 'output': PREDICTIONS, # TODO find out what is best used here! - 'preferred_dtype' : None} + 'preferred_dtype': None} @staticmethod def get_hyperparameter_search_space(dataset_properties=None): diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index 84eb07da5a..85036ea919 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -1,13 +1,14 @@ import sklearn.svm from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.conditions import EqualsCondition, OrConjunction, \ - InCondition +from HPOlibConfigSpace.conditions import EqualsCondition, InCondition from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS + class LibSVM_SVC(ParamSklearnClassificationAlgorithm): def __init__(self, C, kernel, gamma, shrinking, tol, class_weight, max_iter, @@ -82,6 +83,8 @@ def get_properties(): # TODO find out of this is right! # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use 'handles_sparse': True, + 'input': (DENSE, SPARSE), + 'output': PREDICTIONS, # TODO find out what is best used here! # C-continouos and double precision... 'preferred_dtype': None} diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py index ebda12122a..4fae8f7249 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -1,14 +1,12 @@ import numpy as np import sklearn.naive_bayes -from HPOlibConfigSpace.conditions import EqualsCondition - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant + CategoricalHyperparameter -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class MultinomialNB(ParamSklearnClassificationAlgorithm): @@ -58,6 +56,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, 'preferred_dtype': np.float32} @staticmethod diff --git a/ParamSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py index bc65f244ac..65a5a01a4a 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -5,9 +5,11 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ..classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS # get our own forests to replace the sklearn ones -from ...implementations import forest +from ParamSklearn.implementations import forest + class RandomForest(ParamSklearnClassificationAlgorithm): def __init__(self, n_estimators, criterion, max_features, @@ -93,6 +95,8 @@ def get_properties(): 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/sgd.py b/ParamSklearn/components/classification/sgd.py index c064d13530..da0dd6b3d5 100644 --- a/ParamSklearn/components/classification/sgd.py +++ b/ParamSklearn/components/classification/sgd.py @@ -1,14 +1,15 @@ -import numpy as np from sklearn.linear_model.stochastic_gradient import SGDClassifier from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter, UnParametrizedHyperparameter, \ UniformIntegerHyperparameter -from HPOlibConfigSpace.conditions import EqualsCondition, OrConjunction +from HPOlibConfigSpace.conditions import EqualsCondition + +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.implementations.util import softmax -from ..classification_base import ParamSklearnClassificationAlgorithm -from ...implementations.util import softmax class SGD(ParamSklearnClassificationAlgorithm): def __init__(self, loss, penalty, alpha, fit_intercept, n_iter, @@ -85,6 +86,8 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, + 'input': (DENSE, SPARSE), + 'output': PREDICTIONS, # TODO find out what is best used here! 'preferred_dtype' : None} diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index ce9ba412ae..c61011f18e 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -10,6 +10,12 @@ import sklearn.datasets +SPARSE = 'SPARSE' +DENSE = 'DENSE' +PREDICTIONS = 'PREDICTIONS' +INPUT = 'INPUT' + + def find_sklearn_classes(class_): classifiers = set() all_subdirectories = [] From 61701d8fc13b37e114afcbf043dfe418a2e18e88 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 6 Mar 2015 13:55:11 +0100 Subject: [PATCH 152/352] add input/output key; resort imports --- ParamSklearn/components/preprocessing/imputation.py | 7 +++++-- .../components/preprocessing/kitchen_sinks.py | 7 +++++-- ParamSklearn/components/preprocessing/pca.py | 10 ++++++---- ParamSklearn/components/preprocessing/rescaling.py | 11 +++++++---- .../select_percentile_classification.py | 9 ++++++--- .../preprocessing/select_percentile_regression.py | 9 ++++++--- .../components/preprocessing/sparse_filtering.py | 13 ++++++++----- .../components/preprocessing/truncatedSVD.py | 11 +++++++---- 8 files changed, 50 insertions(+), 27 deletions(-) diff --git a/ParamSklearn/components/preprocessing/imputation.py b/ParamSklearn/components/preprocessing/imputation.py index 4c2efc113f..fafd08b2e1 100644 --- a/ParamSklearn/components/preprocessing/imputation.py +++ b/ParamSklearn/components/preprocessing/imputation.py @@ -3,7 +3,8 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import DENSE, SPARSE, INPUT class Imputation(ParamSklearnPreprocessingAlgorithm): @@ -39,6 +40,8 @@ def get_properties(): # TODO find out of this is right! 'handles_sparse': True, 'handles_dense': True, + 'input': (DENSE, SPARSE), + 'output': INPUT, 'preferred_dtype': None} @staticmethod @@ -52,4 +55,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/kitchen_sinks.py b/ParamSklearn/components/preprocessing/kitchen_sinks.py index b4c4397648..9f4c968a02 100644 --- a/ParamSklearn/components/preprocessing/kitchen_sinks.py +++ b/ParamSklearn/components/preprocessing/kitchen_sinks.py @@ -4,7 +4,8 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import SPARSE, DENSE, INPUT class RandomKitchenSinks(ParamSklearnPreprocessingAlgorithm): @@ -46,6 +47,8 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': True, + 'input': (SPARSE, DENSE), + 'ouput': INPUT, 'preferred_dtype': None} @staticmethod @@ -61,5 +64,5 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/pca.py b/ParamSklearn/components/preprocessing/pca.py index 895b234d8b..5e21acddbb 100644 --- a/ParamSklearn/components/preprocessing/pca.py +++ b/ParamSklearn/components/preprocessing/pca.py @@ -1,11 +1,11 @@ import sklearn.decomposition -from HPOlibConfigSpace.configuration_space import ConfigurationSpace, \ - Configuration +from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import DENSE class PCA(ParamSklearnPreprocessingAlgorithm): @@ -59,6 +59,8 @@ def get_properties(): 'is_deterministic': False, 'handles_sparse': False, 'handles_dense': True, + 'input': (DENSE, ), + 'output': DENSE, # TODO find out what is best used here! 'preferred_dtype': None} @@ -75,4 +77,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/rescaling.py b/ParamSklearn/components/preprocessing/rescaling.py index f723f49092..586fbc0759 100644 --- a/ParamSklearn/components/preprocessing/rescaling.py +++ b/ParamSklearn/components/preprocessing/rescaling.py @@ -1,9 +1,10 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from ...implementations.StandardScaler import StandardScaler -from ...implementations.MinMaxScaler import MinMaxScaler -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.implementations.StandardScaler import StandardScaler +from ParamSklearn.implementations.MinMaxScaler import MinMaxScaler +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import DENSE, SPARSE, INPUT class Rescaling(ParamSklearnPreprocessingAlgorithm): @@ -43,6 +44,8 @@ def get_properties(): # TODO find out of this is right! 'handles_sparse': True, 'handles_dense': True, + 'input': (SPARSE, DENSE), + 'output': INPUT, # Add something here... 'preferred_dtype': None} @@ -57,4 +60,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/select_percentile_classification.py b/ParamSklearn/components/preprocessing/select_percentile_classification.py index a1d317a5d3..a3a5873ccc 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_classification.py +++ b/ParamSklearn/components/preprocessing/select_percentile_classification.py @@ -3,8 +3,9 @@ import sklearn.feature_selection -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm -from select_percentile import SelectPercentileBase +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.preprocessing.select_percentile import SelectPercentileBase +from ParamSklearn.util import DENSE, SPARSE, INPUT class SelectPercentileClassification(SelectPercentileBase, @@ -44,6 +45,8 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': True, + 'input': (SPARSE, DENSE), + 'output': INPUT, 'preferred_dtype': None} @staticmethod @@ -67,5 +70,5 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/select_percentile_regression.py b/ParamSklearn/components/preprocessing/select_percentile_regression.py index 9844d813f5..9668da6fdd 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_regression.py +++ b/ParamSklearn/components/preprocessing/select_percentile_regression.py @@ -3,8 +3,9 @@ import sklearn.feature_selection -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm -from select_percentile import SelectPercentileBase +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.preprocessing.select_percentile import SelectPercentileBase +from ParamSklearn.util import DENSE class SelectPercentileRegression(SelectPercentileBase, @@ -42,6 +43,8 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'handles_dense': True, + 'input': (DENSE, ), + 'output': DENSE, 'preferred_dtype': None} @staticmethod @@ -59,5 +62,5 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/sparse_filtering.py b/ParamSklearn/components/preprocessing/sparse_filtering.py index 184bd4a1a8..026dcacfd1 100644 --- a/ParamSklearn/components/preprocessing/sparse_filtering.py +++ b/ParamSklearn/components/preprocessing/sparse_filtering.py @@ -1,9 +1,10 @@ -from HPOlibConfigSpace.configuration_space import ConfigurationSpace, \ - Configuration +from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm -from ...implementations.SparseFiltering import SparseFiltering as SparseFilteringImpl +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.implementations.SparseFiltering import SparseFiltering as SparseFilteringImpl +from ParamSklearn.util import DENSE + class SparseFiltering(ParamSklearnPreprocessingAlgorithm): @@ -38,6 +39,8 @@ def get_properties(): 'is_deterministic': False, 'handles_sparse': False, 'handles_dense': True, + 'input': (DENSE, ), + 'output': DENSE, 'preferred_dtype': None} @@ -55,4 +58,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/truncatedSVD.py b/ParamSklearn/components/preprocessing/truncatedSVD.py index a45456b092..9e70e5871b 100644 --- a/ParamSklearn/components/preprocessing/truncatedSVD.py +++ b/ParamSklearn/components/preprocessing/truncatedSVD.py @@ -1,11 +1,12 @@ +import numpy as np + import sklearn.decomposition from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm -import numpy as np - +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import SPARSE, DENSE class TruncatedSVD(ParamSklearnPreprocessingAlgorithm): @@ -43,6 +44,8 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': False, + 'input': (SPARSE, ), + 'output': DENSE, 'preferred_dtype': np.float32} @staticmethod @@ -55,4 +58,4 @@ def get_hyperparameter_search_space(dataset_properties=None): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name From a9f232a30dc58ab84f06b9793c17b10272def446 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 6 Mar 2015 14:04:23 +0100 Subject: [PATCH 153/352] add unittest to assert keys in preperty dict --- tests/test_classification.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/tests/test_classification.py b/tests/test_classification.py index c41d9e2345..50225cc23b 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -1,8 +1,5 @@ __author__ = 'feurerm' -import copy -import numpy as np -import StringIO import unittest import sklearn.datasets @@ -18,12 +15,27 @@ from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm import ParamSklearn.components.classification as classification_components import ParamSklearn.components.preprocessing as preprocessing_components -from ParamSklearn.util import get_dataset +from ParamSklearn.util import get_dataset, DENSE, SPARSE, PREDICTIONS + class TestParamSklearnClassifier(unittest.TestCase): # TODO: test for both possible ways to initialize ParamSklearn # parameters and other... + def test_io_dict(self): + classifiers = classification_components._classifiers + for c in classifiers: + self.assertIn('input', c.get_properties()) + self.assertIn('output', c.get_properties()) + inp = c.get_properties()['input'] + output = c.get_properties()['output'] + + self.assertIsInstance(inp, tuple) + self.assertIsInstance(output, str) + for i in inp: + self.assertIn(i, (SPARSE, DENSE)) + self.assertEqual(output, PREDICTIONS) + def test_find_classifiers(self): classifiers = classification_components._classifiers self.assertGreaterEqual(len(classifiers), 1) From d5caefa836b737ce01f9862a2c36cde9c6d053af Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 6 Mar 2015 16:44:45 +0100 Subject: [PATCH 154/352] minor --- .../components/preprocessing/select_percentile_classification.py | 1 - .../components/preprocessing/select_percentile_regression.py | 1 - 2 files changed, 2 deletions(-) diff --git a/ParamSklearn/components/preprocessing/select_percentile_classification.py b/ParamSklearn/components/preprocessing/select_percentile_classification.py index a3a5873ccc..7ab6d58a23 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_classification.py +++ b/ParamSklearn/components/preprocessing/select_percentile_classification.py @@ -28,7 +28,6 @@ def __init__(self, percentile, score_func="chi2", random_state=None): raise ValueError("score_func must be in ('chi2, 'f_classif'), " "but is: %s" % score_func) - @staticmethod def get_properties(): return {'shortname': 'SPC', diff --git a/ParamSklearn/components/preprocessing/select_percentile_regression.py b/ParamSklearn/components/preprocessing/select_percentile_regression.py index 9668da6fdd..0ac5e668ce 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_regression.py +++ b/ParamSklearn/components/preprocessing/select_percentile_regression.py @@ -26,7 +26,6 @@ def __init__(self, percentile, score_func="f_classif", random_state=None): else: raise ValueError("Don't know this scoring function: %s" % score_func) - @staticmethod def get_properties(): return {'shortname': 'SPR', From 928b592b2717f5ea5174c31efc373ee852495ce9 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 6 Mar 2015 16:44:53 +0100 Subject: [PATCH 155/352] fix typo --- ParamSklearn/components/preprocessing/kitchen_sinks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/components/preprocessing/kitchen_sinks.py b/ParamSklearn/components/preprocessing/kitchen_sinks.py index 9f4c968a02..ebab4bd265 100644 --- a/ParamSklearn/components/preprocessing/kitchen_sinks.py +++ b/ParamSklearn/components/preprocessing/kitchen_sinks.py @@ -48,7 +48,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': True, 'input': (SPARSE, DENSE), - 'ouput': INPUT, + 'output': INPUT, 'preferred_dtype': None} @staticmethod From 09510ae61163a88c22f354900130691aa69edd0e Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 9 Mar 2015 13:57:59 +0100 Subject: [PATCH 156/352] add input/output and generic searchspace generation --- ParamSklearn/classification.py | 267 ++++++++++++++++++++++++++++++++- tests/test_classification.py | 8 +- 2 files changed, 268 insertions(+), 7 deletions(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index c5d581995a..43be0e69d9 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -1,11 +1,15 @@ +from collections import OrderedDict from itertools import product +import numpy as np + from sklearn.base import ClassifierMixin from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction -from . import components as components -from .base import ParamSklearnBaseEstimator +from ParamSklearn import components as components +from ParamSklearn.base import ParamSklearnBaseEstimator +from ParamSklearn.util import SPARSE, DENSE, INPUT class ParamSklearnClassifier(ClassifierMixin, ParamSklearnBaseEstimator): @@ -72,6 +76,83 @@ def predict_proba(self, X): return self._pipeline.steps[-1][-1].predict_proba(Xt) + @classmethod + def create_match_array(cls, preprocessors, classifiers, sparse): + # Now select combinations that work + # We build a binary matrix, where a 1 indicates, that a combination + # work on this dataset based in the dataset and the input/output formats + # A 'zero'-row (column) is an unusable preprocessor (classifier) + # A single zero results in an forbidden condition + preprocessors_list = preprocessors.keys() + classifiers_list = classifiers.keys() + matches = np.zeros([len(preprocessors), len(classifiers)]) + for pidx, p in enumerate(preprocessors_list): + p_out = preprocessors[p].get_properties()['output'] + for cidx, c in enumerate(classifiers_list): + c_in = classifiers[c].get_properties()['input'] + if p_out == INPUT: + # Preprocessor does not change the format + if (sparse and SPARSE in c_in) or \ + (not sparse and DENSE in c_in): + # Classifier input = Dataset format + matches[pidx, cidx] = 1 + continue + else: + # These won't work + pass + elif p_out == DENSE and DENSE in c_in: + matches[pidx, cidx] = 1 + continue + elif p_out == SPARSE and SPARSE in c_in: + matches[pidx, cidx] = 1 + continue + else: + # These won't work + pass + return matches, preprocessors_list, classifiers_list + + @classmethod + def remove_non_matches(cls, matches, preprocessors_list, classifiers_list): + # We might delete some rows/columns + l = len(preprocessors_list) + for pidx, p in enumerate(preprocessors_list): + # We use the reverse idx as it stays correct + # when we start removing rows + reverse_idx = -l + pidx + if (matches[pidx, :] == 0).all(): + # unusable preprocessor, delete row + matches = np.delete(matches, reverse_idx, axis=0) + #del preprocessors[p] + del preprocessors_list[reverse_idx] + l = len(classifiers_list) + for cidx, c in enumerate(classifiers_list): + # We use the reverse idx as it stays correct + # when we start removing cols + reverse_idx = -l + cidx + if (matches[:, cidx] == 0).all(): + # unusable preprocessor, delete row + matches = np.delete(matches, reverse_idx, axis=1) + #del classifiers[c] + del classifiers_list[reverse_idx] + return matches, preprocessors_list, classifiers_list + + @classmethod + def add_forbidden_clauses(cls, configuration_space, preprocessors_list, classifiers_list, matches): + for pdx, p in enumerate(preprocessors_list): + if np.sum(matches[pdx, :]) == matches.shape[1]: + continue + for cdx, c in enumerate(classifiers_list): + if matches[pdx, cdx] == 0: + try: + configuration_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "classifier"), c), + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "preprocessor"), p))) + except: + pass + return configuration_space + @classmethod def get_hyperparameter_search_space(cls, include_estimators=None, exclude_estimators=None, @@ -88,11 +169,190 @@ def get_hyperparameter_search_space(cls, include_estimators=None, "exclude_preprocessors cannot be used together.") if dataset_properties is None or not isinstance(dataset_properties, dict): - dataset_properties = dict() + dataset_properties = {} + if 'sparse' not in dataset_properties: + # This dataset is probaby dense + dataset_properties['sparse'] = False + + # Compile a list of legal preprocessors for this problem + available_preprocessors = \ + components.preprocessing_components._preprocessors + preprocessors = OrderedDict() + for name in available_preprocessors: + if name in cls._get_pipeline(): + # We don't want these preprocessors, as they are always included + # preprocessors[name] = available_preprocessors[name] + continue + elif include_preprocessors is not None and \ + name not in include_preprocessors: + continue + elif exclude_preprocessors is not None and \ + name in exclude_preprocessors: + continue + + if available_preprocessors[name]. \ + get_properties()['handles_classification'] is False: + continue + if dataset_properties.get('multiclass') is True and \ + available_preprocessors[name].get_properties()[ + 'handles_multiclass'] is False: + continue + if dataset_properties.get('multilabel') is True and \ + available_preprocessors[name].get_properties()[ + 'handles_multilabel'] is False: + continue + if dataset_properties.get('sparse') is True and \ + SPARSE not in available_preprocessors[name].get_properties()['input']: + continue + elif dataset_properties.get('sparse') is False and \ + DENSE not in available_preprocessors[name].get_properties()['input']: + continue + + preprocessors[name] = available_preprocessors[name] # Compile a list of all estimator objects for this problem available_classifiers = ParamSklearnClassifier._get_estimator_components() + # Remove unwanted classifiers + classifiers = OrderedDict() + for name in available_classifiers: + if include_estimators is not None and name not in include_estimators: + continue + elif exclude_estimators is not None and name in exclude_estimators: + continue + + if dataset_properties.get('multiclass') is True and \ + available_classifiers[name].get_properties()[ + 'handles_multiclass'] is False: + continue + if dataset_properties.get('multilabel') is True and \ + available_classifiers[name].get_properties()[ + 'handles_multilabel'] is False: + continue + classifiers[name] = available_classifiers[name] + if len(classifiers) == 0: + raise ValueError("No classifier to build a configuration space " + "for...") + + matches, preprocessors_list, classifiers_list = ParamSklearnClassifier.\ + create_match_array(preprocessors=preprocessors, + classifiers=classifiers, + sparse=dataset_properties.get('sparse')) + + # Now we have only legal preprocessors/classifiers we combine them + # Simple sanity checks + assert np.sum(matches) != 0, "No valid preprocessor/classifier " \ + "combination found, this might be a bug" + assert np.sum(matches) <= (matches.shape[0] * matches.shape[1]), \ + "'matches' is not binary; %s <= %d, [%d*%d]" % \ + (str(np.sum(matches)), matches.shape[0]*matches.shape[1], + matches.shape[0], matches.shape[1]) + + if np.sum(matches) < (matches.shape[0] * matches.shape[1]): + matches, preprocessors_list, classifiers_list = ParamSklearnClassifier.\ + remove_non_matches(matches=matches, + preprocessors_list=preprocessors_list, + classifiers_list=classifiers_list) + for p in preprocessors.keys(): + if p not in preprocessors_list: + del preprocessors[p] + for c in classifiers.keys(): + if c not in classifiers_list: + del classifiers[c] + + # Sanity checks + assert len(preprocessors_list) == matches.shape[0], \ + "Preprocessor deleting went wrong" + assert len(classifiers_list) == matches.shape[1], \ + "Classifier deleting went wrong" + assert [c in classifiers_list for c in classifiers] + assert [p in preprocessors_list for p in preprocessors] + + # Now add always present preprocessors + for name in available_preprocessors: + if name in cls._get_pipeline(): + preprocessors[name] = available_preprocessors[name] + + # Hardcode the defaults based on some educated guesses + classifier_defaults = ['random_forest', 'liblinear', 'sgd', + 'libsvm_svc'] + classifier_default = None + for cd_ in classifier_defaults: + if cd_ in classifiers: + classifier_default = cd_ + break + if classifier_default is None: + classifier_default = classifiers.keys()[0] + + # Get the configuration space + configuration_space = super(ParamSklearnClassifier, cls)\ + .get_hyperparameter_search_space( + cls._get_estimator_hyperparameter_name(), + classifier_default, classifiers, preprocessors, dataset_properties, + cls._get_pipeline()) + + # And now add forbidden parameter configurations + # According to matches + configuration_space = ParamSklearnClassifier.add_forbidden_clauses( + configuration_space=configuration_space, + preprocessors_list=preprocessors_list, + classifiers_list=classifiers_list, matches=matches) + + # which would take too long + # Combinations of tree-based models with feature learning: + classifiers_ = ["extra_trees", "gradient_boosting", + "k_nearest_neighbors", "libsvm_svc", "random_forest"] + feature_learning_ = ["kitchen_sinks", "sparse_filtering"] + + for c, f in product(classifiers_, feature_learning_): + try: + configuration_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "classifier"), c), + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "preprocessor"), f))) + except: + pass + + # Won't work + # Multinomial NB does not work with negative values, don't use + # it with standardization, features learning, pca + classifiers_ = ["multinomial_nb", "bagged_multinomial_nb", + "bernoulli_nb"] + feature_learning_ = ["kitchen_sinks", "sparse_filtering", "pca"] + for c in classifiers_: + if c not in classifiers_list: + continue + try: + configuration_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "rescaling:strategy"), "standard"), + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "classifier"), c))) + except: + pass + + for c, f in product(classifiers_, feature_learning_): + if c not in classifiers_list: + continue + if f not in preprocessors_list: + continue + try: + configuration_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "preprocessor"), f), + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "classifier"), c))) + except: + pass + + return configuration_space + + + """ + # Compile a list of all estimator objects for this problem + available_classifiers = ParamSklearnClassifier._get_estimator_components() + classifiers = dict() for name in available_classifiers: if include_estimators is not None and \ @@ -219,6 +479,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, pass return configuration_space + """ @staticmethod def _get_estimator_hyperparameter_name(): diff --git a/tests/test_classification.py b/tests/test_classification.py index 50225cc23b..6eb9aa59b1 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -25,10 +25,10 @@ class TestParamSklearnClassifier(unittest.TestCase): def test_io_dict(self): classifiers = classification_components._classifiers for c in classifiers: - self.assertIn('input', c.get_properties()) - self.assertIn('output', c.get_properties()) - inp = c.get_properties()['input'] - output = c.get_properties()['output'] + self.assertIn('input', classifiers[c].get_properties()) + self.assertIn('output', classifiers[c].get_properties()) + inp = classifiers[c].get_properties()['input'] + output = classifiers[c].get_properties()['output'] self.assertIsInstance(inp, tuple) self.assertIsInstance(output, str) From af73be9e59969900917cab5059fe3a7aff398222 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 9 Mar 2015 16:20:59 +0100 Subject: [PATCH 157/352] adjust property dicts, all components should have the same keys --- ParamSklearn/components/classification/adaboost.py | 2 ++ ParamSklearn/components/classification/bagged_gaussian_nb.py | 2 ++ ParamSklearn/components/classification/bagged_multinomial_nb.py | 2 ++ ParamSklearn/components/classification/bernoulli_nb.py | 2 ++ ParamSklearn/components/classification/extra_trees.py | 2 ++ ParamSklearn/components/classification/gaussian_nb.py | 2 ++ ParamSklearn/components/classification/gradient_boosting.py | 2 ++ ParamSklearn/components/classification/k_nearest_neighbors.py | 2 ++ ParamSklearn/components/classification/liblinear.py | 2 ++ ParamSklearn/components/classification/libsvm_svc.py | 2 ++ ParamSklearn/components/classification/multinomial_nb.py | 2 ++ ParamSklearn/components/classification/random_forest.py | 2 ++ ParamSklearn/components/classification/sgd.py | 2 ++ 13 files changed, 26 insertions(+) diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index 6c65373256..0675182b57 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -61,6 +61,8 @@ def get_properties(): 'prefers_data_scaled': False, # TODO find out if this is good because of sparcity... 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/bagged_gaussian_nb.py b/ParamSklearn/components/classification/bagged_gaussian_nb.py index bfa9b5e7f2..63dbb77408 100644 --- a/ParamSklearn/components/classification/bagged_gaussian_nb.py +++ b/ParamSklearn/components/classification/bagged_gaussian_nb.py @@ -47,6 +47,8 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': False, 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/bagged_multinomial_nb.py b/ParamSklearn/components/classification/bagged_multinomial_nb.py index bef285c67c..e13f8d2f6a 100644 --- a/ParamSklearn/components/classification/bagged_multinomial_nb.py +++ b/ParamSklearn/components/classification/bagged_multinomial_nb.py @@ -61,6 +61,8 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': False, 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/ParamSklearn/components/classification/bernoulli_nb.py index 8702ca7091..3d3745d251 100644 --- a/ParamSklearn/components/classification/bernoulli_nb.py +++ b/ParamSklearn/components/classification/bernoulli_nb.py @@ -49,6 +49,8 @@ def get_properties(): 'handles_numerical_features': False, 'prefers_data_scaled': False, 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': False, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/extra_trees.py b/ParamSklearn/components/classification/extra_trees.py index 9f964b16e2..c4cf44a5af 100644 --- a/ParamSklearn/components/classification/extra_trees.py +++ b/ParamSklearn/components/classification/extra_trees.py @@ -102,6 +102,8 @@ def get_properties(): 'prefers_data_scaled': False, # TODO find out if this is good because of sparcity... 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': True, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/gaussian_nb.py b/ParamSklearn/components/classification/gaussian_nb.py index 0e7a104cd8..771adf0945 100644 --- a/ParamSklearn/components/classification/gaussian_nb.py +++ b/ParamSklearn/components/classification/gaussian_nb.py @@ -40,6 +40,8 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': False, 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/gradient_boosting.py b/ParamSklearn/components/classification/gradient_boosting.py index 2ac6ffd18c..61d120836c 100644 --- a/ParamSklearn/components/classification/gradient_boosting.py +++ b/ParamSklearn/components/classification/gradient_boosting.py @@ -103,6 +103,8 @@ def get_properties(): 'prefers_data_scaled': False, # TODO find out if this is good because of sparcity... 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index ba71f4cd15..e0c656e918 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -54,6 +54,8 @@ def get_properties(): 'prefers_data_scaled': True, # Find out if this is good because of sparsity 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/liblinear.py b/ParamSklearn/components/classification/liblinear.py index 1eaf94c901..c53ac60c67 100644 --- a/ParamSklearn/components/classification/liblinear.py +++ b/ParamSklearn/components/classification/liblinear.py @@ -72,6 +72,8 @@ def get_properties(): 'prefers_data_scaled': True, # Find out if this is good because of sparsity 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': False, diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index 85036ea919..bcdc2382ec 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -77,6 +77,8 @@ def get_properties(): 'prefers_data_scaled': True, # TODO find out if this is good because of sparsity... 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py index 4fae8f7249..dea329d413 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -52,6 +52,8 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': False, 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py index 65a5a01a4a..7c45259c94 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -91,6 +91,8 @@ def get_properties(): 'prefers_data_scaled': False, # TODO find out if this is good because of sparcity... 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': True, 'is_deterministic': True, diff --git a/ParamSklearn/components/classification/sgd.py b/ParamSklearn/components/classification/sgd.py index da0dd6b3d5..d1aecf3547 100644 --- a/ParamSklearn/components/classification/sgd.py +++ b/ParamSklearn/components/classification/sgd.py @@ -82,6 +82,8 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': True, 'prefers_data_normalized': True, + 'handles_regression': False, + 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, From 6764d1f68fa735301dfa60eb38dbbf534bc648cc Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 9 Mar 2015 16:21:35 +0100 Subject: [PATCH 158/352] add a dummy preprocessor, that is always part of the cs --- .../preprocessing/NoPreprocessing.py | 50 +++++++++++++++++++ .../preprocessing/test_NoPreprocessing.py | 20 ++++++++ 2 files changed, 70 insertions(+) create mode 100644 ParamSklearn/components/preprocessing/NoPreprocessing.py create mode 100644 tests/components/preprocessing/test_NoPreprocessing.py diff --git a/ParamSklearn/components/preprocessing/NoPreprocessing.py b/ParamSklearn/components/preprocessing/NoPreprocessing.py new file mode 100644 index 0000000000..31d721d29c --- /dev/null +++ b/ParamSklearn/components/preprocessing/NoPreprocessing.py @@ -0,0 +1,50 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace + +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import SPARSE, DENSE, INPUT + + +class NoPreprocessing(ParamSklearnPreprocessingAlgorithm): + + def __init__(self, random_state): + """ This preprocessors does not change the data """ + self.preprocessor = None + + def fit(self, X, Y): + self.preprocessor = 0 + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return X + + @staticmethod + def get_properties(): + return {'shortname': 'no', + 'name': 'NoPreprocessing', + 'handles_missing_values': True, + 'handles_nominal_values': True, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE), + 'output': INPUT, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "ParamSklearn %s" % name + diff --git a/tests/components/preprocessing/test_NoPreprocessing.py b/tests/components/preprocessing/test_NoPreprocessing.py new file mode 100644 index 0000000000..4831d7e9a3 --- /dev/null +++ b/tests/components/preprocessing/test_NoPreprocessing.py @@ -0,0 +1,20 @@ +import numpy as np +import unittest + +from ParamSklearn.components.preprocessing.NoPreprocessing import NoPreprocessing +from ParamSklearn.util import _test_preprocessing + + +class NoneComponentTest(unittest.TestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(NoPreprocessing) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], original.shape[1]) + self.assertFalse((transformation == 0).all()) + self.assertEqual(np.sum(original), np.sum(transformation)) + self.assertEqual(np.min(original), np.min(transformation)) + self.assertEqual(np.max(original), np.max(transformation)) + self.assertEqual(np.std(original), np.std(transformation)) + self.assertEqual(np.mean(original), np.mean(transformation)) + + From 47739e9fa2b7e9da3d1d57cefe2d2f5405d374b4 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 10 Mar 2015 09:46:11 +0100 Subject: [PATCH 159/352] fiy typo --- .../preprocessing/{NoPreprocessing.py => no_peprocessing.py} | 0 tests/components/preprocessing/test_NoPreprocessing.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) rename ParamSklearn/components/preprocessing/{NoPreprocessing.py => no_peprocessing.py} (100%) diff --git a/ParamSklearn/components/preprocessing/NoPreprocessing.py b/ParamSklearn/components/preprocessing/no_peprocessing.py similarity index 100% rename from ParamSklearn/components/preprocessing/NoPreprocessing.py rename to ParamSklearn/components/preprocessing/no_peprocessing.py diff --git a/tests/components/preprocessing/test_NoPreprocessing.py b/tests/components/preprocessing/test_NoPreprocessing.py index 4831d7e9a3..5bd6232279 100644 --- a/tests/components/preprocessing/test_NoPreprocessing.py +++ b/tests/components/preprocessing/test_NoPreprocessing.py @@ -1,7 +1,7 @@ import numpy as np import unittest -from ParamSklearn.components.preprocessing.NoPreprocessing import NoPreprocessing +from ParamSklearn.components.preprocessing.no_peprocessing import NoPreprocessing from ParamSklearn.util import _test_preprocessing From 8492daed01ae7d551a8c32b9639e4c94624d039d Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 10 Mar 2015 09:51:24 +0100 Subject: [PATCH 160/352] add checks for propertiy keys and remove no longer valid checks such as checking for an error when building a searchspace for a sparse multilabel/multiclass dataset --- tests/test_classification.py | 35 +++++++++++++++++++++-------------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/tests/test_classification.py b/tests/test_classification.py index 6eb9aa59b1..7cb05733f1 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -25,16 +25,22 @@ class TestParamSklearnClassifier(unittest.TestCase): def test_io_dict(self): classifiers = classification_components._classifiers for c in classifiers: - self.assertIn('input', classifiers[c].get_properties()) - self.assertIn('output', classifiers[c].get_properties()) - inp = classifiers[c].get_properties()['input'] - output = classifiers[c].get_properties()['output'] + props = classifiers[c].get_properties() + self.assertIn('input', props) + self.assertIn('output', props) + inp = props['input'] + output = props['output'] self.assertIsInstance(inp, tuple) self.assertIsInstance(output, str) for i in inp: self.assertIn(i, (SPARSE, DENSE)) self.assertEqual(output, PREDICTIONS) + self.assertIn('handles_regression', props) + self.assertFalse(props['handles_regression']) + self.assertIn('handles_classification', props) + self.assertIn('handles_multiclass', props) + self.assertIn('handles_multilabel', props) def test_find_classifiers(self): classifiers = classification_components._classifiers @@ -105,21 +111,22 @@ def test_get_hyperparameter_search_space_dataset_properties(self): cs_sp = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'sparse': True}) - self.assertNotIn('extra_trees', str(cs_sp)) - self.assertNotIn('gradient_boosting', str(cs_sp)) - self.assertNotIn('random_forest', str(cs_sp)) + self.assertIn('extra_trees', str(cs_sp)) + self.assertIn('gradient_boosting', str(cs_sp)) + self.assertIn('random_forest', str(cs_sp)) cs_mc_ml = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'multilabel': True, 'multiclass': True}) self.assertEqual(cs_ml, cs_mc_ml) - self.assertRaisesRegexp(ValueError, - "No classifier to build a configuration space " - "for...", ParamSklearnClassifier. - get_hyperparameter_search_space, - dataset_properties={'multilabel': True, - 'multiclass': True, - 'sparse': True}) + # We now have a preprocessing method that handles this case + #self.assertRaisesRegexp(ValueError, + # "No classifier to build a configuration space " + # "for...", ParamSklearnClassifier. + # get_hyperparameter_search_space, + # dataset_properties={'multilabel': True, + # 'multiclass': True, + # 'sparse': True}) @unittest.skip("test_check_random_state Not yet Implemented") def test_check_random_state(self): From 9f961bc131cd17fe2b74bfe3f12309d107be3d00 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 10 Mar 2015 09:53:00 +0100 Subject: [PATCH 161/352] rebuild classification searchspace with transforming preprocessing methods such as truncated svd, which turns sparse data to dense --- ParamSklearn/classification.py | 366 ++++++++--------------- tests/test_classification_searchspace.py | 137 +++++++++ 2 files changed, 258 insertions(+), 245 deletions(-) create mode 100644 tests/test_classification_searchspace.py diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 43be0e69d9..6fd78fca00 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -5,7 +5,8 @@ from sklearn.base import ClassifierMixin -from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause +from HPOlibConfigSpace.forbidden import ForbiddenAndConjunction from ParamSklearn import components as components from ParamSklearn.base import ParamSklearnBaseEstimator @@ -77,7 +78,7 @@ def predict_proba(self, X): return self._pipeline.steps[-1][-1].predict_proba(Xt) @classmethod - def create_match_array(cls, preprocessors, classifiers, sparse): + def get_match_array(cls, preprocessors, classifiers, sparse): # Now select combinations that work # We build a binary matrix, where a 1 indicates, that a combination # work on this dataset based in the dataset and the input/output formats @@ -87,7 +88,14 @@ def create_match_array(cls, preprocessors, classifiers, sparse): classifiers_list = classifiers.keys() matches = np.zeros([len(preprocessors), len(classifiers)]) for pidx, p in enumerate(preprocessors_list): + p_in = preprocessors[p].get_properties()['input'] p_out = preprocessors[p].get_properties()['output'] + if p in cls._get_pipeline(): + continue + elif sparse and SPARSE not in p_in: + continue + elif not sparse and DENSE not in p_in: + continue for cidx, c in enumerate(classifiers_list): c_in = classifiers[c].get_properties()['input'] if p_out == INPUT: @@ -99,7 +107,7 @@ def create_match_array(cls, preprocessors, classifiers, sparse): continue else: # These won't work - pass + continue elif p_out == DENSE and DENSE in c_in: matches[pidx, cidx] = 1 continue @@ -108,50 +116,84 @@ def create_match_array(cls, preprocessors, classifiers, sparse): continue else: # These won't work - pass - return matches, preprocessors_list, classifiers_list + continue + return matches @classmethod - def remove_non_matches(cls, matches, preprocessors_list, classifiers_list): - # We might delete some rows/columns - l = len(preprocessors_list) - for pidx, p in enumerate(preprocessors_list): - # We use the reverse idx as it stays correct - # when we start removing rows - reverse_idx = -l + pidx - if (matches[pidx, :] == 0).all(): - # unusable preprocessor, delete row - matches = np.delete(matches, reverse_idx, axis=0) - #del preprocessors[p] - del preprocessors_list[reverse_idx] - l = len(classifiers_list) - for cidx, c in enumerate(classifiers_list): - # We use the reverse idx as it stays correct - # when we start removing cols - reverse_idx = -l + cidx - if (matches[:, cidx] == 0).all(): - # unusable preprocessor, delete row - matches = np.delete(matches, reverse_idx, axis=1) - #del classifiers[c] - del classifiers_list[reverse_idx] - return matches, preprocessors_list, classifiers_list + def _get_idx_to_keep(cls, m): + # Returns all rows and cols where matches contains not only zeros + keep_row = [idx for idx in range(m.shape[0]) if np.sum(m[idx, :]) != 0] + keep_col = [idx for idx in range(m.shape[1]) if np.sum(m[:, idx]) != 0] + return keep_col, keep_row + + @classmethod + def sanitize_arrays(cls, m, preprocessors_list, classifiers_list, + preprocessors, classifiers): + assert len(preprocessors_list) == len(preprocessors.keys()) + assert len(classifiers_list) == len(classifiers.keys()) + assert isinstance(m, np.ndarray) + # remove components that are not usable for this problem + keep_col, keep_row = ParamSklearnClassifier._get_idx_to_keep(m) + + m = m[keep_row, :] + m = m[:, keep_col] + preproc_list = [preprocessors_list[p] for p in keep_row] + class_list = [classifiers_list[p] for p in keep_col] + + # Make sure they don't exist anymore + del preprocessors_list + del classifiers_list + + new_class = dict() + for c in class_list: + new_class[c] = classifiers[c] + new_preproc = dict() + for p in preproc_list: + new_preproc[p] = preprocessors[p] + + # Make sure they don't exist anymore + del preprocessors + del classifiers + + return m, preproc_list, class_list, new_preproc, new_class @classmethod - def add_forbidden_clauses(cls, configuration_space, preprocessors_list, classifiers_list, matches): - for pdx, p in enumerate(preprocessors_list): + def add_forbidden(cls, conf_space, preproc_list, class_list, matches): + for pdx, p in enumerate(preproc_list): if np.sum(matches[pdx, :]) == matches.shape[1]: continue - for cdx, c in enumerate(classifiers_list): + for cdx, c in enumerate(class_list): if matches[pdx, cdx] == 0: try: - configuration_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(configuration_space.get_hyperparameter( + conf_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(conf_space.get_hyperparameter( "classifier"), c), - ForbiddenEqualsClause(configuration_space.get_hyperparameter( + ForbiddenEqualsClause(conf_space.get_hyperparameter( "preprocessor"), p))) except: pass - return configuration_space + return conf_space + + @classmethod + def get_available_components(cls, available_comp, data_prop, inc, exc): + components = OrderedDict() + for name in available_comp: + if inc is not None and name not in inc: + continue + elif exc is not None and name in exc: + continue + + entry = available_comp[name] + if entry.get_properties()['handles_classification'] is False: + continue + if data_prop.get('multiclass') is True and entry.get_properties()['handles_multiclass'] is False: + continue + if data_prop.get('multilabel') is True and available_comp[name].get_properties()['handles_multilabel'] is False: + continue + components[name] = entry + + return components + @classmethod def get_hyperparameter_search_space(cls, include_estimators=None, @@ -175,92 +217,53 @@ def get_hyperparameter_search_space(cls, include_estimators=None, dataset_properties['sparse'] = False # Compile a list of legal preprocessors for this problem - available_preprocessors = \ - components.preprocessing_components._preprocessors - preprocessors = OrderedDict() - for name in available_preprocessors: - if name in cls._get_pipeline(): - # We don't want these preprocessors, as they are always included - # preprocessors[name] = available_preprocessors[name] - continue - elif include_preprocessors is not None and \ - name not in include_preprocessors: - continue - elif exclude_preprocessors is not None and \ - name in exclude_preprocessors: - continue - - if available_preprocessors[name]. \ - get_properties()['handles_classification'] is False: - continue - if dataset_properties.get('multiclass') is True and \ - available_preprocessors[name].get_properties()[ - 'handles_multiclass'] is False: - continue - if dataset_properties.get('multilabel') is True and \ - available_preprocessors[name].get_properties()[ - 'handles_multilabel'] is False: - continue - if dataset_properties.get('sparse') is True and \ - SPARSE not in available_preprocessors[name].get_properties()['input']: - continue - elif dataset_properties.get('sparse') is False and \ - DENSE not in available_preprocessors[name].get_properties()['input']: - continue - - preprocessors[name] = available_preprocessors[name] + available_preprocessors = components.preprocessing_components._preprocessors + preprocessors = ParamSklearnClassifier.get_available_components( + available_comp=available_preprocessors, + data_prop=dataset_properties, + inc=include_preprocessors, + exc=exclude_preprocessors) # Compile a list of all estimator objects for this problem available_classifiers = ParamSklearnClassifier._get_estimator_components() + classifiers = ParamSklearnClassifier.get_available_components( + available_comp=available_classifiers, + data_prop=dataset_properties, + inc=include_estimators, + exc=exclude_estimators) - # Remove unwanted classifiers - classifiers = OrderedDict() - for name in available_classifiers: - if include_estimators is not None and name not in include_estimators: - continue - elif exclude_estimators is not None and name in exclude_estimators: - continue - - if dataset_properties.get('multiclass') is True and \ - available_classifiers[name].get_properties()[ - 'handles_multiclass'] is False: - continue - if dataset_properties.get('multilabel') is True and \ - available_classifiers[name].get_properties()[ - 'handles_multilabel'] is False: - continue - classifiers[name] = available_classifiers[name] if len(classifiers) == 0: - raise ValueError("No classifier to build a configuration space " - "for...") + raise ValueError("No classifiers found") + if len(preprocessors) == 0: + raise ValueError("No preprocessors found, please add NoPreprocessing") - matches, preprocessors_list, classifiers_list = ParamSklearnClassifier.\ - create_match_array(preprocessors=preprocessors, - classifiers=classifiers, - sparse=dataset_properties.get('sparse')) + preprocessors_list = preprocessors.keys() + classifiers_list = classifiers.keys() + matches = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, + classifiers=classifiers, + sparse=dataset_properties.get('sparse')) # Now we have only legal preprocessors/classifiers we combine them # Simple sanity checks assert np.sum(matches) != 0, "No valid preprocessor/classifier " \ - "combination found, this might be a bug" + "combination found, probably a bug" assert np.sum(matches) <= (matches.shape[0] * matches.shape[1]), \ "'matches' is not binary; %s <= %d, [%d*%d]" % \ (str(np.sum(matches)), matches.shape[0]*matches.shape[1], matches.shape[0], matches.shape[1]) if np.sum(matches) < (matches.shape[0] * matches.shape[1]): - matches, preprocessors_list, classifiers_list = ParamSklearnClassifier.\ - remove_non_matches(matches=matches, - preprocessors_list=preprocessors_list, - classifiers_list=classifiers_list) - for p in preprocessors.keys(): - if p not in preprocessors_list: - del preprocessors[p] - for c in classifiers.keys(): - if c not in classifiers_list: - del classifiers[c] + matches, preprocessors_list, classifiers_list, preprocessors_list, classifiers = \ + ParamSklearnClassifier.sanitize_arrays(m=matches, + preprocessors_list=preprocessors_list, + classifiers_list=classifiers_list, + preprocessors=preprocessors, + classifiers=classifiers) # Sanity checks + assert len(preprocessors_list) > 0, "No valid preprocessors found" + assert len(classifiers_list) > 0, "No valid classifiers found" + assert len(preprocessors_list) == matches.shape[0], \ "Preprocessor deleting went wrong" assert len(classifiers_list) == matches.shape[1], \ @@ -285,18 +288,20 @@ def get_hyperparameter_search_space(cls, include_estimators=None, classifier_default = classifiers.keys()[0] # Get the configuration space - configuration_space = super(ParamSklearnClassifier, cls)\ - .get_hyperparameter_search_space( - cls._get_estimator_hyperparameter_name(), - classifier_default, classifiers, preprocessors, dataset_properties, - cls._get_pipeline()) + configuration_space = super(ParamSklearnClassifier, cls).\ + get_hyperparameter_search_space(estimator_name=cls._get_estimator_hyperparameter_name(), + default_estimator=classifier_default, + estimator_components=classifiers, + preprocessor_components=preprocessors, + dataset_properties=dataset_properties, + always_active=cls._get_pipeline()) # And now add forbidden parameter configurations # According to matches - configuration_space = ParamSklearnClassifier.add_forbidden_clauses( - configuration_space=configuration_space, - preprocessors_list=preprocessors_list, - classifiers_list=classifiers_list, matches=matches) + configuration_space = ParamSklearnClassifier.add_forbidden( + conf_space=configuration_space, + preproc_list=preprocessors_list, + class_list=classifiers_list, matches=matches) # which would take too long # Combinations of tree-based models with feature learning: @@ -305,6 +310,10 @@ def get_hyperparameter_search_space(cls, include_estimators=None, feature_learning_ = ["kitchen_sinks", "sparse_filtering"] for c, f in product(classifiers_, feature_learning_): + if c not in classifiers_list: + continue + if f not in preprocessors_list: + continue try: configuration_space.add_forbidden_clause(ForbiddenAndConjunction( ForbiddenEqualsClause(configuration_space.get_hyperparameter( @@ -348,139 +357,6 @@ def get_hyperparameter_search_space(cls, include_estimators=None, return configuration_space - - """ - # Compile a list of all estimator objects for this problem - available_classifiers = ParamSklearnClassifier._get_estimator_components() - - classifiers = dict() - for name in available_classifiers: - if include_estimators is not None and \ - name not in include_estimators: - continue - elif exclude_estimators is not None and \ - name in exclude_estimators: - continue - - if dataset_properties.get('multiclass') is True and \ - available_classifiers[name].get_properties()[ - 'handles_multiclass'] is False: - continue - if dataset_properties.get('multilabel') is True and \ - available_classifiers[name].get_properties()[ - 'handles_multilabel'] is False: - continue - if dataset_properties.get('sparse') is True and \ - available_classifiers[name].get_properties()[ - 'handles_sparse'] is False: - continue - classifiers[name] = available_classifiers[name] - - if len(classifiers) == 0: - raise ValueError("No classifier to build a configuration space " - "for...") - - # Hardcode the defaults based on some educated guesses - classifier_defaults = ['random_forest', 'liblinear', 'sgd', - 'libsvm_svc'] - classifier_default = None - for cd_ in classifier_defaults: - if cd_ in classifiers: - classifier_default = cd_ - break - if classifier_default is None: - classifier_default = classifiers.keys()[0] - - # Compile a list of preprocessor for this problem - available_preprocessors = \ - components.preprocessing_components._preprocessors - - preprocessors = dict() - for name in available_preprocessors: - if name in cls._get_pipeline(): - preprocessors[name] = available_preprocessors[name] - continue - elif include_preprocessors is not None and \ - name not in include_preprocessors: - continue - elif exclude_preprocessors is not None and \ - name in exclude_preprocessors: - continue - - if available_preprocessors[name]. \ - get_properties()['handles_classification'] is False: - continue - if dataset_properties.get('multiclass') is True and \ - available_preprocessors[name].get_properties()[ - 'handles_multiclass'] is False: - continue - if dataset_properties.get('multilabel') is True and \ - available_preprocessors[name].get_properties()[ - 'handles_multilabel'] is False: - continue - if dataset_properties.get('sparse') is True and \ - available_preprocessors[name].get_properties()[ - 'handles_sparse'] is False: - continue - elif dataset_properties.get('sparse') is False and \ - available_preprocessors[name].get_properties()[ - 'handles_dense'] is False: - continue - - preprocessors[name] = available_preprocessors[name] - - # Get the configuration space - configuration_space = super(ParamSklearnClassifier, cls)\ - .get_hyperparameter_search_space( - cls._get_estimator_hyperparameter_name(), - classifier_default, classifiers, preprocessors, dataset_properties, - cls._get_pipeline()) - - # And now add forbidden parameter configurations which would take too - # long - - # Combinations of tree-based models with feature learning: - classifiers_ = ["extra_trees", "gradient_boosting", - "k_nearest_neighbors", "libsvm_svc", "random_forest"] - feature_learning_ = ["kitchen_sinks", "sparse_filtering"] - - for c, f in product(classifiers_, feature_learning_): - try: - configuration_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "classifier"), c), - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "preprocessor"), f))) - except: - pass - - # Multinomial NB does not work with negative values -> so don't use - # it with standardization, features learning, pca - classifiers_ = ["multinomial_nb", "bagged_multinomial_nb", - "bernoulli_nb"] - feature_learning_ = ["kitchen_sinks", "sparse_filtering", "pca"] - for c in classifiers_: - try: - configuration_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "rescaling:strategy"), "standard"), - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "classifier"), c))) - except: - pass - for c, f in product(classifiers_, feature_learning_): - try: - configuration_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "preprocessor"), f), - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "classifier"), c))) - except: - pass - - return configuration_space - """ - @staticmethod def _get_estimator_hyperparameter_name(): return "classifier" diff --git a/tests/test_classification_searchspace.py b/tests/test_classification_searchspace.py new file mode 100644 index 0000000000..5f5e0c3c32 --- /dev/null +++ b/tests/test_classification_searchspace.py @@ -0,0 +1,137 @@ +from collections import OrderedDict + +import unittest +import numpy + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter + +from ParamSklearn.components.classification.random_forest import RandomForest +from ParamSklearn.components.classification.liblinear import LibLinear_SVC + +from ParamSklearn.components.preprocessing.pca import PCA +from ParamSklearn.components.preprocessing.truncatedSVD import TruncatedSVD +from ParamSklearn.components.preprocessing.no_peprocessing import NoPreprocessing + +from ParamSklearn.classification import ParamSklearnClassifier + +class TestCreateClassificationSearchspace(unittest.TestCase): + + def test_get_match_array(self): + # preproc is empty + preprocessors = OrderedDict() + preprocessors["pca"] = PCA # dense + classifiers = OrderedDict() + classifiers["random_forest"] = RandomForest + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=True) + self.assertEqual(numpy.sum(m), 0) + + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=False) + self.assertEqual(m, [[1]]) + + preprocessors['TSVD'] = TruncatedSVD # sparse + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=True) + self.assertEqual(m[0], [0]) # pca + self.assertEqual(m[1], [1]) # svd + + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=False) + self.assertEqual(m[0], [1]) # pca + self.assertEqual(m[1], [0]) # svd + + preprocessors['none'] = NoPreprocessing # sparse + dense + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=True) + self.assertEqual(m[0, :], [0]) # pca + self.assertEqual(m[1, :], [1]) # tsvd + self.assertEqual(m[2, :], [0]) # none + + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=False) + self.assertEqual(m[0, :], [1]) # pca + self.assertEqual(m[1, :], [0]) # tsvd + self.assertEqual(m[2, :], [1]) # none + + classifiers['libsvm'] = LibLinear_SVC + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=False) + self.assertListEqual(list(m[0, :]), [1, 1]) # pca + self.assertListEqual(list(m[1, :]), [0, 0]) # tsvd + self.assertListEqual(list(m[2, :]), [1, 1]) # none + + m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=True) + self.assertListEqual(list(m[0, :]), [0, 0]) # pca + self.assertListEqual(list(m[1, :]), [1, 1]) # tsvd + self.assertListEqual(list(m[2, :]), [0, 1]) # none + + def test_get_idx_to_keep(self): + m = numpy.zeros([3, 4]) + col, row = ParamSklearnClassifier._get_idx_to_keep(m) + self.assertListEqual(col, []) + self.assertListEqual(row, []) + + m = numpy.zeros([100, 50]) + c_keep = set() + r_keep = set() + for i in range(20): + col_idx = numpy.random.randint(low=0, high=50, size=1)[0] + c_keep.add(col_idx) + row_idx = numpy.random.randint(low=0, high=100, size=1)[0] + r_keep.add(row_idx) + m[row_idx, col_idx] = 1 + col, row = ParamSklearnClassifier._get_idx_to_keep(m) + self.assertListEqual(col, sorted(c_keep)) + self.assertListEqual(row, sorted(r_keep)) + [self.assertTrue(c < m.shape[1]) for c in c_keep] + [self.assertTrue(r < m.shape[0]) for r in r_keep] + + + def test_sanitize_arrays(self): + m = numpy.zeros([2, 3]) + preprocessors_list = ['pa', 'pb'] + preprocessors = OrderedDict([['pa', 1], ['pb', 2]]) + classifier_list = ['ca', 'cb', 'cc'] + classifiers = OrderedDict([['ca', 1], ['cb', 2], ['cc', 3]]) + + # all zeros -> empty + new_m, new_preprocessors_list, new_classifier_list, new_preproc, new_class = ParamSklearnClassifier.sanitize_arrays(m=m, preprocessors=preprocessors, preprocessors_list=preprocessors_list, classifiers=classifiers, classifiers_list=classifier_list) + self.assertEqual(len(new_m), 0) + self.assertTrue(len(new_classifier_list) == len(new_preprocessors_list) == 0) + self.assertTrue(len(new_preproc) == len(new_class) == 0) + + for i in range(20): + m = numpy.zeros([2, 3]) + class_idx = numpy.random.randint(low=0, high=m.shape[1], size=1)[0] + pre_idx = numpy.random.randint(low=0, high=m.shape[0], size=1)[0] + m[pre_idx, class_idx] = 1 + new_m, new_preprocessors_list, new_classifier_list, new_preproc, new_class = ParamSklearnClassifier.sanitize_arrays(m=m, preprocessors=preprocessors, preprocessors_list=preprocessors_list, classifiers=classifiers, classifiers_list=classifier_list) + self.assertIn(preprocessors_list[pre_idx], new_preprocessors_list) + self.assertIn(preprocessors_list[pre_idx], preprocessors) + self.assertIn(classifier_list[class_idx], new_classifier_list) + self.assertIn(classifier_list[class_idx], classifiers) + self.assertTrue(new_m.shape[0] == new_m.shape[1] == 1) + + m = numpy.array([[1, 0, 0], [0, 1, 0]]) + new_m, new_preprocessors_list, new_classifier_list, new_preproc, new_class = ParamSklearnClassifier.sanitize_arrays(m=m, preprocessors=preprocessors, preprocessors_list=preprocessors_list, classifiers=classifiers, classifiers_list=classifier_list) + self.assertListEqual(preprocessors_list, new_preprocessors_list) + [self.assertIn(p, preprocessors) for p in preprocessors_list] + self.assertListEqual(classifier_list[:-1], new_classifier_list) + [self.assertIn(c, classifiers) for c in new_classifier_list] + self.assertTrue(m.shape[0], new_m.shape[0]) + self.assertTrue(m.shape[1], new_m.shape[1]) + + def test_add_forbidden(self): + m = numpy.ones([2, 3]) + preprocessors_list = ['pa', 'pb'] + classifier_list = ['ca', 'cb', 'cc'] + cs = ConfigurationSpace() + preprocessor = CategoricalHyperparameter(name='preprocessor', choices=preprocessors_list) + classifier = CategoricalHyperparameter(name='classifier', choices=classifier_list) + cs.add_hyperparameter(preprocessor) + cs.add_hyperparameter(classifier) + new_cs = ParamSklearnClassifier.add_forbidden(conf_space=cs, preproc_list=preprocessors_list, class_list=classifier_list, matches=m) + self.assertEqual(len(new_cs.forbidden_clauses), 0) + self.assertIsInstance(new_cs, ConfigurationSpace) + + m[0, 0] = 0 + new_cs = ParamSklearnClassifier.add_forbidden(conf_space=cs, preproc_list=preprocessors_list, class_list=classifier_list, matches=m) + self.assertEqual(len(new_cs.forbidden_clauses), 1) + self.assertEqual(new_cs.forbidden_clauses[0].components[0].value, 'ca') + self.assertEqual(new_cs.forbidden_clauses[0].components[1].value, 'pa') + self.assertIsInstance(new_cs, ConfigurationSpace) \ No newline at end of file From 78b2eb905438eddf3b57a0f841b8fa660011851b Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 10 Mar 2015 10:07:09 +0100 Subject: [PATCH 162/352] add random trees embedding --- .../preprocessing/random_trees_embedding.py | 96 +++++++++++++++++++ .../test_random_trees_embedding.py | 18 ++++ 2 files changed, 114 insertions(+) create mode 100644 ParamSklearn/components/preprocessing/random_trees_embedding.py create mode 100644 tests/components/preprocessing/test_random_trees_embedding.py diff --git a/ParamSklearn/components/preprocessing/random_trees_embedding.py b/ParamSklearn/components/preprocessing/random_trees_embedding.py new file mode 100644 index 0000000000..ef49b05071 --- /dev/null +++ b/ParamSklearn/components/preprocessing/random_trees_embedding.py @@ -0,0 +1,96 @@ +import sklearn.ensemble + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter, \ + UnParametrizedHyperparameter + +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import SPARSE, DENSE + + +class RandomTreesEmbedding(ParamSklearnPreprocessingAlgorithm): + + def __init__(self, n_estimators, max_depth, min_samples_split, + min_samples_leaf, max_leaf_nodes, sparse_output=True, + n_jobs=1, random_state=None): + self.n_estimators = n_estimators + if max_depth == "None": + self.max_depth = None + else: + self.max_depth = int(max_depth) + self.min_samples_split = min_samples_split + self.min_samples_leaf = min_samples_leaf + if max_leaf_nodes == "None": + self.max_leaf_nodes = None + else: + self.max_leaf_nodes = max_leaf_nodes + self.sparse_output = sparse_output + self.n_jobs = n_jobs + self.random_state = random_state + + def fit(self, X, Y): + self.preprocessor = sklearn.ensemble.RandomTreesEmbedding( + n_estimators=self.n_estimators, + max_depth=self.max_depth, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + max_leaf_nodes=self.max_leaf_nodes, + sparse_output=self.sparse_output, + n_jobs=self.n_jobs, + random_state=self.random_state + ) + self.preprocessor.fit(X, Y) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(): + return {'shortname': 'RandomTreesEmbedding', + 'name': 'Random Trees Embedding', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': False, + 'handles_dense': True, + 'input': (DENSE, ), + 'output': SPARSE, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + n_estimators = UniformIntegerHyperparameter(name="n_estimators", + lower=10, upper=100, + default=10) + max_depth = UniformIntegerHyperparameter(name="max_depth", + lower=2, upper=10, + default=5) + min_samples_split = UniformIntegerHyperparameter(name="min_samples_split", + lower=2, upper=20, + default=2) + min_samples_leaf = UniformIntegerHyperparameter(name="min_samples_leaf", + lower=1, upper=20, + default=1) + max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", + value="None") + cs = ConfigurationSpace() + cs.add_hyperparameter(n_estimators) + cs.add_hyperparameter(max_depth) + cs.add_hyperparameter(min_samples_split) + cs.add_hyperparameter(min_samples_leaf) + cs.add_hyperparameter(max_leaf_nodes) + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "ParamSklearn %s" % name \ No newline at end of file diff --git a/tests/components/preprocessing/test_random_trees_embedding.py b/tests/components/preprocessing/test_random_trees_embedding.py new file mode 100644 index 0000000000..c4f31bb4fe --- /dev/null +++ b/tests/components/preprocessing/test_random_trees_embedding.py @@ -0,0 +1,18 @@ +import unittest + +import numpy +import scipy.sparse + +from ParamSklearn.components.preprocessing.random_trees_embedding import \ + RandomTreesEmbedding +from ParamSklearn.util import _test_preprocessing + + +class RandomTreesEmbeddingComponentTest(unittest.TestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(RandomTreesEmbedding) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], 213) + self.assertIsInstance(original, numpy.ndarray) + self.assertTrue(scipy.sparse.issparse(transformation)) + self.assertTrue(all(transformation.data == 1)) \ No newline at end of file From 2deed036a0477d88258d1783d434b91745b52830 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 10 Mar 2015 10:10:13 +0100 Subject: [PATCH 163/352] adjust #hyperparameter --- tests/test_classification.py | 2 +- tests/test_regression.py | 2 +- tests/test_textclassification.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_classification.py b/tests/test_classification.py index 7cb05733f1..0612d72505 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -73,7 +73,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(86, len(hyperparameters)) + self.assertEqual(91, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 4, len(conditions)) diff --git a/tests/test_regression.py b/tests/test_regression.py index 2b1bc4109e..cc9bb7017f 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -56,7 +56,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(32, len(hyperparameters)) + self.assertEqual(37, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index de55673b23..b1fbfd506c 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(85, len(hyperparameters)) + self.assertEqual(90, len(hyperparameters)) # The three parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) From 64728c2e6ceee17abcabd86b3c04ffc5a63e610b Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 10 Mar 2015 10:54:19 +0100 Subject: [PATCH 164/352] Update list of classifiers and transformers --- misc/classifiers.csv | 6 +++--- misc/transformers.csv | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/misc/classifiers.csv b/misc/classifiers.csv index 9e03fdc8f8..3a4053209e 100644 --- a/misc/classifiers.csv +++ b/misc/classifiers.csv @@ -24,9 +24,9 @@ class,added,comment ,False,Is a meta-estimator ,False,Abstract base class for naive Bayes estimators ,False,Abstract base class for naive Bayes on discrete/categorical data -,, -,, -,, +,True, +,True, +,True, ,True, ,,Can crash when there is no neighbour within the radius ,, diff --git a/misc/transformers.csv b/misc/transformers.csv index 380df7bad0..4edf1287d9 100644 --- a/misc/transformers.csv +++ b/misc/transformers.csv @@ -32,7 +32,7 @@ class,added,comment ,FALSE,Base class ,, ,, -,, +,True, ,FALSE,Base class ,, ,, @@ -50,12 +50,12 @@ class,added,comment ,, ,, ,, -,, +,True, ,, ,, ,, ,, -,, +,True, ,, ,, ,, From a67e59a4a0ea1e9c2b7a649052c41f56e95fe663 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 10 Mar 2015 11:05:22 +0100 Subject: [PATCH 165/352] Update list of transformers --- misc/transformers.csv | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/misc/transformers.csv b/misc/transformers.csv index 4edf1287d9..1ce1d9a86f 100644 --- a/misc/transformers.csv +++ b/misc/transformers.csv @@ -24,7 +24,7 @@ class,added,comment ,, ,, ,, -,, +,True, ,FALSE,Base class ,FALSE,Prefer Forests ,FALSE,Prefer Forests From 223f1e5038c02c39839a422b7f1937300a06146e Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 10 Mar 2015 11:08:55 +0100 Subject: [PATCH 166/352] Remove unnecessary statements --- ParamSklearn/classification.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 6fd78fca00..178c104121 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -140,10 +140,6 @@ def sanitize_arrays(cls, m, preprocessors_list, classifiers_list, preproc_list = [preprocessors_list[p] for p in keep_row] class_list = [classifiers_list[p] for p in keep_col] - # Make sure they don't exist anymore - del preprocessors_list - del classifiers_list - new_class = dict() for c in class_list: new_class[c] = classifiers[c] @@ -151,10 +147,6 @@ def sanitize_arrays(cls, m, preprocessors_list, classifiers_list, for p in preproc_list: new_preproc[p] = preprocessors[p] - # Make sure they don't exist anymore - del preprocessors - del classifiers - return m, preproc_list, class_list, new_preproc, new_class @classmethod @@ -186,9 +178,11 @@ def get_available_components(cls, available_comp, data_prop, inc, exc): entry = available_comp[name] if entry.get_properties()['handles_classification'] is False: continue - if data_prop.get('multiclass') is True and entry.get_properties()['handles_multiclass'] is False: + if data_prop.get('multiclass') is True and entry.get_properties()[ + 'handles_multiclass'] is False: continue - if data_prop.get('multilabel') is True and available_comp[name].get_properties()['handles_multilabel'] is False: + if data_prop.get('multilabel') is True and available_comp[name]. \ + get_properties()['handles_multilabel'] is False: continue components[name] = entry From 4008e63b165ae23d6424bd27bbac2d72ad9febe9 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 11 Mar 2015 10:11:40 +0100 Subject: [PATCH 167/352] FIX: configuration space building with preprocessors which transform from dense to sparse --- ParamSklearn/base.py | 41 +++++++++-------- ParamSklearn/classification.py | 14 +++--- ...no_peprocessing.py => no_preprocessing.py} | 0 .../preprocessing/test_NoPreprocessing.py | 2 +- tests/test_classification.py | 35 +++++++++++++++ tests/test_classification_searchspace.py | 44 ++++++++++++++----- 6 files changed, 98 insertions(+), 38 deletions(-) rename ParamSklearn/components/preprocessing/{no_peprocessing.py => no_preprocessing.py} (100%) diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index f533f6d34b..1bb63d4798 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -96,26 +96,25 @@ def fit(self, X, Y, fit_params=None, init_params=None): preprocessors_names = ["imputation", "rescaling", self.configuration['preprocessor'].value] for preproc_name in preprocessors_names: - if preproc_name != "None": - preproc_params = {} - - for instantiated_hyperparameter in self.configuration: - if not instantiated_hyperparameter.hyperparameter.name \ - .startswith(preproc_name): - continue - if isinstance(instantiated_hyperparameter, - InactiveHyperparameter): - continue - - name_ = instantiated_hyperparameter.hyperparameter.name. \ - split(":")[1] - preproc_params[name_] = instantiated_hyperparameter.value - - preproc_params.update(init_params_per_method[preproc_name]) - preprocessor_object = components.preprocessing_components. \ - _preprocessors[preproc_name](random_state=self.random_state, - **preproc_params) - steps.append((preproc_name, preprocessor_object)) + preproc_params = {} + + for instantiated_hyperparameter in self.configuration: + if not instantiated_hyperparameter.hyperparameter.name \ + .startswith(preproc_name): + continue + if isinstance(instantiated_hyperparameter, + InactiveHyperparameter): + continue + + name_ = instantiated_hyperparameter.hyperparameter.name. \ + split(":")[1] + preproc_params[name_] = instantiated_hyperparameter.value + + preproc_params.update(init_params_per_method[preproc_name]) + preprocessor_object = components.preprocessing_components. \ + _preprocessors[preproc_name](random_state=self.random_state, + **preproc_params) + steps.append((preproc_name, preprocessor_object)) # Extract Estimator Hyperparameters from the configuration object estimator_name = self.configuration[ @@ -324,7 +323,7 @@ def get_hyperparameter_search_space(cls, estimator_name, preprocessor_choices = filter(lambda app: app not in always_active, available_preprocessors.keys()) preprocessor = CategoricalHyperparameter("preprocessor", - ["None"] + preprocessor_choices, default='None') + preprocessor_choices, default='no_preprocessing') cs.add_hyperparameter(preprocessor) for name in available_preprocessors.keys(): preprocessor_configuration_space = available_preprocessors[name]. \ diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 178c104121..3e573dc60b 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -147,6 +147,8 @@ def sanitize_arrays(cls, m, preprocessors_list, classifiers_list, for p in preproc_list: new_preproc[p] = preprocessors[p] + assert len(new_preproc) == m.shape[0] + assert len(new_class) == m.shape[1] return m, preproc_list, class_list, new_preproc, new_class @classmethod @@ -188,7 +190,6 @@ def get_available_components(cls, available_comp, data_prop, inc, exc): return components - @classmethod def get_hyperparameter_search_space(cls, include_estimators=None, exclude_estimators=None, @@ -247,7 +248,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, matches.shape[0], matches.shape[1]) if np.sum(matches) < (matches.shape[0] * matches.shape[1]): - matches, preprocessors_list, classifiers_list, preprocessors_list, classifiers = \ + matches, preprocessors_list, classifiers_list, preprocessors, classifiers = \ ParamSklearnClassifier.sanitize_arrays(m=matches, preprocessors_list=preprocessors_list, classifiers_list=classifiers_list, @@ -301,9 +302,9 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # Combinations of tree-based models with feature learning: classifiers_ = ["extra_trees", "gradient_boosting", "k_nearest_neighbors", "libsvm_svc", "random_forest"] - feature_learning_ = ["kitchen_sinks", "sparse_filtering"] + preproc_with_negative_X = ["kitchen_sinks", "sparse_filtering"] - for c, f in product(classifiers_, feature_learning_): + for c, f in product(classifiers_, preproc_with_negative_X): if c not in classifiers_list: continue if f not in preprocessors_list: @@ -322,7 +323,8 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # it with standardization, features learning, pca classifiers_ = ["multinomial_nb", "bagged_multinomial_nb", "bernoulli_nb"] - feature_learning_ = ["kitchen_sinks", "sparse_filtering", "pca"] + preproc_with_negative_X = ["kitchen_sinks", "sparse_filtering", + "pca", "truncatedSVD"] for c in classifiers_: if c not in classifiers_list: continue @@ -335,7 +337,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, except: pass - for c, f in product(classifiers_, feature_learning_): + for c, f in product(classifiers_, preproc_with_negative_X): if c not in classifiers_list: continue if f not in preprocessors_list: diff --git a/ParamSklearn/components/preprocessing/no_peprocessing.py b/ParamSklearn/components/preprocessing/no_preprocessing.py similarity index 100% rename from ParamSklearn/components/preprocessing/no_peprocessing.py rename to ParamSklearn/components/preprocessing/no_preprocessing.py diff --git a/tests/components/preprocessing/test_NoPreprocessing.py b/tests/components/preprocessing/test_NoPreprocessing.py index 5bd6232279..2c0d1edecb 100644 --- a/tests/components/preprocessing/test_NoPreprocessing.py +++ b/tests/components/preprocessing/test_NoPreprocessing.py @@ -1,7 +1,7 @@ import numpy as np import unittest -from ParamSklearn.components.preprocessing.no_peprocessing import NoPreprocessing +from ParamSklearn.components.preprocessing.no_preprocessing import NoPreprocessing from ParamSklearn.util import _test_preprocessing diff --git a/tests/test_classification.py b/tests/test_classification.py index 0612d72505..a7e842ae3f 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -9,6 +9,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter +from HPOlibConfigSpace.random_sampler import RandomSampler from ParamSklearn.classification import ParamSklearnClassifier from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm @@ -68,6 +69,40 @@ def test_default_configuration(self): sklearn.metrics.accuracy_score(predictions, Y_test)) scores = auto.predict_proba(X_test) + def test_configurations(self): + cs = ParamSklearnClassifier.get_hyperparameter_search_space() + sampler = RandomSampler(cs, 1) + for i in range(10): + config = sampler.sample_configuration() + X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris') + cls = ParamSklearnClassifier(config, random_state=1) + try: + cls.fit(X_train, Y_train) + predictions = cls.predict(X_test) + except ValueError as e: + if "Floating-point under-/overflow occurred at epoch" in e.message: + continue + else: + raise e + + def test_configurations_sparse(self): + cs = ParamSklearnClassifier.get_hyperparameter_search_space( + dataset_properties={'sparse': True}) + sampler = RandomSampler(cs, 1) + for i in range(10): + config = sampler.sample_configuration() + X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris', + make_sparse=True) + cls = ParamSklearnClassifier(config, random_state=1) + try: + cls.fit(X_train, Y_train) + predictions = cls.predict(X_test) + except ValueError as e: + if "Floating-point under-/overflow occurred at epoch" in e.message: + continue + else: + raise e + def test_get_hyperparameter_search_space(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space() self.assertIsInstance(cs, ConfigurationSpace) diff --git a/tests/test_classification_searchspace.py b/tests/test_classification_searchspace.py index 5f5e0c3c32..088849e454 100644 --- a/tests/test_classification_searchspace.py +++ b/tests/test_classification_searchspace.py @@ -6,12 +6,13 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from ParamSklearn.components.classification.random_forest import RandomForest from ParamSklearn.components.classification.liblinear import LibLinear_SVC +from ParamSklearn.components.classification.random_forest import RandomForest from ParamSklearn.components.preprocessing.pca import PCA from ParamSklearn.components.preprocessing.truncatedSVD import TruncatedSVD -from ParamSklearn.components.preprocessing.no_peprocessing import NoPreprocessing +from ParamSklearn.components.preprocessing.no_preprocessing import NoPreprocessing +from ParamSklearn.components.preprocessing.random_trees_embedding import RandomTreesEmbedding from ParamSklearn.classification import ParamSklearnClassifier @@ -23,42 +24,65 @@ def test_get_match_array(self): preprocessors["pca"] = PCA # dense classifiers = OrderedDict() classifiers["random_forest"] = RandomForest - m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=True) + m = ParamSklearnClassifier.get_match_array( + preprocessors=preprocessors, classifiers=classifiers, sparse=True) self.assertEqual(numpy.sum(m), 0) - m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=False) + m = ParamSklearnClassifier.get_match_array( + preprocessors=preprocessors, classifiers=classifiers, sparse=False) self.assertEqual(m, [[1]]) preprocessors['TSVD'] = TruncatedSVD # sparse - m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=True) + m = ParamSklearnClassifier.get_match_array( + preprocessors=preprocessors, classifiers=classifiers, sparse=True) self.assertEqual(m[0], [0]) # pca self.assertEqual(m[1], [1]) # svd - m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=False) + m = ParamSklearnClassifier.get_match_array( + preprocessors=preprocessors, classifiers=classifiers, sparse=False) self.assertEqual(m[0], [1]) # pca self.assertEqual(m[1], [0]) # svd preprocessors['none'] = NoPreprocessing # sparse + dense - m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=True) + m = ParamSklearnClassifier.get_match_array( + preprocessors=preprocessors, classifiers=classifiers, sparse=True) self.assertEqual(m[0, :], [0]) # pca self.assertEqual(m[1, :], [1]) # tsvd self.assertEqual(m[2, :], [0]) # none - m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=False) + m = ParamSklearnClassifier.get_match_array( + preprocessors=preprocessors, classifiers=classifiers, sparse=False) self.assertEqual(m[0, :], [1]) # pca self.assertEqual(m[1, :], [0]) # tsvd self.assertEqual(m[2, :], [1]) # none classifiers['libsvm'] = LibLinear_SVC - m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=False) + m = ParamSklearnClassifier.get_match_array( + preprocessors=preprocessors, classifiers=classifiers, sparse=False) + self.assertListEqual(list(m[0, :]), [1, 1]) # pca + self.assertListEqual(list(m[1, :]), [0, 0]) # tsvd + self.assertListEqual(list(m[2, :]), [1, 1]) # none + + m = ParamSklearnClassifier.get_match_array( + preprocessors=preprocessors, classifiers=classifiers, sparse=True) + self.assertListEqual(list(m[0, :]), [0, 0]) # pca + self.assertListEqual(list(m[1, :]), [1, 1]) # tsvd + self.assertListEqual(list(m[2, :]), [0, 1]) # none + + preprocessors['rte'] = RandomTreesEmbedding + m = ParamSklearnClassifier.get_match_array( + preprocessors=preprocessors, classifiers=classifiers, sparse=False) self.assertListEqual(list(m[0, :]), [1, 1]) # pca self.assertListEqual(list(m[1, :]), [0, 0]) # tsvd self.assertListEqual(list(m[2, :]), [1, 1]) # none + self.assertListEqual(list(m[3, :]), [0, 1]) # random trees embedding - m = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, classifiers=classifiers, sparse=True) + m = ParamSklearnClassifier.get_match_array( + preprocessors=preprocessors, classifiers=classifiers, sparse=True) self.assertListEqual(list(m[0, :]), [0, 0]) # pca self.assertListEqual(list(m[1, :]), [1, 1]) # tsvd self.assertListEqual(list(m[2, :]), [0, 1]) # none + self.assertListEqual(list(m[3, :]), [0, 0]) # random trees embedding def test_get_idx_to_keep(self): m = numpy.zeros([3, 4]) From 8788c9eb6b5ed9546c53d816429cab901e27cd83 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 11 Mar 2015 10:12:10 +0100 Subject: [PATCH 168/352] FIX: dense/sparse indicators --- ParamSklearn/components/classification/adaboost.py | 4 ++-- .../components/classification/bagged_multinomial_nb.py | 4 ++-- ParamSklearn/components/classification/bernoulli_nb.py | 4 ++-- ParamSklearn/components/classification/multinomial_nb.py | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index 0675182b57..9ce1ef4024 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -66,8 +66,8 @@ def get_properties(): 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': True, - 'handles_sparse': True, - 'input': (SPARSE, DENSE), + 'handles_sparse': False, + 'input': (DENSE), 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/classification/bagged_multinomial_nb.py b/ParamSklearn/components/classification/bagged_multinomial_nb.py index e13f8d2f6a..73c7d6f813 100644 --- a/ParamSklearn/components/classification/bagged_multinomial_nb.py +++ b/ParamSklearn/components/classification/bagged_multinomial_nb.py @@ -7,7 +7,7 @@ CategoricalHyperparameter, Constant from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS class BaggedMultinomialNB(ParamSklearnClassificationAlgorithm): @@ -67,7 +67,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, ), + 'input': (DENSE, SPARSE), 'output': PREDICTIONS, 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/ParamSklearn/components/classification/bernoulli_nb.py index 3d3745d251..94d5fc82fd 100644 --- a/ParamSklearn/components/classification/bernoulli_nb.py +++ b/ParamSklearn/components/classification/bernoulli_nb.py @@ -6,7 +6,7 @@ CategoricalHyperparameter from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS class BernoulliNB(ParamSklearnClassificationAlgorithm): @@ -55,7 +55,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, ), + 'input': (DENSE, SPARSE), 'output': PREDICTIONS, 'preferred_dtype': np.bool} diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py index dea329d413..a217b395a6 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -6,7 +6,7 @@ CategoricalHyperparameter from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS class MultinomialNB(ParamSklearnClassificationAlgorithm): @@ -58,7 +58,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, ), + 'input': (DENSE, SPARSE), 'output': PREDICTIONS, 'preferred_dtype': np.float32} From f30304d2398d991488724498a0d8bd907ecd8fc1 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 11 Mar 2015 10:12:43 +0100 Subject: [PATCH 169/352] FIX: upper limit on the maximum number of components in truncated SVD --- ParamSklearn/components/preprocessing/truncatedSVD.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/components/preprocessing/truncatedSVD.py b/ParamSklearn/components/preprocessing/truncatedSVD.py index 9e70e5871b..cba9544b4e 100644 --- a/ParamSklearn/components/preprocessing/truncatedSVD.py +++ b/ParamSklearn/components/preprocessing/truncatedSVD.py @@ -16,7 +16,7 @@ def __init__(self, target_dim, random_state=None): self.preprocessor = None def fit(self, X, Y): - target_dim = min(self.target_dim, X.shape[0]) + target_dim = min(self.target_dim, X.shape[1] - 1) self.preprocessor = sklearn.decomposition.TruncatedSVD( target_dim, algorithm='arpack') self.preprocessor.fit(X, Y) From 3ec3128b51b04d42d1dc3cb9d0b007b0e6b75b2e Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 11 Mar 2015 10:32:36 +0100 Subject: [PATCH 170/352] FIX: kNN can handle sparse data --- ParamSklearn/components/classification/k_nearest_neighbors.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index e0c656e918..5b72adde09 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -6,7 +6,7 @@ from HPOlibConfigSpace.conditions import EqualsCondition from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS class KNearestNeighborsClassifier(ParamSklearnClassificationAlgorithm): @@ -60,7 +60,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, - 'input': (DENSE, ), + 'input': (DENSE, SPARSE), 'output': PREDICTIONS, # TODO find out what is best used here! 'preferred_dtype' : None} From 8aabb7e7721b382a47afe9099d0900b5c2e75fdd Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 11 Mar 2015 10:33:04 +0100 Subject: [PATCH 171/352] Forbid combination of densifier and classifier which can handle sparse data --- ParamSklearn/classification.py | 26 +++++++-- .../components/preprocessing/densifier.py | 53 +++++++++++++++++++ .../preprocessing/test_densifier.py | 13 +++++ 3 files changed, 89 insertions(+), 3 deletions(-) create mode 100644 ParamSklearn/components/preprocessing/densifier.py create mode 100644 tests/components/preprocessing/test_densifier.py diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 3e573dc60b..c5fc4d597f 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -276,7 +276,11 @@ def get_hyperparameter_search_space(cls, include_estimators=None, 'libsvm_svc'] classifier_default = None for cd_ in classifier_defaults: - if cd_ in classifiers: + # Make sure that a classifier which can only handle dense is not + # selected as the default for a sparse dataset + no_preprocessing_idx = preprocessors_list.index("no_preprocessing") + cd_index = classifiers_list.index(cd_) + if cd_ in classifiers and matches[no_preprocessing_idx, cd_index] == 1: classifier_default = cd_ break if classifier_default is None: @@ -298,13 +302,29 @@ def get_hyperparameter_search_space(cls, include_estimators=None, preproc_list=preprocessors_list, class_list=classifiers_list, matches=matches) + # A classifier which can handle sparse data after the densifier + for key in classifiers: + if SPARSE in classifiers[key].get_properties()['input']: + try: + configuration_space.add_forbidden_clause( + ForbiddenAndConjunction( + ForbiddenEqualsClause( + configuration_space.get_hyperparameter( + 'classifier'), key), + ForbiddenEqualsClause( + configuration_space.get_hyperparameter( + 'preprocessor'), 'densifier') + )) + except: + pass + # which would take too long # Combinations of tree-based models with feature learning: classifiers_ = ["extra_trees", "gradient_boosting", "k_nearest_neighbors", "libsvm_svc", "random_forest"] - preproc_with_negative_X = ["kitchen_sinks", "sparse_filtering"] + feature_learning = ["kitchen_sinks", "sparse_filtering"] - for c, f in product(classifiers_, preproc_with_negative_X): + for c, f in product(classifiers_, feature_learning): if c not in classifiers_list: continue if f not in preprocessors_list: diff --git a/ParamSklearn/components/preprocessing/densifier.py b/ParamSklearn/components/preprocessing/densifier.py new file mode 100644 index 0000000000..3706b80701 --- /dev/null +++ b/ParamSklearn/components/preprocessing/densifier.py @@ -0,0 +1,53 @@ +from scipy import sparse + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter + +from ParamSklearn.components.preprocessor_base import \ + ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import DENSE, SPARSE + + +class Densifier(ParamSklearnPreprocessingAlgorithm): + def __init__(self, densify, random_state): + self.densify = bool(densify) + self.random_state = random_state + + def fit(self, X, Y): + return self + + def transform(self, X): + return X.todense() + + @staticmethod + def get_properties(): + return {'shortname': 'RandomTreesEmbedding', + 'name': 'Random Trees Embedding', + 'handles_missing_values': True, + 'handles_nominal_values': True, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': True, + 'handles_dense': False, + 'input': (SPARSE,), + 'output': DENSE, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + densify = CategoricalHyperparameter("densify", ["False", "True"], + default="False") + cs.add_hyperparameter(densify) + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "ParamSklearn %" % name + diff --git a/tests/components/preprocessing/test_densifier.py b/tests/components/preprocessing/test_densifier.py new file mode 100644 index 0000000000..380d57ecdc --- /dev/null +++ b/tests/components/preprocessing/test_densifier.py @@ -0,0 +1,13 @@ +import unittest + +import numpy as np + +from ParamSklearn.components.preprocessing.densifier import Densifier +from ParamSklearn.util import _test_preprocessing + + +class DensifierComponentTest(unittest.TestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(Densifier, make_sparse=True) + self.assertEqual(transformation.shape, original.shape) + self.assertIsInstance(transformation, np.ndarray) From 002885e4fba6c54c57e4135446efdfb56673d8c0 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 13 Mar 2015 13:30:11 +0100 Subject: [PATCH 172/352] FIX: adaboost input metainformation --- ParamSklearn/components/classification/adaboost.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index 9ce1ef4024..806855d383 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -67,7 +67,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE), + 'input': (DENSE,), 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? From 0a2b3fdd0959cd306a54bbed5c0e6bb222857a37 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 13 Mar 2015 13:30:49 +0100 Subject: [PATCH 173/352] FIX: no_preprocessing and include_classifiers --- ParamSklearn/base.py | 3 ++- ParamSklearn/classification.py | 15 +++++++++++++-- ParamSklearn/regression.py | 10 +++++++--- source/first_steps.rst | 2 +- tests/test_classification.py | 7 ++----- tests/test_regression.py | 2 +- tests/test_textclassification.py | 2 +- 7 files changed, 27 insertions(+), 14 deletions(-) diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index 1bb63d4798..729e42cb97 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -227,6 +227,7 @@ def predict(self, X): def get_hyperparameter_search_space(cls, estimator_name, default_estimator, estimator_components, + default_preprocessor, preprocessor_components, dataset_properties, always_active): @@ -323,7 +324,7 @@ def get_hyperparameter_search_space(cls, estimator_name, preprocessor_choices = filter(lambda app: app not in always_active, available_preprocessors.keys()) preprocessor = CategoricalHyperparameter("preprocessor", - preprocessor_choices, default='no_preprocessing') + preprocessor_choices, default=default_preprocessor) cs.add_hyperparameter(preprocessor) for name in available_preprocessors.keys(): preprocessor_configuration_space = available_preprocessors[name]. \ diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index c5fc4d597f..6ee2541f69 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -266,6 +266,14 @@ def get_hyperparameter_search_space(cls, include_estimators=None, assert [c in classifiers_list for c in classifiers] assert [p in preprocessors_list for p in preprocessors] + # Select the default preprocessor before the always active + # preprocessors are added, so they will not be selected as default + # preprocessors + if "no_preprocessing" in preprocessors: + preprocessor_default = "no_preprocessing" + else: + preprocessor_default = sorted(preprocessors.keys())[0] + # Now add always present preprocessors for name in available_preprocessors: if name in cls._get_pipeline(): @@ -278,9 +286,11 @@ def get_hyperparameter_search_space(cls, include_estimators=None, for cd_ in classifier_defaults: # Make sure that a classifier which can only handle dense is not # selected as the default for a sparse dataset - no_preprocessing_idx = preprocessors_list.index("no_preprocessing") + if cd_ not in classifiers: + continue + no_preprocessing_idx = preprocessors_list.index(preprocessor_default) cd_index = classifiers_list.index(cd_) - if cd_ in classifiers and matches[no_preprocessing_idx, cd_index] == 1: + if matches[no_preprocessing_idx, cd_index] == 1: classifier_default = cd_ break if classifier_default is None: @@ -291,6 +301,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, get_hyperparameter_search_space(estimator_name=cls._get_estimator_hyperparameter_name(), default_estimator=classifier_default, estimator_components=classifiers, + default_preprocessor=preprocessor_default, preprocessor_components=preprocessors, dataset_properties=dataset_properties, always_active=cls._get_pipeline()) diff --git a/ParamSklearn/regression.py b/ParamSklearn/regression.py index cc24a3c545..9de8fbd88f 100644 --- a/ParamSklearn/regression.py +++ b/ParamSklearn/regression.py @@ -205,9 +205,13 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # Get the configuration space configuration_space = super(ParamSklearnRegressor, cls).\ get_hyperparameter_search_space( - cls._get_estimator_hyperparameter_name(), - regressor_default, regressors, preprocessors, dataset_properties, - cls._pipeline, ) + estimator_name=cls._get_estimator_hyperparameter_name(), + default_estimator=regressor_default, + estimator_components=regressors, + default_preprocessor="no_preprocessing", + preprocessor_components=preprocessors, + dataset_properties=dataset_properties, + always_active=cls._pipeline) # And now add forbidden parameter configurations which would take too # long diff --git a/source/first_steps.rst b/source/first_steps.rst index e4a9a930eb..9bb492f11c 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -24,4 +24,4 @@ configuration on the iris dataset. >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.83999999999999997 + 0.93999999999999995 diff --git a/tests/test_classification.py b/tests/test_classification.py index a7e842ae3f..8e521af94e 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -20,9 +20,6 @@ class TestParamSklearnClassifier(unittest.TestCase): - # TODO: test for both possible ways to initialize ParamSklearn - # parameters and other... - def test_io_dict(self): classifiers = classification_components._classifiers for c in classifiers: @@ -108,7 +105,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(91, len(hyperparameters)) + self.assertEqual(90, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 4, len(conditions)) @@ -126,7 +123,7 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space( include_preprocessors=['pca']) self.assertEqual(cs.get_hyperparameter('preprocessor'), - CategoricalHyperparameter('preprocessor', ["None", 'pca'])) + CategoricalHyperparameter('preprocessor', ['pca'])) cs = ParamSklearnClassifier.get_hyperparameter_search_space( exclude_preprocessors=['pca']) diff --git a/tests/test_regression.py b/tests/test_regression.py index cc9bb7017f..c0dae9ab28 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -56,7 +56,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(37, len(hyperparameters)) + self.assertEqual(38, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index b1fbfd506c..067712de55 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(90, len(hyperparameters)) + self.assertEqual(89, len(hyperparameters)) # The three parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) From d8d3198ba4ee28db37402958178295e6d7f179c4 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 13 Mar 2015 14:56:49 +0100 Subject: [PATCH 174/352] extract searchspace utils to make them available for regression --- ParamSklearn/classification.py | 127 +++--------------- ParamSklearn/create_searchspace_util.py | 100 ++++++++++++++ tests/test_classification.py | 2 +- ...create_searchspace_util_classification.py} | 88 +++++++----- 4 files changed, 177 insertions(+), 140 deletions(-) create mode 100644 ParamSklearn/create_searchspace_util.py rename tests/{test_classification_searchspace.py => test_create_searchspace_util_classification.py} (62%) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 6ee2541f69..d7672d6646 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -10,7 +10,8 @@ from ParamSklearn import components as components from ParamSklearn.base import ParamSklearnBaseEstimator -from ParamSklearn.util import SPARSE, DENSE, INPUT +from ParamSklearn.util import SPARSE +import ParamSklearn.create_searchspace_util class ParamSklearnClassifier(ClassifierMixin, ParamSklearnBaseEstimator): @@ -77,100 +78,9 @@ def predict_proba(self, X): return self._pipeline.steps[-1][-1].predict_proba(Xt) - @classmethod - def get_match_array(cls, preprocessors, classifiers, sparse): - # Now select combinations that work - # We build a binary matrix, where a 1 indicates, that a combination - # work on this dataset based in the dataset and the input/output formats - # A 'zero'-row (column) is an unusable preprocessor (classifier) - # A single zero results in an forbidden condition - preprocessors_list = preprocessors.keys() - classifiers_list = classifiers.keys() - matches = np.zeros([len(preprocessors), len(classifiers)]) - for pidx, p in enumerate(preprocessors_list): - p_in = preprocessors[p].get_properties()['input'] - p_out = preprocessors[p].get_properties()['output'] - if p in cls._get_pipeline(): - continue - elif sparse and SPARSE not in p_in: - continue - elif not sparse and DENSE not in p_in: - continue - for cidx, c in enumerate(classifiers_list): - c_in = classifiers[c].get_properties()['input'] - if p_out == INPUT: - # Preprocessor does not change the format - if (sparse and SPARSE in c_in) or \ - (not sparse and DENSE in c_in): - # Classifier input = Dataset format - matches[pidx, cidx] = 1 - continue - else: - # These won't work - continue - elif p_out == DENSE and DENSE in c_in: - matches[pidx, cidx] = 1 - continue - elif p_out == SPARSE and SPARSE in c_in: - matches[pidx, cidx] = 1 - continue - else: - # These won't work - continue - return matches - - @classmethod - def _get_idx_to_keep(cls, m): - # Returns all rows and cols where matches contains not only zeros - keep_row = [idx for idx in range(m.shape[0]) if np.sum(m[idx, :]) != 0] - keep_col = [idx for idx in range(m.shape[1]) if np.sum(m[:, idx]) != 0] - return keep_col, keep_row - - @classmethod - def sanitize_arrays(cls, m, preprocessors_list, classifiers_list, - preprocessors, classifiers): - assert len(preprocessors_list) == len(preprocessors.keys()) - assert len(classifiers_list) == len(classifiers.keys()) - assert isinstance(m, np.ndarray) - # remove components that are not usable for this problem - keep_col, keep_row = ParamSklearnClassifier._get_idx_to_keep(m) - - m = m[keep_row, :] - m = m[:, keep_col] - preproc_list = [preprocessors_list[p] for p in keep_row] - class_list = [classifiers_list[p] for p in keep_col] - - new_class = dict() - for c in class_list: - new_class[c] = classifiers[c] - new_preproc = dict() - for p in preproc_list: - new_preproc[p] = preprocessors[p] - - assert len(new_preproc) == m.shape[0] - assert len(new_class) == m.shape[1] - return m, preproc_list, class_list, new_preproc, new_class - - @classmethod - def add_forbidden(cls, conf_space, preproc_list, class_list, matches): - for pdx, p in enumerate(preproc_list): - if np.sum(matches[pdx, :]) == matches.shape[1]: - continue - for cdx, c in enumerate(class_list): - if matches[pdx, cdx] == 0: - try: - conf_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(conf_space.get_hyperparameter( - "classifier"), c), - ForbiddenEqualsClause(conf_space.get_hyperparameter( - "preprocessor"), p))) - except: - pass - return conf_space - @classmethod def get_available_components(cls, available_comp, data_prop, inc, exc): - components = OrderedDict() + components_dict = OrderedDict() for name in available_comp: if inc is not None and name not in inc: continue @@ -186,9 +96,9 @@ def get_available_components(cls, available_comp, data_prop, inc, exc): if data_prop.get('multilabel') is True and available_comp[name]. \ get_properties()['handles_multilabel'] is False: continue - components[name] = entry + components_dict[name] = entry - return components + return components_dict @classmethod def get_hyperparameter_search_space(cls, include_estimators=None, @@ -206,7 +116,8 @@ def get_hyperparameter_search_space(cls, include_estimators=None, "exclude_preprocessors cannot be used together.") if dataset_properties is None or not isinstance(dataset_properties, dict): - dataset_properties = {} + dataset_properties = dict() + if 'sparse' not in dataset_properties: # This dataset is probaby dense dataset_properties['sparse'] = False @@ -234,9 +145,9 @@ def get_hyperparameter_search_space(cls, include_estimators=None, preprocessors_list = preprocessors.keys() classifiers_list = classifiers.keys() - matches = ParamSklearnClassifier.get_match_array(preprocessors=preprocessors, - classifiers=classifiers, - sparse=dataset_properties.get('sparse')) + matches = ParamSklearn.create_searchspace_util.get_match_array( + preprocessors=preprocessors, estimators=classifiers, + sparse=dataset_properties.get('sparse'), pipeline=cls._get_pipeline()) # Now we have only legal preprocessors/classifiers we combine them # Simple sanity checks @@ -249,11 +160,10 @@ def get_hyperparameter_search_space(cls, include_estimators=None, if np.sum(matches) < (matches.shape[0] * matches.shape[1]): matches, preprocessors_list, classifiers_list, preprocessors, classifiers = \ - ParamSklearnClassifier.sanitize_arrays(m=matches, - preprocessors_list=preprocessors_list, - classifiers_list=classifiers_list, - preprocessors=preprocessors, - classifiers=classifiers) + ParamSklearn.create_searchspace_util.sanitize_arrays( + m=matches, preprocessors_list=preprocessors_list, + estimators_list=classifiers_list, + preprocessors=preprocessors, estimators=classifiers) # Sanity checks assert len(preprocessors_list) > 0, "No valid preprocessors found" @@ -308,10 +218,9 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # And now add forbidden parameter configurations # According to matches - configuration_space = ParamSklearnClassifier.add_forbidden( - conf_space=configuration_space, - preproc_list=preprocessors_list, - class_list=classifiers_list, matches=matches) + configuration_space = ParamSklearn.create_searchspace_util.add_forbidden( + conf_space=configuration_space, preproc_list=preprocessors_list, + est_list=classifiers_list, matches=matches, est_type="classifier") # A classifier which can handle sparse data after the densifier for key in classifiers: @@ -353,7 +262,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # Multinomial NB does not work with negative values, don't use # it with standardization, features learning, pca classifiers_ = ["multinomial_nb", "bagged_multinomial_nb", - "bernoulli_nb"] + "bernoulli_nb"] preproc_with_negative_X = ["kitchen_sinks", "sparse_filtering", "pca", "truncatedSVD"] for c in classifiers_: diff --git a/ParamSklearn/create_searchspace_util.py b/ParamSklearn/create_searchspace_util.py new file mode 100644 index 0000000000..b6638f824a --- /dev/null +++ b/ParamSklearn/create_searchspace_util.py @@ -0,0 +1,100 @@ + +import numpy as np + +from HPOlibConfigSpace.forbidden import ForbiddenAndConjunction +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause + +from ParamSklearn.util import SPARSE, DENSE, INPUT + + +def get_match_array(preprocessors, estimators, sparse, pipeline): + # Now select combinations that work + # We build a binary matrix, where a 1 indicates, that a combination + # work on this dataset based in the dataset and the input/output formats + # A 'zero'-row (column) is an unusable preprocessor (classifier) + # A single zero results in an forbidden condition + preprocessors_list = preprocessors.keys() + estimator_list = estimators.keys() + matches = np.zeros([len(preprocessors), len(estimators)]) + for pidx, p in enumerate(preprocessors_list): + p_in = preprocessors[p].get_properties()['input'] + p_out = preprocessors[p].get_properties()['output'] + if p in pipeline: + continue + elif sparse and SPARSE not in p_in: + continue + elif not sparse and DENSE not in p_in: + continue + for cidx, c in enumerate(estimator_list): + c_in = estimators[c].get_properties()['input'] + if p_out == INPUT: + # Preprocessor does not change the format + if (sparse and SPARSE in c_in) or \ + (not sparse and DENSE in c_in): + # Estimator input = Dataset format + matches[pidx, cidx] = 1 + continue + else: + # These won't work + continue + elif p_out == DENSE and DENSE in c_in: + matches[pidx, cidx] = 1 + continue + elif p_out == SPARSE and SPARSE in c_in: + matches[pidx, cidx] = 1 + continue + else: + # These won't work + continue + return matches + + +def _get_idx_to_keep(m): + # Returns all rows and cols where matches contains not only zeros + keep_row = [idx for idx in range(m.shape[0]) if np.sum(m[idx, :]) != 0] + keep_col = [idx for idx in range(m.shape[1]) if np.sum(m[:, idx]) != 0] + return keep_col, keep_row + + +def sanitize_arrays(m, preprocessors_list, estimators_list, + preprocessors, estimators): + assert len(preprocessors_list) == len(preprocessors.keys()) + assert len(estimators_list) == len(estimators.keys()) + assert isinstance(m, np.ndarray) + # remove components that are not usable for this problem + keep_col, keep_row = _get_idx_to_keep(m) + + m = m[keep_row, :] + m = m[:, keep_col] + preproc_list = [preprocessors_list[p] for p in keep_row] + est_list = [estimators_list[p] for p in keep_col] + + new_est = dict() + for c in est_list: + new_est[c] = estimators[c] + new_preproc = dict() + for p in preproc_list: + new_preproc[p] = preprocessors[p] + + assert len(new_preproc) == m.shape[0] + assert len(new_est) == m.shape[1] + return m, preproc_list, est_list, new_preproc, new_est + + +def add_forbidden(conf_space, preproc_list, est_list, matches, est_type='classifier'): + assert est_type in ('classifier', 'regressor'), "'task_type is %s" % est_type + + for pdx, p in enumerate(preproc_list): + if np.sum(matches[pdx, :]) == matches.shape[1]: + continue + for cdx, c in enumerate(est_list): + if matches[pdx, cdx] == 0: + try: + conf_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(conf_space.get_hyperparameter( + est_type), c), + ForbiddenEqualsClause(conf_space.get_hyperparameter( + "preprocessor"), p))) + except: + pass + return conf_space diff --git a/tests/test_classification.py b/tests/test_classification.py index 8e521af94e..4dd374a1e1 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -176,4 +176,4 @@ def test_set_params(self): pass def test_get_params(self): - pass + pass \ No newline at end of file diff --git a/tests/test_classification_searchspace.py b/tests/test_create_searchspace_util_classification.py similarity index 62% rename from tests/test_classification_searchspace.py rename to tests/test_create_searchspace_util_classification.py index 088849e454..8bba36527f 100644 --- a/tests/test_classification_searchspace.py +++ b/tests/test_create_searchspace_util_classification.py @@ -15,6 +15,7 @@ from ParamSklearn.components.preprocessing.random_trees_embedding import RandomTreesEmbedding from ParamSklearn.classification import ParamSklearnClassifier +import ParamSklearn.create_searchspace_util class TestCreateClassificationSearchspace(unittest.TestCase): @@ -24,61 +25,71 @@ def test_get_match_array(self): preprocessors["pca"] = PCA # dense classifiers = OrderedDict() classifiers["random_forest"] = RandomForest - m = ParamSklearnClassifier.get_match_array( - preprocessors=preprocessors, classifiers=classifiers, sparse=True) + m = ParamSklearn.create_searchspace_util.get_match_array( + preprocessors=preprocessors, estimators=classifiers, sparse=True, + pipeline=ParamSklearnClassifier._get_pipeline()) self.assertEqual(numpy.sum(m), 0) - m = ParamSklearnClassifier.get_match_array( - preprocessors=preprocessors, classifiers=classifiers, sparse=False) + m = ParamSklearn.create_searchspace_util.get_match_array( + preprocessors=preprocessors, estimators=classifiers, sparse=False, + pipeline=ParamSklearnClassifier._get_pipeline()) self.assertEqual(m, [[1]]) preprocessors['TSVD'] = TruncatedSVD # sparse - m = ParamSklearnClassifier.get_match_array( - preprocessors=preprocessors, classifiers=classifiers, sparse=True) + m = ParamSklearn.create_searchspace_util.get_match_array( + preprocessors=preprocessors, estimators=classifiers, sparse=True, + pipeline=ParamSklearnClassifier._get_pipeline()) self.assertEqual(m[0], [0]) # pca self.assertEqual(m[1], [1]) # svd - m = ParamSklearnClassifier.get_match_array( - preprocessors=preprocessors, classifiers=classifiers, sparse=False) + m = ParamSklearn.create_searchspace_util.get_match_array( + preprocessors=preprocessors, estimators=classifiers, sparse=False, + pipeline=ParamSklearnClassifier._get_pipeline()) self.assertEqual(m[0], [1]) # pca self.assertEqual(m[1], [0]) # svd preprocessors['none'] = NoPreprocessing # sparse + dense - m = ParamSklearnClassifier.get_match_array( - preprocessors=preprocessors, classifiers=classifiers, sparse=True) + m = ParamSklearn.create_searchspace_util.get_match_array( + preprocessors=preprocessors, estimators=classifiers, sparse=True, + pipeline=ParamSklearnClassifier._get_pipeline()) self.assertEqual(m[0, :], [0]) # pca self.assertEqual(m[1, :], [1]) # tsvd self.assertEqual(m[2, :], [0]) # none - m = ParamSklearnClassifier.get_match_array( - preprocessors=preprocessors, classifiers=classifiers, sparse=False) + m = ParamSklearn.create_searchspace_util.get_match_array( + preprocessors=preprocessors, estimators=classifiers, sparse=False, + pipeline=ParamSklearnClassifier._get_pipeline()) self.assertEqual(m[0, :], [1]) # pca self.assertEqual(m[1, :], [0]) # tsvd self.assertEqual(m[2, :], [1]) # none classifiers['libsvm'] = LibLinear_SVC - m = ParamSklearnClassifier.get_match_array( - preprocessors=preprocessors, classifiers=classifiers, sparse=False) + m = ParamSklearn.create_searchspace_util.get_match_array( + preprocessors=preprocessors, estimators=classifiers, sparse=False, + pipeline=ParamSklearnClassifier._get_pipeline()) self.assertListEqual(list(m[0, :]), [1, 1]) # pca self.assertListEqual(list(m[1, :]), [0, 0]) # tsvd self.assertListEqual(list(m[2, :]), [1, 1]) # none - m = ParamSklearnClassifier.get_match_array( - preprocessors=preprocessors, classifiers=classifiers, sparse=True) + m = ParamSklearn.create_searchspace_util.get_match_array( + preprocessors=preprocessors, estimators=classifiers, sparse=True, + pipeline=ParamSklearnClassifier._get_pipeline()) self.assertListEqual(list(m[0, :]), [0, 0]) # pca self.assertListEqual(list(m[1, :]), [1, 1]) # tsvd self.assertListEqual(list(m[2, :]), [0, 1]) # none preprocessors['rte'] = RandomTreesEmbedding - m = ParamSklearnClassifier.get_match_array( - preprocessors=preprocessors, classifiers=classifiers, sparse=False) + m = ParamSklearn.create_searchspace_util.get_match_array( + preprocessors=preprocessors, estimators=classifiers, sparse=False, + pipeline=ParamSklearnClassifier._get_pipeline()) self.assertListEqual(list(m[0, :]), [1, 1]) # pca self.assertListEqual(list(m[1, :]), [0, 0]) # tsvd self.assertListEqual(list(m[2, :]), [1, 1]) # none self.assertListEqual(list(m[3, :]), [0, 1]) # random trees embedding - m = ParamSklearnClassifier.get_match_array( - preprocessors=preprocessors, classifiers=classifiers, sparse=True) + m = ParamSklearn.create_searchspace_util.get_match_array( + preprocessors=preprocessors, estimators=classifiers, sparse=True, + pipeline=ParamSklearnClassifier._get_pipeline()) self.assertListEqual(list(m[0, :]), [0, 0]) # pca self.assertListEqual(list(m[1, :]), [1, 1]) # tsvd self.assertListEqual(list(m[2, :]), [0, 1]) # none @@ -86,7 +97,7 @@ def test_get_match_array(self): def test_get_idx_to_keep(self): m = numpy.zeros([3, 4]) - col, row = ParamSklearnClassifier._get_idx_to_keep(m) + col, row = ParamSklearn.create_searchspace_util._get_idx_to_keep(m) self.assertListEqual(col, []) self.assertListEqual(row, []) @@ -99,13 +110,12 @@ def test_get_idx_to_keep(self): row_idx = numpy.random.randint(low=0, high=100, size=1)[0] r_keep.add(row_idx) m[row_idx, col_idx] = 1 - col, row = ParamSklearnClassifier._get_idx_to_keep(m) + col, row = ParamSklearn.create_searchspace_util._get_idx_to_keep(m) self.assertListEqual(col, sorted(c_keep)) self.assertListEqual(row, sorted(r_keep)) [self.assertTrue(c < m.shape[1]) for c in c_keep] [self.assertTrue(r < m.shape[0]) for r in r_keep] - def test_sanitize_arrays(self): m = numpy.zeros([2, 3]) preprocessors_list = ['pa', 'pb'] @@ -114,7 +124,11 @@ def test_sanitize_arrays(self): classifiers = OrderedDict([['ca', 1], ['cb', 2], ['cc', 3]]) # all zeros -> empty - new_m, new_preprocessors_list, new_classifier_list, new_preproc, new_class = ParamSklearnClassifier.sanitize_arrays(m=m, preprocessors=preprocessors, preprocessors_list=preprocessors_list, classifiers=classifiers, classifiers_list=classifier_list) + new_m, new_preprocessors_list, new_classifier_list, new_preproc, new_class = \ + ParamSklearn.create_searchspace_util.sanitize_arrays( + m=m, preprocessors=preprocessors, + preprocessors_list=preprocessors_list, estimators=classifiers, + estimators_list=classifier_list) self.assertEqual(len(new_m), 0) self.assertTrue(len(new_classifier_list) == len(new_preprocessors_list) == 0) self.assertTrue(len(new_preproc) == len(new_class) == 0) @@ -124,7 +138,11 @@ def test_sanitize_arrays(self): class_idx = numpy.random.randint(low=0, high=m.shape[1], size=1)[0] pre_idx = numpy.random.randint(low=0, high=m.shape[0], size=1)[0] m[pre_idx, class_idx] = 1 - new_m, new_preprocessors_list, new_classifier_list, new_preproc, new_class = ParamSklearnClassifier.sanitize_arrays(m=m, preprocessors=preprocessors, preprocessors_list=preprocessors_list, classifiers=classifiers, classifiers_list=classifier_list) + new_m, new_preprocessors_list, new_classifier_list, new_preproc, new_class = \ + ParamSklearn.create_searchspace_util.sanitize_arrays( + m=m, preprocessors=preprocessors, + preprocessors_list=preprocessors_list, + estimators=classifiers, estimators_list=classifier_list) self.assertIn(preprocessors_list[pre_idx], new_preprocessors_list) self.assertIn(preprocessors_list[pre_idx], preprocessors) self.assertIn(classifier_list[class_idx], new_classifier_list) @@ -132,7 +150,11 @@ def test_sanitize_arrays(self): self.assertTrue(new_m.shape[0] == new_m.shape[1] == 1) m = numpy.array([[1, 0, 0], [0, 1, 0]]) - new_m, new_preprocessors_list, new_classifier_list, new_preproc, new_class = ParamSklearnClassifier.sanitize_arrays(m=m, preprocessors=preprocessors, preprocessors_list=preprocessors_list, classifiers=classifiers, classifiers_list=classifier_list) + new_m, new_preprocessors_list, new_classifier_list, new_preproc, new_class = \ + ParamSklearn.create_searchspace_util.sanitize_arrays( + m=m, preprocessors=preprocessors, + preprocessors_list=preprocessors_list, estimators=classifiers, + estimators_list=classifier_list) self.assertListEqual(preprocessors_list, new_preprocessors_list) [self.assertIn(p, preprocessors) for p in preprocessors_list] self.assertListEqual(classifier_list[:-1], new_classifier_list) @@ -145,16 +167,22 @@ def test_add_forbidden(self): preprocessors_list = ['pa', 'pb'] classifier_list = ['ca', 'cb', 'cc'] cs = ConfigurationSpace() - preprocessor = CategoricalHyperparameter(name='preprocessor', choices=preprocessors_list) - classifier = CategoricalHyperparameter(name='classifier', choices=classifier_list) + preprocessor = CategoricalHyperparameter(name='preprocessor', + choices=preprocessors_list) + classifier = CategoricalHyperparameter(name='classifier', + choices=classifier_list) cs.add_hyperparameter(preprocessor) cs.add_hyperparameter(classifier) - new_cs = ParamSklearnClassifier.add_forbidden(conf_space=cs, preproc_list=preprocessors_list, class_list=classifier_list, matches=m) + new_cs = ParamSklearn.create_searchspace_util.add_forbidden( + conf_space=cs, preproc_list=preprocessors_list, + est_list=classifier_list, matches=m, est_type="classifier") self.assertEqual(len(new_cs.forbidden_clauses), 0) self.assertIsInstance(new_cs, ConfigurationSpace) m[0, 0] = 0 - new_cs = ParamSklearnClassifier.add_forbidden(conf_space=cs, preproc_list=preprocessors_list, class_list=classifier_list, matches=m) + new_cs = ParamSklearn.create_searchspace_util.add_forbidden( + conf_space=cs, preproc_list=preprocessors_list, + est_list=classifier_list, matches=m, est_type="classifier") self.assertEqual(len(new_cs.forbidden_clauses), 1) self.assertEqual(new_cs.forbidden_clauses[0].components[0].value, 'ca') self.assertEqual(new_cs.forbidden_clauses[0].components[1].value, 'pa') From 6b9d1ead40c3f02ac6f87983cb3de9da904bb672 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 13 Mar 2015 14:57:16 +0100 Subject: [PATCH 175/352] adjust estimator dict --- .../components/regression/gaussian_process.py | 15 +++++++++------ .../components/regression/gradient_boosting.py | 13 +++++++++---- .../components/regression/random_forest.py | 12 ++++++++++-- .../components/regression/ridge_regression.py | 13 +++++++++---- .../regression/support_vector_regression.py | 9 ++++++++- 5 files changed, 45 insertions(+), 17 deletions(-) diff --git a/ParamSklearn/components/regression/gaussian_process.py b/ParamSklearn/components/regression/gaussian_process.py index 1de9b0a60c..55a0f57ccb 100644 --- a/ParamSklearn/components/regression/gaussian_process.py +++ b/ParamSklearn/components/regression/gaussian_process.py @@ -1,16 +1,13 @@ -import copy - import numpy as np import sklearn.gaussian_process import sklearn.preprocessing from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter -from ..regression_base import ParamSklearnRegressionAlgorithm +from ParamSklearn.components.regression_base import ParamSklearnRegressionAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class GaussianProcess(ParamSklearnRegressionAlgorithm): @@ -59,8 +56,14 @@ def get_properties(): 'prefers_data_scaled': True, # TODO find out if this is good because of sparcity... 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': False, + 'handles_multiclass': False, + 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/regression/gradient_boosting.py b/ParamSklearn/components/regression/gradient_boosting.py index ddc1625f0f..15ad04f1bc 100644 --- a/ParamSklearn/components/regression/gradient_boosting.py +++ b/ParamSklearn/components/regression/gradient_boosting.py @@ -2,12 +2,11 @@ import sklearn.ensemble from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.conditions import InCondition from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant + UniformIntegerHyperparameter, CategoricalHyperparameter, Constant -from ..regression_base import ParamSklearnRegressionAlgorithm +from ParamSklearn.components.regression_base import ParamSklearnRegressionAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS class GradientBoosting(ParamSklearnRegressionAlgorithm): @@ -111,9 +110,15 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': False, # TODO find out if this is good because of sparcity... + 'handles_regression': True, + 'handles_classification': False, + 'handles_multiclass': False, + 'handles_multilabel': False, 'prefers_data_normalized': False, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/regression/random_forest.py b/ParamSklearn/components/regression/random_forest.py index bca510a932..4e2c01b9ad 100644 --- a/ParamSklearn/components/regression/random_forest.py +++ b/ParamSklearn/components/regression/random_forest.py @@ -5,9 +5,11 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ..regression_base import ParamSklearnRegressionAlgorithm +from ParamSklearn.components.regression_base import ParamSklearnRegressionAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS # get our own forests to replace the sklearn ones -from ...implementations import forest +from ParamSklearn.implementations import forest + class RandomForest(ParamSklearnRegressionAlgorithm): def __init__(self, n_estimators, criterion, max_features, @@ -98,9 +100,15 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': False, # TODO find out if this is good because of sparcity... + 'handles_regression': True, + 'handles_classification': False, + 'handles_multiclass': False, + 'handles_multilabel': False, 'prefers_data_normalized': False, 'is_deterministic': True, 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/regression/ridge_regression.py b/ParamSklearn/components/regression/ridge_regression.py index 029c89b826..76d0dd40fb 100644 --- a/ParamSklearn/components/regression/ridge_regression.py +++ b/ParamSklearn/components/regression/ridge_regression.py @@ -2,11 +2,10 @@ import sklearn.linear_model from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter -from ..regression_base import ParamSklearnRegressionAlgorithm +from ParamSklearn.components.regression_base import ParamSklearnRegressionAlgorithm +from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS class RidgeRegression(ParamSklearnRegressionAlgorithm): @@ -50,9 +49,15 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': True, # TODO find out if this is good because of sparcity... + 'handles_regression': True, + 'handles_classification': False, + 'handles_multiclass': False, + 'handles_multilabel': False, 'prefers_data_normalized': True, 'is_deterministic': True, 'handles_sparse': True, + 'input': (SPARSE, DENSE), + 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/regression/support_vector_regression.py b/ParamSklearn/components/regression/support_vector_regression.py index e803f7f75c..e187b21646 100644 --- a/ParamSklearn/components/regression/support_vector_regression.py +++ b/ParamSklearn/components/regression/support_vector_regression.py @@ -9,7 +9,8 @@ UnParametrizedHyperparameter -from ..regression_base import ParamSklearnRegressionAlgorithm +from ParamSklearn.components.regression_base import ParamSklearnRegressionAlgorithm +from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS # Something is wrong here... """ @@ -82,9 +83,15 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': True, # TODO find out if this is good because of sparcity... + 'handles_regression': True, + 'handles_classification': False, + 'handles_multiclass': False, + 'handles_multilabel': False, 'prefers_data_normalized': True, 'is_deterministic': True, 'handles_sparse': True, + 'input': (SPARSE, DENSE), + 'ouput': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} From aa602e5d906b4ec3a2609db11a18f0b6a7ffb49f Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 13 Mar 2015 14:57:50 +0100 Subject: [PATCH 176/352] make searchspace generation more general --- ParamSklearn/regression.py | 216 +++++++++++++++++++++++-------------- tests/test_regression.py | 31 +++++- 2 files changed, 161 insertions(+), 86 deletions(-) diff --git a/ParamSklearn/regression.py b/ParamSklearn/regression.py index 9de8fbd88f..7ed74bb93d 100644 --- a/ParamSklearn/regression.py +++ b/ParamSklearn/regression.py @@ -1,24 +1,19 @@ -from collections import defaultdict -import copy +from collections import OrderedDict from itertools import product import sklearn if sklearn.__version__ != "0.15.2": raise ValueError("ParamSklearn supports only sklearn version 0.15.2, " "you installed %s." % sklearn.__version__) - from sklearn.base import RegressorMixin -from sklearn.pipeline import Pipeline -from sklearn.utils import check_random_state +import numpy as np -from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ - InactiveHyperparameter -from HPOlibConfigSpace.conditions import EqualsCondition from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction -from . import components as components -from .base import ParamSklearnBaseEstimator +from ParamSklearn import components as components +from ParamSklearn.base import ParamSklearnBaseEstimator +from ParamSklearn.util import SPARSE +import ParamSklearn.create_searchspace_util class ParamSklearnRegressor(RegressorMixin, ParamSklearnBaseEstimator): @@ -66,8 +61,6 @@ class ParamSklearnRegressor(RegressorMixin, ParamSklearnBaseEstimator): -------- """ - _pipeline = ["imputation", "rescaling", "__preprocessor__", - "__estimator__"] def _validate_input_X(self, X): # TODO: think of all possible states which can occur and how to @@ -85,6 +78,22 @@ def add_model_class(self, model): """ raise NotImplementedError() + @classmethod + def get_available_components(cls, available_comp, data_prop, inc, exc): + components_dict = OrderedDict() + for name in available_comp: + if inc is not None and name not in inc: + continue + elif exc is not None and name in exc: + continue + entry = available_comp[name] + + if not entry.get_properties()['handles_regression']: + continue + components_dict[name] = entry + return components_dict + + @classmethod def get_hyperparameter_search_space(cls, include_estimators=None, exclude_estimators=None, @@ -137,94 +146,135 @@ def get_hyperparameter_search_space(cls, include_estimators=None, if dataset_properties is None or not isinstance(dataset_properties, dict): dataset_properties = dict() + if 'sparse' not in dataset_properties: + # This dataset is probaby dense + dataset_properties['sparse'] = False + + available_preprocessors = components.preprocessing_components._preprocessors + preprocessors = ParamSklearnRegressor.get_available_components( + available_comp=available_preprocessors, + data_prop=dataset_properties, inc=include_preprocessors, + exc=exclude_preprocessors) + # Compile a list of all estimator objects for this problem available_regressors = ParamSklearnRegressor._get_estimator_components() - - # We assume that there exists only a single regression task. which - # is different to classification where we have multiclass, - # multilabel, etc - regressors = dict() - for name in available_regressors: - if include_estimators is not None and \ - name not in include_estimators: - continue - elif exclude_estimators is not None and \ - name in exclude_estimators: - continue - if dataset_properties.get('sparse') is True and \ - available_regressors[name].get_properties()[ - 'handles_sparse'] is False: - continue - regressors[name] = available_regressors[name] + regressors = ParamSklearnRegressor.get_available_components( + available_comp=available_regressors, data_prop=dataset_properties, + inc=include_estimators, exc=exclude_estimators) if len(regressors) == 0: - raise ValueError("No regressors to build a configuration space " - "for...") + raise ValueError("No regressors found") + if len(preprocessors) == 0: + raise ValueError("No preprocessors found, please add NoPreprocessing") + + preprocessors_list = preprocessors.keys() + regressors_list = regressors.keys() + matches = ParamSklearn.create_searchspace_util.get_match_array( + preprocessors=preprocessors, estimators=regressors, + sparse=dataset_properties.get('sparse'), pipeline=cls._get_pipeline()) + + # Now we have only legal preprocessors/classifiers we combine them + # Simple sanity checks + assert np.sum(matches) != 0, "No valid preprocessor/regressor " \ + "combination found, probably a bug" + assert np.sum(matches) <= (matches.shape[0] * matches.shape[1]), \ + "'matches' is not binary; %s <= %d, [%d*%d]" % \ + (str(np.sum(matches)), matches.shape[0]*matches.shape[1], + matches.shape[0], matches.shape[1]) + + if np.sum(matches) < (matches.shape[0] * matches.shape[1]): + matches, preprocessors_list, regressors_list, preprocessors, regressors = \ + ParamSklearn.create_searchspace_util.sanitize_arrays( + m=matches, preprocessors_list=preprocessors_list, + estimators_list=regressors_list, + preprocessors=preprocessors, estimators=regressors) + + # Sanity checks + assert len(preprocessors_list) > 0, "No valid preprocessors found" + assert len(regressors_list) > 0, "No valid classifiers found" + + assert len(preprocessors_list) == matches.shape[0], \ + "Preprocessor deleting went wrong" + assert len(regressors_list) == matches.shape[1], \ + "Classifier deleting went wrong" + assert [r in regressors_list for r in regressors] + assert [p in preprocessors_list for p in preprocessors] + + # Select the default preprocessor before the always active + # preprocessors are added, so they will not be selected as default + # preprocessors + if "no_preprocessing" in preprocessors: + preprocessor_default = "no_preprocessing" + else: + preprocessor_default = sorted(preprocessors.keys())[0] + + # Now add always present preprocessors + for name in available_preprocessors: + if name in cls._get_pipeline(): + preprocessors[name] = available_preprocessors[name] # Hardcode the defaults based on some educated guesses - classifier_defaults = ['random_forest', 'liblinear', 'sgd', + regressor_defaults = ['random_forest', 'liblinear', 'sgd', 'libsvm_svc'] regressor_default = None - for cd_ in classifier_defaults: - if cd_ in regressors: - regressor_default = cd_ + for rd_ in regressor_defaults: + if rd_ not in regressors: + continue + no_preprocessing_idx = preprocessors_list.index(preprocessor_default) + rd_index = regressors_list.index(rd_) + if matches[no_preprocessing_idx, rd_index] == 1: + regressor_default = rd_ break if regressor_default is None: regressor_default = regressors.keys()[0] - # Compile a list of preprocessor for this problem - available_preprocessors = \ - components.preprocessing_components._preprocessors - - preprocessors = dict() - for name in available_preprocessors: - if name in ParamSklearnRegressor._pipeline: - preprocessors[name] = available_preprocessors[name] - continue - elif include_preprocessors is not None and \ - name not in include_preprocessors: - continue - elif exclude_preprocessors is not None and \ - name in exclude_preprocessors: - continue - - if dataset_properties.get('sparse') is True and \ - available_preprocessors[name].get_properties()[ - 'handles_sparse'] is False: - continue - elif dataset_properties.get('sparse') is False and \ - available_preprocessors[name].get_properties()[ - 'handles_dense'] is False: - continue - elif available_preprocessors[name]. \ - get_properties()['handles_regression'] is False: - continue - - preprocessors[name] = available_preprocessors[name] - # Get the configuration space configuration_space = super(ParamSklearnRegressor, cls).\ - get_hyperparameter_search_space( - estimator_name=cls._get_estimator_hyperparameter_name(), - default_estimator=regressor_default, - estimator_components=regressors, - default_preprocessor="no_preprocessing", - preprocessor_components=preprocessors, - dataset_properties=dataset_properties, - always_active=cls._pipeline) - - # And now add forbidden parameter configurations which would take too - # long - + get_hyperparameter_search_space(estimator_name=cls._get_estimator_hyperparameter_name(), + default_estimator=regressor_default, + estimator_components=regressors, + default_preprocessor=preprocessor_default, + preprocessor_components=preprocessors, + dataset_properties=dataset_properties, + always_active=cls._get_pipeline()) + + # And now add forbidden parameter configurations + # According to matches + configuration_space = ParamSklearn.create_searchspace_util.add_forbidden( + conf_space=configuration_space, preproc_list=preprocessors_list, + est_list=regressors_list, matches=matches, est_type="regressor") + + # A regressor which can handle sparse data after the densifier + for key in regressors: + if SPARSE in regressors[key].get_properties()['input']: + try: + configuration_space.add_forbidden_clause( + ForbiddenAndConjunction( + ForbiddenEqualsClause( + configuration_space.get_hyperparameter( + 'regressor'), key), + ForbiddenEqualsClause( + configuration_space.get_hyperparameter( + 'preprocessor'), 'densifier') + )) + except: + pass + + + # which would take too long # Combinations of tree-based models with feature learning: regressors_ = ["random_forest", "gradient_boosting", "gaussian_process"] feature_learning_ = ["kitchen_sinks", "sparse_filtering"] - for c, f in product(regressors_, feature_learning_): + for r, f in product(regressors_, feature_learning_): + if r not in regressors_list: + continue + if f not in preprocessors_list: + continue try: configuration_space.add_forbidden_clause(ForbiddenAndConjunction( ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "regressor"), c), + "regressor"), r), ForbiddenEqualsClause(configuration_space.get_hyperparameter( "preprocessor"), f))) except: @@ -238,4 +288,8 @@ def _get_estimator_components(): @staticmethod def _get_estimator_hyperparameter_name(): - return "regressor" \ No newline at end of file + return "regressor" + + @staticmethod + def _get_pipeline(): + return ["imputation", "rescaling", "__preprocessor__", "__estimator__"] diff --git a/tests/test_regression.py b/tests/test_regression.py index c0dae9ab28..695599dd62 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -1,8 +1,6 @@ __author__ = 'eggenspk' import copy -import numpy as np -import StringIO import unittest import sklearn.datasets @@ -18,11 +16,34 @@ from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm import ParamSklearn.components.regression as regression_components import ParamSklearn.components.preprocessing as preprocessing_components -from ParamSklearn.util import get_dataset +from ParamSklearn.util import get_dataset, SPARSE, DENSE, PREDICTIONS class TestParamSKlearnRegressor(unittest.TestCase): + def test_io_dict(self): + regressors = regression_components._regressors + for c in regressors: + props = regressors[c].get_properties() + self.assertIn('input', props) + self.assertIn('output', props) + inp = props['input'] + output = props['output'] + + self.assertIsInstance(inp, tuple) + self.assertIsInstance(output, str) + for i in inp: + self.assertIn(i, (SPARSE, DENSE)) + self.assertEqual(output, PREDICTIONS) + self.assertIn('handles_regression', props) + self.assertTrue(props['handles_regression']) + self.assertIn('handles_classification', props) + self.assertIn('handles_multiclass', props) + self.assertIn('handles_multilabel', props) + self.assertFalse(props['handles_classification']) + self.assertFalse(props['handles_multiclass']) + self.assertFalse(props['handles_multilabel']) + def test_find_regressors(self): regressors = regression_components._regressors self.assertGreaterEqual(len(regressors), 1) @@ -56,7 +77,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(38, len(hyperparameters)) + self.assertEqual(36, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): @@ -73,7 +94,7 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): cs = ParamSklearnRegressor.get_hyperparameter_search_space( include_preprocessors=['pca']) self.assertEqual(cs.get_hyperparameter('preprocessor'), - CategoricalHyperparameter('preprocessor', ["None", 'pca'])) + CategoricalHyperparameter('preprocessor', ['pca', ])) cs = ParamSklearnRegressor.get_hyperparameter_search_space( exclude_preprocessors=['pca']) From 6ae96783f5d2bc9d1ce109d49dd0aa1557a2a87a Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 13 Mar 2015 14:57:56 +0100 Subject: [PATCH 177/352] Remove densifier hyperparameter --- ParamSklearn/components/preprocessing/densifier.py | 8 ++------ tests/test_regression.py | 2 +- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/ParamSklearn/components/preprocessing/densifier.py b/ParamSklearn/components/preprocessing/densifier.py index 3706b80701..069e8d6b5b 100644 --- a/ParamSklearn/components/preprocessing/densifier.py +++ b/ParamSklearn/components/preprocessing/densifier.py @@ -1,7 +1,7 @@ from scipy import sparse from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter +from HPOlibConfigSpace.hyperparameters import Constant from ParamSklearn.components.preprocessor_base import \ ParamSklearnPreprocessingAlgorithm @@ -9,8 +9,7 @@ class Densifier(ParamSklearnPreprocessingAlgorithm): - def __init__(self, densify, random_state): - self.densify = bool(densify) + def __init__(self, random_state): self.random_state = random_state def fit(self, X, Y): @@ -42,9 +41,6 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): cs = ConfigurationSpace() - densify = CategoricalHyperparameter("densify", ["False", "True"], - default="False") - cs.add_hyperparameter(densify) return cs def __str__(self): diff --git a/tests/test_regression.py b/tests/test_regression.py index c0dae9ab28..cc9bb7017f 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -56,7 +56,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(38, len(hyperparameters)) + self.assertEqual(37, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From 70337cdda57920e6bd638fc1b96dffe1d01ba397 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 13 Mar 2015 14:58:25 +0100 Subject: [PATCH 178/352] FIX: raise ValueError on illegal default configurations --- ParamSklearn/classification.py | 14 +++++++++----- tests/test_classification.py | 19 +++++++++++++++++++ 2 files changed, 28 insertions(+), 5 deletions(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 6ee2541f69..6716836131 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -326,8 +326,12 @@ def get_hyperparameter_search_space(cls, include_estimators=None, configuration_space.get_hyperparameter( 'preprocessor'), 'densifier') )) - except: - pass + except ValueError as e: + if e.message.startswith("Forbidden clause must be " + "instantiated with a legal " + "hyperparameter value for " + "'preprocessor"): + pass # which would take too long # Combinations of tree-based models with feature learning: @@ -346,7 +350,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, "classifier"), c), ForbiddenEqualsClause(configuration_space.get_hyperparameter( "preprocessor"), f))) - except: + except KeyError: pass # Won't work @@ -365,7 +369,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, "rescaling:strategy"), "standard"), ForbiddenEqualsClause(configuration_space.get_hyperparameter( "classifier"), c))) - except: + except KeyError: pass for c, f in product(classifiers_, preproc_with_negative_X): @@ -379,7 +383,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, "preprocessor"), f), ForbiddenEqualsClause(configuration_space.get_hyperparameter( "classifier"), c))) - except: + except KeyError: pass return configuration_space diff --git a/tests/test_classification.py b/tests/test_classification.py index 8e521af94e..3be64964a7 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -129,6 +129,25 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): exclude_preprocessors=['pca']) self.assertNotIn('pca', str(cs)) + def test_get_hyperparameter_search_space_only_forbidden_combinations(self): + self.assertRaisesRegexp(ValueError, "Configuration:\n" + " bagged_multinomial_nb:alpha, Value: 1.000000\n" + " bagged_multinomial_nb:fit_prior, Value: True\n" + " bagged_multinomial_nb:max_features, Constant: 1.0\n" + " bagged_multinomial_nb:max_samples, Constant: 1.0\n" + " bagged_multinomial_nb:n_estimators, Constant: 100\n" + " classifier, Value: bagged_multinomial_nb\n" + " imputation:strategy, Value: mean\n" + " preprocessor, Value: truncatedSVD\n" + " rescaling:strategy, Value: min/max\n" + " truncatedSVD:target_dim, Value: 128\n" + "violates forbidden clause \(Forbidden: preprocessor == " + "truncatedSVD && Forbidden: classifier == bagged_multinomial_nb\)", + ParamSklearnClassifier.get_hyperparameter_search_space, + include_estimators=['bagged_multinomial_nb'], + include_preprocessors=['truncatedSVD'], + dataset_properties={'sparse':True}) + def test_get_hyperparameter_search_space_dataset_properties(self): cs_mc = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'multiclass': True}) From ac499268b37abd701c4248e404d6438e41bf0102 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 13 Mar 2015 15:39:52 +0100 Subject: [PATCH 179/352] FIX: raise Exception when trying to use a classifier which can handle sparse data after the densifier --- ParamSklearn/classification.py | 2 ++ tests/test_classification.py | 23 +++++++++++++++++++++++ 2 files changed, 25 insertions(+) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 6716836131..77ee0e384d 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -332,6 +332,8 @@ def get_hyperparameter_search_space(cls, include_estimators=None, "hyperparameter value for " "'preprocessor"): pass + else: + raise e # which would take too long # Combinations of tree-based models with feature learning: diff --git a/tests/test_classification.py b/tests/test_classification.py index 3be64964a7..fec0a857e0 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -148,6 +148,29 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): include_preprocessors=['truncatedSVD'], dataset_properties={'sparse':True}) + # It must also be catched that no classifiers which can handle sparse + # data are located behind the densifier + self.assertRaisesRegexp(ValueError, "Configuration:\n" + " classifier, Value: liblinear\n" + " imputation:strategy, Value: mean\n" + " liblinear:C, Value: 1.000000\n" + " liblinear:class_weight, Value: None\n" + " liblinear:dual, Constant: False\n" + " liblinear:fit_intercept, Constant: True\n" + " liblinear:intercept_scaling, Constant: 1\n" + " liblinear:loss, Value: l2\n" + " liblinear:multi_class, Constant: ovr\n" + " liblinear:penalty, Value: l2\n" + " liblinear:tol, Value: 0.000100\n" + " preprocessor, Value: densifier\n" + " rescaling:strategy, Value: min/max\n" + "violates forbidden clause \(Forbidden: classifier == liblinear &&" + " Forbidden: preprocessor == densifier\)", + ParamSklearnClassifier.get_hyperparameter_search_space, + include_estimators=['liblinear'], + include_preprocessors=['densifier'], + dataset_properties={'sparse': True}) + def test_get_hyperparameter_search_space_dataset_properties(self): cs_mc = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'multiclass': True}) From 00232db9c179e3a1da8266771c7c710a3d2702f9 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 16 Mar 2015 16:36:08 +0100 Subject: [PATCH 180/352] Do not use adaboost with feature learning --- ParamSklearn/classification.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 31da769a99..6dfb19bb1a 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -245,8 +245,8 @@ def get_hyperparameter_search_space(cls, include_estimators=None, raise e # which would take too long - # Combinations of tree-based models with feature learning: - classifiers_ = ["extra_trees", "gradient_boosting", + # Combinations of non-linear models with feature learning: + classifiers_ = ["adaboost", "extra_trees", "gradient_boosting", "k_nearest_neighbors", "libsvm_svc", "random_forest"] feature_learning = ["kitchen_sinks", "sparse_filtering"] From a3dec185de1d1973dcc0587c247326875195a1cf Mon Sep 17 00:00:00 2001 From: Manuel Blum Date: Thu, 26 Mar 2015 14:13:08 +0100 Subject: [PATCH 181/352] adding gaussian process classification using GPy --- .../classification/gaussian_process.py | 97 +++++++++++++++++++ .../classification/test_gaussian_process.py | 15 +++ 2 files changed, 112 insertions(+) create mode 100644 ParamSklearn/components/classification/gaussian_process.py create mode 100644 tests/components/classification/test_gaussian_process.py diff --git a/ParamSklearn/components/classification/gaussian_process.py b/ParamSklearn/components/classification/gaussian_process.py new file mode 100644 index 0000000000..cd0db63d93 --- /dev/null +++ b/ParamSklearn/components/classification/gaussian_process.py @@ -0,0 +1,97 @@ +import numpy as np +import setuptools + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from sklearn.preprocessing import OneHotEncoder + +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS +# get our own forests to replace the sklearn ones +import GPy + + +class GPyClassifier(ParamSklearnClassificationAlgorithm): + def __init__(self, random_state=None, n_inducing=5, ard=False): + self.estimators = None + self.n_inducing = int(n_inducing) + + if ard == "True": + self.ard = True + elif ard == "False": + self.ard = False + else: + selfard = ard + + self.enc = None + + def fit(self, X, Y): + # one hot encode targets for one against all classification + self.enc = OneHotEncoder(sparse=False) + targets = self.enc.fit_transform(Y[:,None]) + + # create a list of GP models, one for each class + self.estimators = [] + for i in range(self.enc.n_values_): + # train model + kern = GPy.kern._src.rbf.RBF(X.shape[1], variance=1.0, lengthscale=1.0, ARD=self.ard) + # dense + model = GPy.models.GPClassification(X, targets[:,i,None], kernel=kern) + # sparse + #model = GPy.models.SparseGPClassification(X, targets[:,i,None], kernel=kern, num_inducing=self.n_inducing) + # fit kernel hyperparameters + model.optimize('bfgs', max_iters=100) + # add to list of estimators + self.estimators.append(model) + return self + + def predict(self, X): + if self.estimators is None: + raise NotImplementedError + # get probabilities for each class + probs = np.zeros([len(X), len(self.estimators)]) + for i, model in enumerate(self.estimators): + probs[:,i] = model.predict(X)[0].flatten() + # return the most probable label + return self.enc.active_features_[np.argmax(probs, 1)] + + def predict_proba(self, X): + if self.estimators is None: + raise NotImplementedError() + probs = np.zeros([len(X), len(estimators)]) + for i, model in enumerate(estimators): + probs[:,i] = model.predict(X)[0].flatten() + # normalize to get probabilities + return probs / np.sum(probs,1)[:,None] + + @staticmethod + def get_properties(): + return {'shortname': 'GPy', + 'name': 'Gaussian Process Classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, + # TODO find out what is best used here! + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + ard = CategoricalHyperparameter("ard", ["True", "False"], default="False") + cs = ConfigurationSpace() + cs.add_hyperparameter(ard) + return cs + diff --git a/tests/components/classification/test_gaussian_process.py b/tests/components/classification/test_gaussian_process.py new file mode 100644 index 0000000000..e79ba914bd --- /dev/null +++ b/tests/components/classification/test_gaussian_process.py @@ -0,0 +1,15 @@ +import unittest + +from ParamSklearn.components.classification.gaussian_process import GPyClassifier +from ParamSklearn.util import _test_classifier + +import sklearn.metrics + + +class GPyClassifierComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_classifier(GPyClassifier) + self.assertAlmostEqual(0.97999999999999998, + sklearn.metrics.accuracy_score(predictions, targets)) + From a435cb5584b10f7375583147f42390ccbfd179ce Mon Sep 17 00:00:00 2001 From: Manuel Blum Date: Mon, 30 Mar 2015 15:50:28 +0200 Subject: [PATCH 182/352] Gaussian process classification uses sparse GPs now --- .../components/classification/gaussian_process.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ParamSklearn/components/classification/gaussian_process.py b/ParamSklearn/components/classification/gaussian_process.py index cd0db63d93..8aa2f32b80 100644 --- a/ParamSklearn/components/classification/gaussian_process.py +++ b/ParamSklearn/components/classification/gaussian_process.py @@ -39,9 +39,9 @@ def fit(self, X, Y): # train model kern = GPy.kern._src.rbf.RBF(X.shape[1], variance=1.0, lengthscale=1.0, ARD=self.ard) # dense - model = GPy.models.GPClassification(X, targets[:,i,None], kernel=kern) + # model = GPy.models.GPClassification(X, targets[:,i,None], kernel=kern) # sparse - #model = GPy.models.SparseGPClassification(X, targets[:,i,None], kernel=kern, num_inducing=self.n_inducing) + model = GPy.models.SparseGPClassification(X, targets[:,i,None], kernel=kern, num_inducing=self.n_inducing) # fit kernel hyperparameters model.optimize('bfgs', max_iters=100) # add to list of estimators @@ -61,8 +61,8 @@ def predict(self, X): def predict_proba(self, X): if self.estimators is None: raise NotImplementedError() - probs = np.zeros([len(X), len(estimators)]) - for i, model in enumerate(estimators): + probs = np.zeros([len(X), len(self.estimators)]) + for i, model in enumerate(self.estimators): probs[:,i] = model.predict(X)[0].flatten() # normalize to get probabilities return probs / np.sum(probs,1)[:,None] From 8a987057ad7cdda8f6744618ad16e42f3667b228 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 31 Mar 2015 10:37:38 +0200 Subject: [PATCH 183/352] Add components to documentation --- source/components.rst | 49 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/source/components.rst b/source/components.rst index 9f873c203f..cd342bca08 100644 --- a/source/components.rst +++ b/source/components.rst @@ -10,9 +10,24 @@ Classification A list of all classification algorithms considered in the ParamSklearn search space. +.. autoclass:: ParamSklearn.components.classification.adaboost.AdaboostClassifier + :members: + +.. autoclass:: ParamSklearn.components.classification.bagged_gaussian_nb.BaggedGaussianNB + :members: + +.. autoclass:: ParamSklearn.components.classification.bagged_multinomial_nb.BaggedMultinomialNB + :members: + +.. autoclass:: ParamSklearn.components.classification.bernoulli_nb.BernoulliNB + :members: + .. autoclass:: ParamSklearn.components.classification.extra_trees.ExtraTreesClassifier :members: +.. autoclass:: ParamSklearn.components.classification.gaussian_nb.GaussianNB + :members: + .. autoclass:: ParamSklearn.components.classification.gradient_boosting.GradientBoostingClassifier :members: @@ -24,6 +39,9 @@ A list of all classification algorithms considered in the ParamSklearn search sp .. autoclass:: ParamSklearn.components.classification.libsvm_svc.LibSVM_SVC :members: + +.. autoclass:: ParamSklearn.components.classification.multinomial_nb.MultinomialNB + :members: .. autoclass:: ParamSklearn.components.classification.random_forest.RandomForest :members: @@ -46,9 +64,40 @@ A list of all regression algorithms considered in the ParamSklearn search space. :members: .. autoclass:: ParamSklearn.components.regression.ridge_regression.RidgeRegression + :members: Preprocessing ============= +.. autoclass:: ParamSklearn.components.preprocessing.densifier.Densifier + :members: + +.. autoclass:: ParamSklearn.components.preprocessing.imputation.Imputation + :members: + +.. autoclass:: ParamSklearn.components.preprocessing.kitchen_sinks.RandomKitchenSinks + :members: + +.. autoclass:: ParamSklearn.components.preprocessing.no_preprocessing.NoPreprocessing + :members: + .. autoclass:: ParamSklearn.components.preprocessing.pca.PCA + :members: + +.. autoclass:: ParamSklearn.components.preprocessing.random_trees_embedding.RandomTreesEmbedding + :members: + +.. autoclass:: ParamSklearn.components.preprocessing.rescaling.Rescaling + :members: + +.. autoclass:: ParamSklearn.components.preprocessing.select_percentile_classification.SelectPercentileClassification + :members: + +.. autoclass:: ParamSklearn.components.preprocessing.select_percentile_regression.SelectPercentileRegression + :members: + +.. autoclass:: ParamSklearn.components.preprocessing.sparse_filtering.SparseFiltering + :members: + +.. autoclass:: ParamSklearn.components.preprocessing.truncatedSVD.TruncatedSVD From cb4708083d2f6bcbfee29f3f0dcb380bc3d1d1c9 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 31 Mar 2015 11:01:17 +0200 Subject: [PATCH 184/352] Fix: Circumvent 'Buffer dtype mismatch, expected 'DOUBLE' but got 'float'' in TruncatedSVD due to sklearn 0.15 --- ParamSklearn/components/preprocessing/truncatedSVD.py | 4 ++++ ParamSklearn/implementations/MinMaxScaler.py | 5 ++++- ParamSklearn/implementations/StandardScaler.py | 4 +++- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/ParamSklearn/components/preprocessing/truncatedSVD.py b/ParamSklearn/components/preprocessing/truncatedSVD.py index cba9544b4e..7e528f4ec2 100644 --- a/ParamSklearn/components/preprocessing/truncatedSVD.py +++ b/ParamSklearn/components/preprocessing/truncatedSVD.py @@ -19,6 +19,10 @@ def fit(self, X, Y): target_dim = min(self.target_dim, X.shape[1] - 1) self.preprocessor = sklearn.decomposition.TruncatedSVD( target_dim, algorithm='arpack') + # TODO: remove when migrating to sklearn 0.16 + # Circumvents a bug in sklearn + # https://github.com/scikit-learn/scikit-learn/commit/f08b8c8e52663167819f242f605db39f3b5a6d0c + X = X.astype(np.float64) self.preprocessor.fit(X, Y) return self diff --git a/ParamSklearn/implementations/MinMaxScaler.py b/ParamSklearn/implementations/MinMaxScaler.py index b69cf239d3..32d8765c1d 100644 --- a/ParamSklearn/implementations/MinMaxScaler.py +++ b/ParamSklearn/implementations/MinMaxScaler.py @@ -52,7 +52,10 @@ def fit(self, X, y=None): used for later scaling along the features axis. """ X = check_arrays(X, sparse_format="csc", copy=self.copy)[0] - warn_if_not_float(X, estimator=self) + if warn_if_not_float(X, estimator=self): + # Costly conversion, but otherwise the pipeline will break: + # https://github.com/scikit-learn/scikit-learn/issues/1709 + X = X.astype(np.float) feature_range = self.feature_range if feature_range[0] >= feature_range[1]: raise ValueError("Minimum of desired feature range must be smaller" diff --git a/ParamSklearn/implementations/StandardScaler.py b/ParamSklearn/implementations/StandardScaler.py index 2bed2fe1c4..60d0ed19e9 100644 --- a/ParamSklearn/implementations/StandardScaler.py +++ b/ParamSklearn/implementations/StandardScaler.py @@ -105,7 +105,9 @@ def fit(self, X, y=None): """ X = check_arrays(X, copy=self.copy, sparse_format="csc")[0] if warn_if_not_float(X, estimator=self): - X = X.astype(np.float) + # Costly conversion, but otherwise the pipeline will break: + # https://github.com/scikit-learn/scikit-learn/issues/1709 + X = X.astype(np.float32) if sparse.issparse(X): if self.center_sparse: means = [] From f294ccbaf8e240fddc7b19f43040cde8e2ebb377 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 31 Mar 2015 11:25:14 +0200 Subject: [PATCH 185/352] FIX: MinMaxScaler has zero as minimum after fitting on training data: caused crash in combination with chi2 feature selection --- ParamSklearn/implementations/MinMaxScaler.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/ParamSklearn/implementations/MinMaxScaler.py b/ParamSklearn/implementations/MinMaxScaler.py index 32d8765c1d..c15dc1679f 100644 --- a/ParamSklearn/implementations/MinMaxScaler.py +++ b/ParamSklearn/implementations/MinMaxScaler.py @@ -110,9 +110,15 @@ def transform(self, X): for i in range(X.shape[1]): X.data[X.indptr[i]:X.indptr[i + 1]] *= self.scale_[i] X.data[X.indptr[i]:X.indptr[i + 1]] += self.min_[i] + # Fix numeric instabilities + X.data[X.data < 0] = 0 + X.data[X.data > 1] = 1 else: X *= self.scale_ X += self.min_ + # Fix numeric instabilities + X[X < 0] = 0 + X[X > 1] = 1 return X From ef04d7a38a9be61efefc977c112545c35b4cb42e Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 31 Mar 2015 11:26:57 +0200 Subject: [PATCH 186/352] Remove bagged_gaussian_nb and bagged_multinomial_nb --- .../classification/bagged_gaussian_nb.py | 74 ------------- .../classification/bagged_multinomial_nb.py | 100 ------------------ source/components.rst | 6 -- 3 files changed, 180 deletions(-) delete mode 100644 ParamSklearn/components/classification/bagged_gaussian_nb.py delete mode 100644 ParamSklearn/components/classification/bagged_multinomial_nb.py diff --git a/ParamSklearn/components/classification/bagged_gaussian_nb.py b/ParamSklearn/components/classification/bagged_gaussian_nb.py deleted file mode 100644 index 63dbb77408..0000000000 --- a/ParamSklearn/components/classification/bagged_gaussian_nb.py +++ /dev/null @@ -1,74 +0,0 @@ -import numpy as np -import sklearn.ensemble -import sklearn.naive_bayes - -from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import Constant - -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS - - -class BaggedGaussianNB(ParamSklearnClassificationAlgorithm): - - def __init__(self, n_estimators, max_samples, max_features, - random_state=None, verbose=0): - self.n_estimators = n_estimators - self.max_samples = max_samples - self.max_features = max_features - self.random_state = random_state - self.verbose = int(verbose) - self.estimator = None - - def fit(self, X, Y): - self.estimator = sklearn.ensemble.BaggingClassifier( - base_estimator=sklearn.naive_bayes.GaussianNB(), - n_estimators=self.n_estimators, max_samples=self.max_samples, - max_features=self.max_features) - self.estimator.fit(X, Y) - return self - - def predict(self, X): - if self.estimator is None: - raise NotImplementedError - return self.estimator.predict(X) - - def predict_proba(self, X): - if self.estimator is None: - raise NotImplementedError() - return self.estimator.predict_proba(X) - - @staticmethod - def get_properties(): - return {'shortname': 'BaggedGaussianNB', - 'name': 'Bagging of Gaussian Naive Bayes classifiers', - 'handles_missing_values': False, - 'handles_nominal_values': False, - 'handles_numerical_features': True, - 'prefers_data_scaled': False, - 'prefers_data_normalized': False, - 'handles_regression': False, - 'handles_classification': True, - 'handles_multiclass': True, - 'handles_multilabel': False, - 'is_deterministic': True, - 'handles_sparse': False, - 'input': (DENSE, ), - 'output': PREDICTIONS, - 'preferred_dtype': np.float32} - - @staticmethod - def get_hyperparameter_search_space(dataset_properties=None): - # The three parameters of the bagging ensemble are set to - # constants for now (SF) - n_estimators = Constant('n_estimators', 100) - max_samples = Constant('max_samples', 1.0) # caution: has to be float - max_features = Constant('max_features', 1.0) # caution: has to be float - - cs = ConfigurationSpace() - cs.add_hyperparameter(n_estimators) - cs.add_hyperparameter(max_samples) - cs.add_hyperparameter(max_features) - - return cs - diff --git a/ParamSklearn/components/classification/bagged_multinomial_nb.py b/ParamSklearn/components/classification/bagged_multinomial_nb.py deleted file mode 100644 index 73c7d6f813..0000000000 --- a/ParamSklearn/components/classification/bagged_multinomial_nb.py +++ /dev/null @@ -1,100 +0,0 @@ -import numpy as np -import sklearn.naive_bayes -import sklearn.ensemble - -from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - CategoricalHyperparameter, Constant - -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS - - -class BaggedMultinomialNB(ParamSklearnClassificationAlgorithm): - - def __init__(self, alpha, fit_prior, n_estimators, max_samples, - max_features, random_state=None, verbose=0): - self.alpha = alpha - if fit_prior.lower() == "true": - self.fit_prior = True - elif fit_prior.lower() == "false": - self.fit_prior = False - else: - self.fit_prior = fit_prior - - self.n_estimators = n_estimators - self.max_samples = max_samples - self.max_features = max_features - - self.random_state = random_state - self.verbose = int(verbose) - self.estimator = None - - def fit(self, X, Y): - self.estimator = sklearn.ensemble.BaggingClassifier( - base_estimator=sklearn.naive_bayes.MultinomialNB( - alpha=self.alpha, fit_prior=self.fit_prior), - n_estimators=self.n_estimators, max_samples=self.max_samples, - max_features=self.max_features) - self.estimator.fit(X, Y) - return self - - def predict(self, X): - if self.estimator is None: - raise NotImplementedError - return self.estimator.predict(X) - - def predict_proba(self, X): - if self.estimator is None: - raise NotImplementedError() - return self.estimator.predict_proba(X) - - @staticmethod - def get_properties(): - return {'shortname': 'MultinomialNB', - 'name': 'Multinomial Naive Bayes classifier', - 'handles_missing_values': False, - 'handles_nominal_values': False, - # sklearn website says: The multinomial distribution normally - # requires integer feature counts. However, in practice, - # fractional counts such as tf-idf may also work. - 'handles_numerical_features': True, - 'prefers_data_scaled': False, - 'prefers_data_normalized': False, - 'handles_regression': False, - 'handles_classification': True, - 'handles_multiclass': True, - 'handles_multilabel': False, - 'is_deterministic': True, - 'handles_sparse': False, - 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, - 'preferred_dtype': np.float32} - - @staticmethod - def get_hyperparameter_search_space(dataset_properties=None): - # The three parameters of the bagging ensamble are set to constants - # for now (SF) - n_estimators = Constant('n_estimators', 100) - max_samples = Constant('max_samples', 1.0) # caution: has to be float! - max_features = Constant('max_features', 1.0) # caution: has to be float! - - cs = ConfigurationSpace() - - # the smoothing parameter is a non-negative float - # I will limit it to 100 and put it on a logarithmic scale. (SF) - # Please adjust that, if you know a proper range, this is just a guess. - alpha = UniformFloatHyperparameter(name="alpha", lower=1e-2, upper=100, - default=1, log=True) - fit_prior = CategoricalHyperparameter(name="fit_prior", - choices=["True", "False"], - default="True") - - cs.add_hyperparameter(alpha) - cs.add_hyperparameter(fit_prior) - cs.add_hyperparameter(n_estimators) - cs.add_hyperparameter(max_samples) - cs.add_hyperparameter(max_features) - - return cs - diff --git a/source/components.rst b/source/components.rst index cd342bca08..52b14bc0a0 100644 --- a/source/components.rst +++ b/source/components.rst @@ -13,12 +13,6 @@ A list of all classification algorithms considered in the ParamSklearn search sp .. autoclass:: ParamSklearn.components.classification.adaboost.AdaboostClassifier :members: -.. autoclass:: ParamSklearn.components.classification.bagged_gaussian_nb.BaggedGaussianNB - :members: - -.. autoclass:: ParamSklearn.components.classification.bagged_multinomial_nb.BaggedMultinomialNB - :members: - .. autoclass:: ParamSklearn.components.classification.bernoulli_nb.BernoulliNB :members: From 226e861299d1ed29a2cdca0b1a5e494d608def29 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 31 Mar 2015 13:56:15 +0200 Subject: [PATCH 187/352] Change truncatedSVD solver to randomized --- ParamSklearn/components/preprocessing/truncatedSVD.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/components/preprocessing/truncatedSVD.py b/ParamSklearn/components/preprocessing/truncatedSVD.py index 7e528f4ec2..8c22f1e81c 100644 --- a/ParamSklearn/components/preprocessing/truncatedSVD.py +++ b/ParamSklearn/components/preprocessing/truncatedSVD.py @@ -18,7 +18,7 @@ def __init__(self, target_dim, random_state=None): def fit(self, X, Y): target_dim = min(self.target_dim, X.shape[1] - 1) self.preprocessor = sklearn.decomposition.TruncatedSVD( - target_dim, algorithm='arpack') + target_dim, algorithm='randomized') # TODO: remove when migrating to sklearn 0.16 # Circumvents a bug in sklearn # https://github.com/scikit-learn/scikit-learn/commit/f08b8c8e52663167819f242f605db39f3b5a6d0c From 10d49f277da1fa7f13baf5588e0c7c3b9483a2eb Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 31 Mar 2015 14:02:31 +0200 Subject: [PATCH 188/352] Move from iris test dataset to digits test dataset where iris is too small --- ParamSklearn/util.py | 10 +++++----- tests/components/classification/test_adaboost.py | 2 +- tests/components/regression/test_gaussian_process.py | 2 +- tests/components/regression/test_gradient_boosting.py | 2 +- tests/components/regression/test_random_forests.py | 2 +- tests/components/regression/test_ridge_regression.py | 4 ++-- tests/test_classification.py | 6 +++--- tests/test_regression.py | 2 +- tests/test_textclassification.py | 2 +- 9 files changed, 16 insertions(+), 16 deletions(-) diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index c61011f18e..c9ac027ab3 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -54,14 +54,14 @@ def get_dataset(dataset='iris', make_sparse=False): Y = iris.target rs = np.random.RandomState(42) indices = np.arange(X.shape[0]) - train_size = len(indices) / 3. * 2. + train_size = min(int(len(indices) / 3. * 2.), 150) rs.shuffle(indices) X = X[indices] Y = Y[indices] - X_train = X[:100] - Y_train = Y[:100] - X_test = X[100:] - Y_test = Y[100:] + X_train = X[:train_size] + Y_train = Y[:train_size] + X_test = X[train_size:] + Y_test = Y[train_size:] if make_sparse: X_train[:,0] = 0 diff --git a/tests/components/classification/test_adaboost.py b/tests/components/classification/test_adaboost.py index 0497681945..84d797f3ad 100644 --- a/tests/components/classification/test_adaboost.py +++ b/tests/components/classification/test_adaboost.py @@ -20,5 +20,5 @@ def test_default_configuration_digits(self): predictions, targets = \ _test_classifier(classifier=AdaboostClassifier, dataset='digits') - self.assertAlmostEqual(0.48791985857395404, + self.assertAlmostEqual(0.56527018822100794, sklearn.metrics.accuracy_score(predictions, targets)) diff --git a/tests/components/regression/test_gaussian_process.py b/tests/components/regression/test_gaussian_process.py index f08a549c20..0a814a7717 100644 --- a/tests/components/regression/test_gaussian_process.py +++ b/tests/components/regression/test_gaussian_process.py @@ -11,6 +11,6 @@ def test_default_configuration(self): for i in range(10): predictions, targets = _test_regressor(GaussianProcess, dataset='diabetes') - self.assertAlmostEqual(0.28876614862410088, + self.assertAlmostEqual(0.23323928076000433, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) diff --git a/tests/components/regression/test_gradient_boosting.py b/tests/components/regression/test_gradient_boosting.py index 17dc88fddf..2b4f67d124 100644 --- a/tests/components/regression/test_gradient_boosting.py +++ b/tests/components/regression/test_gradient_boosting.py @@ -12,5 +12,5 @@ def test_default_configuration(self): predictions, targets = _test_regressor(GradientBoosting, dataset='diabetes') - self.assertAlmostEqual(0.39056015252360077, + self.assertAlmostEqual(0.38851325425603489, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) diff --git a/tests/components/regression/test_random_forests.py b/tests/components/regression/test_random_forests.py index 36bee53643..d49d7970f3 100644 --- a/tests/components/regression/test_random_forests.py +++ b/tests/components/regression/test_random_forests.py @@ -12,5 +12,5 @@ def test_default_configuration(self): predictions, targets = _test_regressor(RandomForest, dataset='diabetes') - self.assertAlmostEqual(0.41960285574345746, + self.assertAlmostEqual(0.41224692924630502, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) diff --git a/tests/components/regression/test_ridge_regression.py b/tests/components/regression/test_ridge_regression.py index 061696f0cd..0517dc8aab 100644 --- a/tests/components/regression/test_ridge_regression.py +++ b/tests/components/regression/test_ridge_regression.py @@ -18,7 +18,7 @@ def test_default_configuration(self): # This should be a bad results predictions, targets = _test_regressor(RidgeRegression, dataset='diabetes') - self.assertAlmostEqual(-3.726787582018825, + self.assertAlmostEqual(-3.5118220034267313, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) # This should be much more better @@ -38,5 +38,5 @@ def test_default_configuration(self): predictor = regressor.fit(X_train_transformed, Y_train) predictions = predictor.predict(X_test_transformed) - self.assertAlmostEqual(0.24658871483206091, + self.assertAlmostEqual(0.30195375410805392, sklearn.metrics.r2_score(y_true=Y_test, y_pred=predictions)) \ No newline at end of file diff --git a/tests/test_classification.py b/tests/test_classification.py index fec0a857e0..80e52a9a47 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -62,7 +62,7 @@ def test_default_configuration(self): auto = ParamSklearnClassifier(default) auto = auto.fit(X_train, Y_train) predictions = auto.predict(X_test) - self.assertAlmostEqual(0.95999999999999996, + self.assertAlmostEqual(0.62, sklearn.metrics.accuracy_score(predictions, Y_test)) scores = auto.predict_proba(X_test) @@ -71,7 +71,7 @@ def test_configurations(self): sampler = RandomSampler(cs, 1) for i in range(10): config = sampler.sample_configuration() - X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris') + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cls = ParamSklearnClassifier(config, random_state=1) try: cls.fit(X_train, Y_train) @@ -88,7 +88,7 @@ def test_configurations_sparse(self): sampler = RandomSampler(cs, 1) for i in range(10): config = sampler.sample_configuration() - X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris', + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', make_sparse=True) cls = ParamSklearnClassifier(config, random_state=1) try: diff --git a/tests/test_regression.py b/tests/test_regression.py index 9577a9c6d6..785b80ff94 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -68,7 +68,7 @@ def test_default_configuration(self): predictions = auto.predict(copy.deepcopy(X_test)) # The lower the worse r2_score = sklearn.metrics.r2_score(Y_test, predictions) - self.assertAlmostEqual(0.41855369945075482, r2_score) + self.assertAlmostEqual(0.36938041779824193, r2_score) model_score = auto.score(copy.deepcopy(X_test), Y_test) self.assertEqual(model_score, r2_score) diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index 067712de55..1786592ccd 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(89, len(hyperparameters)) + self.assertEqual(81, len(hyperparameters)) # The three parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) From d3f4c3851f24c59e7cd7656cd48f8578eb91070f Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 31 Mar 2015 14:02:51 +0200 Subject: [PATCH 189/352] Do not use GP until it is stable --- ParamSklearn/components/classification/gaussian_process.py | 2 +- tests/components/classification/test_gaussian_process.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ParamSklearn/components/classification/gaussian_process.py b/ParamSklearn/components/classification/gaussian_process.py index 8aa2f32b80..26f833adaf 100644 --- a/ParamSklearn/components/classification/gaussian_process.py +++ b/ParamSklearn/components/classification/gaussian_process.py @@ -14,7 +14,7 @@ import GPy -class GPyClassifier(ParamSklearnClassificationAlgorithm): +class GPyClassifier():#ParamSklearnClassificationAlgorithm): def __init__(self, random_state=None, n_inducing=5, ard=False): self.estimators = None self.n_inducing = int(n_inducing) diff --git a/tests/components/classification/test_gaussian_process.py b/tests/components/classification/test_gaussian_process.py index e79ba914bd..46fe0887aa 100644 --- a/tests/components/classification/test_gaussian_process.py +++ b/tests/components/classification/test_gaussian_process.py @@ -10,6 +10,6 @@ class GPyClassifierComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): predictions, targets = _test_classifier(GPyClassifier) - self.assertAlmostEqual(0.97999999999999998, + self.assertAlmostEqual(0.95999999999999996, sklearn.metrics.accuracy_score(predictions, targets)) From fa20159d141a39379d2b461cd9c97802409fb11c Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 31 Mar 2015 14:03:21 +0200 Subject: [PATCH 190/352] Move numeric stability helpers from implementation to component --- ParamSklearn/components/preprocessing/rescaling.py | 9 +++++++++ ParamSklearn/implementations/MinMaxScaler.py | 6 ------ 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/ParamSklearn/components/preprocessing/rescaling.py b/ParamSklearn/components/preprocessing/rescaling.py index 586fbc0759..61fc069ba3 100644 --- a/ParamSklearn/components/preprocessing/rescaling.py +++ b/ParamSklearn/components/preprocessing/rescaling.py @@ -1,3 +1,5 @@ +from scipy import sparse + from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter @@ -15,6 +17,13 @@ def __init__(self, strategy, random_state=None): def fit(self, X, Y): if self.strategy == "min/max": self.preprocessor = MinMaxScaler(copy=False) + # Fix numeric instabilities + if sparse.issparse(X): + X.data[X.data < 0] = 0 + X.data[X.data > 1] = 1 + else: + X[X < 0] = 0 + X[X > 1] = 1 elif self.strategy == "standard": self.preprocessor = StandardScaler(copy=False) else: diff --git a/ParamSklearn/implementations/MinMaxScaler.py b/ParamSklearn/implementations/MinMaxScaler.py index c15dc1679f..32d8765c1d 100644 --- a/ParamSklearn/implementations/MinMaxScaler.py +++ b/ParamSklearn/implementations/MinMaxScaler.py @@ -110,15 +110,9 @@ def transform(self, X): for i in range(X.shape[1]): X.data[X.indptr[i]:X.indptr[i + 1]] *= self.scale_[i] X.data[X.indptr[i]:X.indptr[i + 1]] += self.min_[i] - # Fix numeric instabilities - X.data[X.data < 0] = 0 - X.data[X.data > 1] = 1 else: X *= self.scale_ X += self.min_ - # Fix numeric instabilities - X[X < 0] = 0 - X[X > 1] = 1 return X From 348ab395a1fb023cffbb2577f654ce5b997c954c Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 31 Mar 2015 14:03:45 +0200 Subject: [PATCH 191/352] Do not use PCA together with tree models --- ParamSklearn/classification.py | 29 +++++++++++++++++++++++++++-- ParamSklearn/regression.py | 26 ++++++++++++++++++++++++++ source/first_steps.rst | 2 +- tests/test_classification.py | 26 ++++++++++++-------------- 4 files changed, 66 insertions(+), 17 deletions(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 6dfb19bb1a..530773202c 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -264,11 +264,36 @@ def get_hyperparameter_search_space(cls, include_estimators=None, except KeyError: pass + # We have seen empirically that tree-based models together with PCA + # don't work better than tree-based models without preprocessing + classifiers_ = ["random_forest", "extra_trees", "gradient_boosting"] + for c in classifiers_: + if c not in classifiers_list: + continue + try: + configuration_space.add_forbidden_clause( + ForbiddenAndConjunction( + ForbiddenEqualsClause( + configuration_space.get_hyperparameter( + "preprocessor"), "pca"), + ForbiddenEqualsClause( + configuration_space.get_hyperparameter( + "classifier"), c))) + except KeyError: + pass + except ValueError as e: + if e.message.startswith("Forbidden clause must be " + "instantiated with a legal " + "hyperparameter value for " + "'preprocessor"): + pass + else: + raise e + # Won't work # Multinomial NB does not work with negative values, don't use # it with standardization, features learning, pca - classifiers_ = ["multinomial_nb", "bagged_multinomial_nb", - "bernoulli_nb"] + classifiers_ = ["multinomial_nb", "bernoulli_nb"] preproc_with_negative_X = ["kitchen_sinks", "sparse_filtering", "pca", "truncatedSVD"] for c in classifiers_: diff --git a/ParamSklearn/regression.py b/ParamSklearn/regression.py index 60bf4b7986..6e8e765fa4 100644 --- a/ParamSklearn/regression.py +++ b/ParamSklearn/regression.py @@ -285,6 +285,32 @@ def get_hyperparameter_search_space(cls, include_estimators=None, except KeyError: pass + # We have seen empirically that tree-based models together with PCA + # don't work better than tree-based models without preprocessing + regressors_ = ["random_forest", "gradient_boosting"] + for r in regressors_: + if r not in regressors_list: + continue + try: + configuration_space.add_forbidden_clause( + ForbiddenAndConjunction( + ForbiddenEqualsClause( + configuration_space.get_hyperparameter( + "preprocessor"), "pca"), + ForbiddenEqualsClause( + configuration_space.get_hyperparameter( + "classifier"), r))) + except KeyError: + pass + except ValueError as e: + if e.message.startswith("Forbidden clause must be " + "instantiated with a legal " + "hyperparameter value for " + "'preprocessor"): + pass + else: + raise e + return configuration_space @staticmethod diff --git a/source/first_steps.rst b/source/first_steps.rst index 9bb492f11c..654e1d8ce2 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -24,4 +24,4 @@ configuration on the iris dataset. >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.93999999999999995 + 0.90000000000000002 diff --git a/tests/test_classification.py b/tests/test_classification.py index 80e52a9a47..c538b05e04 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -105,7 +105,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(90, len(hyperparameters)) + self.assertEqual(82, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 4, len(conditions)) @@ -121,30 +121,28 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): self.assertNotIn('libsvm_svc', str(cs)) cs = ParamSklearnClassifier.get_hyperparameter_search_space( - include_preprocessors=['pca']) + include_preprocessors=['select_percentile_classification']) self.assertEqual(cs.get_hyperparameter('preprocessor'), - CategoricalHyperparameter('preprocessor', ['pca'])) + CategoricalHyperparameter('preprocessor', + ['select_percentile_classification'])) cs = ParamSklearnClassifier.get_hyperparameter_search_space( - exclude_preprocessors=['pca']) - self.assertNotIn('pca', str(cs)) + exclude_preprocessors=['select_percentile_classification']) + self.assertNotIn('select_percentile_classification', str(cs)) def test_get_hyperparameter_search_space_only_forbidden_combinations(self): - self.assertRaisesRegexp(ValueError, "Configuration:\n" - " bagged_multinomial_nb:alpha, Value: 1.000000\n" - " bagged_multinomial_nb:fit_prior, Value: True\n" - " bagged_multinomial_nb:max_features, Constant: 1.0\n" - " bagged_multinomial_nb:max_samples, Constant: 1.0\n" - " bagged_multinomial_nb:n_estimators, Constant: 100\n" - " classifier, Value: bagged_multinomial_nb\n" + self.assertRaisesRegexp(ValueError, "Default Configuration:\n" + " classifier, Value: multinomial_nb\n" " imputation:strategy, Value: mean\n" + " multinomial_nb:alpha, Value: 1.000000\n" + " multinomial_nb:fit_prior, Value: True\n" " preprocessor, Value: truncatedSVD\n" " rescaling:strategy, Value: min/max\n" " truncatedSVD:target_dim, Value: 128\n" "violates forbidden clause \(Forbidden: preprocessor == " - "truncatedSVD && Forbidden: classifier == bagged_multinomial_nb\)", + "truncatedSVD && Forbidden: classifier == multinomial_nb\)", ParamSklearnClassifier.get_hyperparameter_search_space, - include_estimators=['bagged_multinomial_nb'], + include_estimators=['multinomial_nb'], include_preprocessors=['truncatedSVD'], dataset_properties={'sparse':True}) From 2b7f75c27e863029b21e12cd38c6dcf961707715 Mon Sep 17 00:00:00 2001 From: Manuel Blum Date: Tue, 31 Mar 2015 16:45:41 +0200 Subject: [PATCH 192/352] explicitly turned off normalization in GP --- .../components/classification/gaussian_process.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/ParamSklearn/components/classification/gaussian_process.py b/ParamSklearn/components/classification/gaussian_process.py index 8aa2f32b80..004223bf1d 100644 --- a/ParamSklearn/components/classification/gaussian_process.py +++ b/ParamSklearn/components/classification/gaussian_process.py @@ -10,7 +10,7 @@ from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS -# get our own forests to replace the sklearn ones + import GPy @@ -41,7 +41,12 @@ def fit(self, X, Y): # dense # model = GPy.models.GPClassification(X, targets[:,i,None], kernel=kern) # sparse - model = GPy.models.SparseGPClassification(X, targets[:,i,None], kernel=kern, num_inducing=self.n_inducing) + model = GPy.models.SparseGPClassification(X, + targets[:,i,None], + kernel=kern, + num_inducing=self.n_inducing, + normalize_X=False, + normalize_Y=False) # fit kernel hyperparameters model.optimize('bfgs', max_iters=100) # add to list of estimators From 0841ccee2d3b8704ae055b80f176e2f7c68dbd83 Mon Sep 17 00:00:00 2001 From: Manuel Blum Date: Tue, 31 Mar 2015 16:47:20 +0200 Subject: [PATCH 193/352] GP is using noise term in kernel function --- ParamSklearn/components/classification/gaussian_process.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/ParamSklearn/components/classification/gaussian_process.py b/ParamSklearn/components/classification/gaussian_process.py index 8aa2f32b80..4dfc71ec17 100644 --- a/ParamSklearn/components/classification/gaussian_process.py +++ b/ParamSklearn/components/classification/gaussian_process.py @@ -15,7 +15,7 @@ class GPyClassifier(ParamSklearnClassificationAlgorithm): - def __init__(self, random_state=None, n_inducing=5, ard=False): + def __init__(self, random_state=None, n_inducing=20, ard=False): self.estimators = None self.n_inducing = int(n_inducing) @@ -37,7 +37,9 @@ def fit(self, X, Y): self.estimators = [] for i in range(self.enc.n_values_): # train model - kern = GPy.kern._src.rbf.RBF(X.shape[1], variance=1.0, lengthscale=1.0, ARD=self.ard) + white = GPy.kern._src.static.White(X.shape[1], variance=1.0, active_dims=None, name='white') + rbf = GPy.kern._src.rbf.RBF(X.shape[1], variance=1.0, lengthscale=1.0, ARD=self.ard) + kern = rbf + white # dense # model = GPy.models.GPClassification(X, targets[:,i,None], kernel=kern) # sparse From d9297f990b916afc73b204593a360e5060d9fab0 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 31 Mar 2015 18:29:39 +0200 Subject: [PATCH 194/352] predict: add batch_size argument --- ParamSklearn/base.py | 33 ++++++++- ParamSklearn/classification.py | 62 +++++++++++++++-- .../components/classification_base.py | 2 +- ParamSklearn/regression.py | 7 ++ setup.py | 1 + tests/test_classification.py | 69 +++++++++++++++++++ tests/test_regression.py | 18 +++++ 7 files changed, 182 insertions(+), 10 deletions(-) diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index 729e42cb97..8cc5a20aab 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -2,6 +2,7 @@ from collections import defaultdict import copy +import numpy as np import sklearn if sklearn.__version__ != "0.15.2": raise ValueError("ParamSklearn supports only sklearn version 0.15.2, " @@ -208,20 +209,46 @@ def add_model_class(self, model): """ raise NotImplementedError() - def predict(self, X): + def predict(self, X, batch_size=None): """Predict the classes using the selected model. Parameters ---------- X : array-like, shape = (n_samples, n_features) + batch_size: int or None, defaults to None + batch_size controls whether the ParamSklearn pipeline will be + called on small chunks of the data. Useful when calling the + predict method on the whole array X results in a MemoryError. + Returns ------- array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) Returns the predicted values""" # TODO check if fit() was called before... - self._validate_input_X(X) - return self._pipeline.predict(X) + + if batch_size is None: + self._validate_input_X(X) + return self._pipeline.predict(X) + else: + if type(batch_size) is not int or batch_size <= 0: + raise Exception("batch_size must be a positive integer") + + else: + if self.num_targets == 1: + y = np.zeros((X.shape[0],)) + else: + y = np.zeros((X.shape[0], self.num_targets)) + + # Copied and adapted from the scikit-learn GP code + for k in range(max(1, int(np.ceil(float(X.shape[0]) / + batch_size)))): + batch_from = k * batch_size + batch_to = min([(k + 1) * batch_size, X.shape[0] + 1]) + y[batch_from:batch_to] = \ + self.predict(X[batch_from:batch_to], batch_size=None) + + return y @classmethod def get_hyperparameter_search_space(cls, estimator_name, diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 530773202c..e75f630e1e 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -4,6 +4,7 @@ import numpy as np from sklearn.base import ClassifierMixin +from sklearn.preprocessing import LabelEncoder from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause from HPOlibConfigSpace.forbidden import ForbiddenAndConjunction @@ -60,23 +61,72 @@ class ParamSklearnClassifier(ClassifierMixin, ParamSklearnBaseEstimator): """ - def predict_proba(self, X): + def fit(self, X, Y, fit_params=None, init_params=None): + super(ParamSklearnClassifier, self).fit(X, Y, fit_params=fit_params, + init_params=init_params) + self.num_targets = 1 if len(Y.shape) == 1 else Y.shape[1] + return self + + def predict_proba(self, X, batch_size=None): """predict_proba. Parameters ---------- X : array-like, shape = (n_samples, n_features) + batch_size: int or None, defaults to None + batch_size controls whether the ParamSklearn pipeline will be + called on small chunks of the data. Useful when calling the + predict method on the whole array X results in a MemoryError. + Returns ------- array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) """ - self._validate_input_X(X) - Xt = X - for name, transform in self._pipeline.steps[:-1]: - Xt = transform.transform(Xt) + if batch_size is None: + self._validate_input_X(X) + Xt = X + for name, transform in self._pipeline.steps[:-1]: + Xt = transform.transform(Xt) - return self._pipeline.steps[-1][-1].predict_proba(Xt) + return self._pipeline.steps[-1][-1].predict_proba(Xt) + + else: + if type(batch_size) is not int or batch_size <= 0: + raise Exception("batch_size must be a positive integer") + + else: + # Probe for the target array dimensions + target = self.predict_proba(X[0].copy()) + + # Binary or Multiclass + if len(target) == 1: + y = np.zeros((X.shape[0], target.shape[1])) + + for k in range(max(1, int(np.ceil(float(X.shape[0]) / + batch_size)))): + batch_from = k * batch_size + batch_to = min([(k + 1) * batch_size, X.shape[0] + 1]) + y[batch_from:batch_to] = \ + self.predict_proba(X[batch_from:batch_to], + batch_size=None) + + elif len(target) > 1: + y = [np.zeros((X.shape[0], target[i].shape[1])) + for i in range(len(target))] + + for k in range(max(1, int(np.ceil(float(X.shape[0]) / + batch_size)))): + batch_from = k * batch_size + batch_to = min([(k + 1) * batch_size, X.shape[0] + 1]) + predictions = \ + self.predict_proba(X[batch_from:batch_to], + batch_size=None) + + for i in range(len(target)): + y[i][batch_from:batch_to] = predictions[i] + + return y @classmethod def get_available_components(cls, available_comp, data_prop, inc, exc): diff --git a/ParamSklearn/components/classification_base.py b/ParamSklearn/components/classification_base.py index d9ab97bf8c..63a0179006 100644 --- a/ParamSklearn/components/classification_base.py +++ b/ParamSklearn/components/classification_base.py @@ -119,4 +119,4 @@ def get_estimator(self): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "ParamSklearn %s" % name diff --git a/ParamSklearn/regression.py b/ParamSklearn/regression.py index 6e8e765fa4..875d0ad584 100644 --- a/ParamSklearn/regression.py +++ b/ParamSklearn/regression.py @@ -62,6 +62,13 @@ class ParamSklearnRegressor(RegressorMixin, ParamSklearnBaseEstimator): """ + def fit(self, X, Y, fit_params=None, init_params=None): + super(ParamSklearnRegressor, self).fit(X, Y, fit_params=fit_params, + init_params=init_params) + self.num_targets = 1 if len(Y.shape) == 1 else Y.shape[1] + return self + + def _validate_input_X(self, X): # TODO: think of all possible states which can occur and how to # handle them diff --git a/setup.py b/setup.py index eb4074d6e7..326c611233 100644 --- a/setup.py +++ b/setup.py @@ -10,6 +10,7 @@ "scikit-learn==0.15.2", "nose", "HPOlibConfigSpace"], + test_requires=["mock"], test_suite="nose.collector", package_data={'': ['*.txt', '*.md']}, author="Matthias Feurer", diff --git a/tests/test_classification.py b/tests/test_classification.py index c538b05e04..607a1dd917 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -2,10 +2,13 @@ import unittest +import mock +import numpy as np import sklearn.datasets import sklearn.decomposition import sklearn.ensemble import sklearn.svm +from sklearn.utils.testing import assert_array_almost_equal from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter @@ -200,6 +203,72 @@ def test_get_hyperparameter_search_space_dataset_properties(self): # 'multiclass': True, # 'sparse': True}) + def test_predict_batched(self): + cs = ParamSklearnClassifier.get_hyperparameter_search_space() + default = cs.get_default_configuration() + cls = ParamSklearnClassifier(default) + + # Multiclass + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict(X_test_) + cls_predict = mock.Mock(wraps=cls._pipeline) + #cls_predict.predict.return_value = lambda X: np.ones((X.shape[0],)) + cls._pipeline = cls_predict + prediction = cls.predict(X_test, batch_size=20) + self.assertEqual((1647,), prediction.shape) + self.assertEqual(83, cls_predict.predict.call_count) + assert_array_almost_equal(prediction_, prediction) + + # Multilabel + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + Y_train = np.array([(y, 26 - y) for y in Y_train]) + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict(X_test_) + cls_predict = mock.Mock(wraps=cls._pipeline) + cls._pipeline = cls_predict + prediction = cls.predict(X_test, batch_size=20) + self.assertEqual((1647, 2), prediction.shape) + self.assertEqual(83, cls_predict.predict.call_count) + assert_array_almost_equal(prediction_, prediction) + + def test_predict_proba_batched(self): + cs = ParamSklearnClassifier.get_hyperparameter_search_space() + default = cs.get_default_configuration() + + # Multiclass + cls = ParamSklearnClassifier(default) + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict_proba(X_test_) + # The object behind the last step in the pipeline + cls_predict = mock.Mock(wraps=cls._pipeline.steps[-1][1]) + cls._pipeline.steps[-1] = ("estimator", cls_predict) + prediction = cls.predict_proba(X_test, batch_size=20) + self.assertEqual((1647, 10), prediction.shape) + self.assertEqual(84, cls_predict.predict_proba.call_count) + assert_array_almost_equal(prediction_, prediction) + + # Multilabel + cls = ParamSklearnClassifier(default) + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + Y_train = np.array([(y, 26 - y) for y in Y_train]) + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict_proba(X_test_) + cls_predict = mock.Mock(wraps=cls._pipeline.steps[-1][1]) + cls._pipeline.steps[-1] = ("estimator", cls_predict) + prediction = cls.predict_proba(X_test, batch_size=20) + self.assertIsInstance(prediction, list) + self.assertEqual(2, len(prediction)) + self.assertEqual((1647, 10), prediction[0].shape) + self.assertEqual((1647, 10), prediction[1].shape) + self.assertEqual(84, cls_predict.predict_proba.call_count) + assert_array_almost_equal(prediction_, prediction) + @unittest.skip("test_check_random_state Not yet Implemented") def test_check_random_state(self): raise NotImplementedError() diff --git a/tests/test_regression.py b/tests/test_regression.py index 785b80ff94..a8fc8e93e9 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -3,10 +3,12 @@ import copy import unittest +import mock import sklearn.datasets import sklearn.decomposition import sklearn.ensemble import sklearn.svm +from sklearn.utils.testing import assert_array_almost_equal from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter @@ -169,6 +171,22 @@ def test_get_hyperparameter_search_space_dataset_properties(self): multiclass=True, multilabel=True, sparse=True) """ + def test_predict_batched(self): + cs = ParamSklearnRegressor.get_hyperparameter_search_space() + default = cs.get_default_configuration() + cls = ParamSklearnRegressor(default) + + X_train, Y_train, X_test, Y_test = get_dataset(dataset='boston') + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict(X_test_) + cls_predict = mock.Mock(wraps=cls._pipeline) + cls._pipeline = cls_predict + prediction = cls.predict(X_test, batch_size=20) + self.assertEqual((356,), prediction.shape) + self.assertEqual(18, cls_predict.predict.call_count) + assert_array_almost_equal(prediction_, prediction) + @unittest.skip("test_check_random_state Not yet Implemented") def test_check_random_state(self): raise NotImplementedError() From aff7c3fa1b02fdca406a89f46f35d842ea09ac56 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 1 Apr 2015 09:14:59 +0200 Subject: [PATCH 195/352] Import GPy only if necessary (because it takes ~4s) --- ParamSklearn/components/classification/gaussian_process.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/ParamSklearn/components/classification/gaussian_process.py b/ParamSklearn/components/classification/gaussian_process.py index 26f833adaf..007f09da36 100644 --- a/ParamSklearn/components/classification/gaussian_process.py +++ b/ParamSklearn/components/classification/gaussian_process.py @@ -1,5 +1,4 @@ import numpy as np -import setuptools from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -10,12 +9,14 @@ from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS -# get our own forests to replace the sklearn ones -import GPy + class GPyClassifier():#ParamSklearnClassificationAlgorithm): def __init__(self, random_state=None, n_inducing=5, ard=False): + import GPy + global GPy + self.estimators = None self.n_inducing = int(n_inducing) From 11b4f16ddb4eaaa90ae18b70530d7188297fadbc Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 1 Apr 2015 14:07:31 +0200 Subject: [PATCH 196/352] FIX: batch_size in predict doesn't cause crash on sparse matrix any more --- ParamSklearn/base.py | 2 +- ParamSklearn/classification.py | 5 +- tests/test_classification.py | 104 ++++++++++++++++++++++++++++++++- tests/test_regression.py | 18 ++++++ 4 files changed, 123 insertions(+), 6 deletions(-) diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index 8cc5a20aab..aa5c10c46d 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -244,7 +244,7 @@ def predict(self, X, batch_size=None): for k in range(max(1, int(np.ceil(float(X.shape[0]) / batch_size)))): batch_from = k * batch_size - batch_to = min([(k + 1) * batch_size, X.shape[0] + 1]) + batch_to = min([(k + 1) * batch_size, X.shape[0]]) y[batch_from:batch_to] = \ self.predict(X[batch_from:batch_to], batch_size=None) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index e75f630e1e..b6a3b00874 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -4,7 +4,6 @@ import numpy as np from sklearn.base import ClassifierMixin -from sklearn.preprocessing import LabelEncoder from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause from HPOlibConfigSpace.forbidden import ForbiddenAndConjunction @@ -106,7 +105,7 @@ def predict_proba(self, X, batch_size=None): for k in range(max(1, int(np.ceil(float(X.shape[0]) / batch_size)))): batch_from = k * batch_size - batch_to = min([(k + 1) * batch_size, X.shape[0] + 1]) + batch_to = min([(k + 1) * batch_size, X.shape[0]]) y[batch_from:batch_to] = \ self.predict_proba(X[batch_from:batch_to], batch_size=None) @@ -118,7 +117,7 @@ def predict_proba(self, X, batch_size=None): for k in range(max(1, int(np.ceil(float(X.shape[0]) / batch_size)))): batch_from = k * batch_size - batch_to = min([(k + 1) * batch_size, X.shape[0] + 1]) + batch_to = min([(k + 1) * batch_size, X.shape[0]]) predictions = \ self.predict_proba(X[batch_from:batch_to], batch_size=None) diff --git a/tests/test_classification.py b/tests/test_classification.py index 607a1dd917..90210272bf 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -10,7 +10,8 @@ import sklearn.svm from sklearn.utils.testing import assert_array_almost_equal -from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.configuration_space import ConfigurationSpace, \ + Configuration from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from HPOlibConfigSpace.random_sampler import RandomSampler @@ -214,7 +215,6 @@ def test_predict_batched(self): X_test_ = X_test.copy() prediction_ = cls.predict(X_test_) cls_predict = mock.Mock(wraps=cls._pipeline) - #cls_predict.predict.return_value = lambda X: np.ones((X.shape[0],)) cls._pipeline = cls_predict prediction = cls.predict(X_test, batch_size=20) self.assertEqual((1647,), prediction.shape) @@ -234,6 +234,53 @@ def test_predict_batched(self): self.assertEqual(83, cls_predict.predict.call_count) assert_array_almost_equal(prediction_, prediction) + def test_predict_batched_sparse(self): + cs = ParamSklearnClassifier.get_hyperparameter_search_space( + dataset_properties={'sparse': True}) + # Densifier + RF is the only combination that easily tests sparse + # data with multilabel classification! + config = Configuration(cs, + hyperparameters={"classifier": "random_forest", + "imputation:strategy": "mean", + "preprocessor": "densifier", + 'random_forest:bootstrap': 'True', + 'random_forest:criterion': 'gini', + 'random_forest:max_depth': 'None', + 'random_forest:min_samples_split': 2, + 'random_forest:min_samples_leaf': 2, + 'random_forest:max_features': 0.5, + 'random_forest:max_leaf_nodes': 'None', + 'random_forest:n_estimators': 100, + "rescaling:strategy": "min/max"}) + cls = ParamSklearnClassifier(config) + + # Multiclass + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=True) + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict(X_test_) + cls_predict = mock.Mock(wraps=cls._pipeline) + cls._pipeline = cls_predict + prediction = cls.predict(X_test, batch_size=20) + self.assertEqual((1647,), prediction.shape) + self.assertEqual(83, cls_predict.predict.call_count) + assert_array_almost_equal(prediction_, prediction) + + # Multilabel + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=True) + Y_train = np.array([(y, 26 - y) for y in Y_train]) + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict(X_test_) + cls_predict = mock.Mock(wraps=cls._pipeline) + cls._pipeline = cls_predict + prediction = cls.predict(X_test, batch_size=20) + self.assertEqual((1647, 2), prediction.shape) + self.assertEqual(83, cls_predict.predict.call_count) + assert_array_almost_equal(prediction_, prediction) + def test_predict_proba_batched(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space() default = cs.get_default_configuration() @@ -269,6 +316,59 @@ def test_predict_proba_batched(self): self.assertEqual(84, cls_predict.predict_proba.call_count) assert_array_almost_equal(prediction_, prediction) + def test_predict_proba_batched_sparse(self): + cs = ParamSklearnClassifier.get_hyperparameter_search_space( + dataset_properties={'sparse': True}) + + # Densifier + RF is the only combination that easily tests sparse + # data with multilabel classification! + config = Configuration(cs, + hyperparameters={"classifier": "random_forest", + "imputation:strategy": "mean", + "preprocessor": "densifier", + 'random_forest:bootstrap': 'True', + 'random_forest:criterion': 'gini', + 'random_forest:max_depth': 'None', + 'random_forest:min_samples_split': 2, + 'random_forest:min_samples_leaf': 2, + 'random_forest:max_features': 0.5, + 'random_forest:max_leaf_nodes': 'None', + 'random_forest:n_estimators': 100, + "rescaling:strategy": "min/max"}) + + # Multiclass + cls = ParamSklearnClassifier(config) + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=True) + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict_proba(X_test_) + # The object behind the last step in the pipeline + cls_predict = mock.Mock(wraps=cls._pipeline.steps[-1][1]) + cls._pipeline.steps[-1] = ("estimator", cls_predict) + prediction = cls.predict_proba(X_test, batch_size=20) + self.assertEqual((1647, 10), prediction.shape) + self.assertEqual(84, cls_predict.predict_proba.call_count) + assert_array_almost_equal(prediction_, prediction) + + # Multilabel + cls = ParamSklearnClassifier(config) + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=True) + Y_train = np.array([(y, 26 - y) for y in Y_train]) + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict_proba(X_test_) + cls_predict = mock.Mock(wraps=cls._pipeline.steps[-1][1]) + cls._pipeline.steps[-1] = ("estimator", cls_predict) + prediction = cls.predict_proba(X_test, batch_size=20) + self.assertIsInstance(prediction, list) + self.assertEqual(2, len(prediction)) + self.assertEqual((1647, 10), prediction[0].shape) + self.assertEqual((1647, 10), prediction[1].shape) + self.assertEqual(84, cls_predict.predict_proba.call_count) + assert_array_almost_equal(prediction_, prediction) + @unittest.skip("test_check_random_state Not yet Implemented") def test_check_random_state(self): raise NotImplementedError() diff --git a/tests/test_regression.py b/tests/test_regression.py index a8fc8e93e9..b820513947 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -187,6 +187,24 @@ def test_predict_batched(self): self.assertEqual(18, cls_predict.predict.call_count) assert_array_almost_equal(prediction_, prediction) + def test_predict_batched_sparse(self): + cs = ParamSklearnRegressor.get_hyperparameter_search_space( + dataset_properties={'sparse': True}) + default = cs.get_default_configuration() + cls = ParamSklearnRegressor(default) + + X_train, Y_train, X_test, Y_test = get_dataset(dataset='boston', + make_sparse=True) + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict(X_test_) + cls_predict = mock.Mock(wraps=cls._pipeline) + cls._pipeline = cls_predict + prediction = cls.predict(X_test, batch_size=20) + self.assertEqual((356,), prediction.shape) + self.assertEqual(18, cls_predict.predict.call_count) + assert_array_almost_equal(prediction_, prediction) + @unittest.skip("test_check_random_state Not yet Implemented") def test_check_random_state(self): raise NotImplementedError() From 95c3938dbdc5e08b74af8ae4ba6ab0db2054b2d7 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 1 Apr 2015 14:19:11 +0200 Subject: [PATCH 197/352] Raise ValueError if PCA returns NaN component values. --- ParamSklearn/components/preprocessing/pca.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/ParamSklearn/components/preprocessing/pca.py b/ParamSklearn/components/preprocessing/pca.py index 5e21acddbb..8411cdcfca 100644 --- a/ParamSklearn/components/preprocessing/pca.py +++ b/ParamSklearn/components/preprocessing/pca.py @@ -1,3 +1,4 @@ +import numpy as np import sklearn.decomposition from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -33,6 +34,10 @@ def fit(self, X, Y): components = self.preprocessor.components_ self.preprocessor.components_ = components[:idx] + + if not np.isfinite(self.preprocessor.components_).all(): + raise ValueError("PCA found non-finite components.") + return self def transform(self, X): From 696ab9f0b9d43168429e51f9214fd5f68f338ea7 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 1 Apr 2015 14:41:31 +0200 Subject: [PATCH 198/352] Raise ValueError if Select Percentile mistakenly removes all features --- ParamSklearn/components/preprocessing/select_percentile.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ParamSklearn/components/preprocessing/select_percentile.py b/ParamSklearn/components/preprocessing/select_percentile.py index 8ee52702b5..e89f1e2687 100644 --- a/ParamSklearn/components/preprocessing/select_percentile.py +++ b/ParamSklearn/components/preprocessing/select_percentile.py @@ -13,4 +13,7 @@ def fit(self, X, Y): def transform(self, X): if self.preprocessor is None: raise NotImplementedError() - return self.preprocessor.transform(X) + Xt = self.preprocessor.transform(X) + if Xt.shape[1] == 0: + raise ValueError("%s removed all features." % self.__class__.__name__) + return Xt From 4bcca731c33fd21f77019cd379554fc06dcdb809 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 1 Apr 2015 15:07:50 +0200 Subject: [PATCH 199/352] FIX: bug in Min/Max Scaler; improve numerical stability of min/max scaler --- ParamSklearn/components/preprocessing/rescaling.py | 7 ------- ParamSklearn/components/preprocessing/select_percentile.py | 1 + ParamSklearn/implementations/MinMaxScaler.py | 4 ++-- tests/components/preprocessing/test_scaling.py | 2 +- tests/test_classification.py | 2 +- tests/test_regression.py | 2 +- 6 files changed, 6 insertions(+), 12 deletions(-) diff --git a/ParamSklearn/components/preprocessing/rescaling.py b/ParamSklearn/components/preprocessing/rescaling.py index 61fc069ba3..269896d077 100644 --- a/ParamSklearn/components/preprocessing/rescaling.py +++ b/ParamSklearn/components/preprocessing/rescaling.py @@ -17,13 +17,6 @@ def __init__(self, strategy, random_state=None): def fit(self, X, Y): if self.strategy == "min/max": self.preprocessor = MinMaxScaler(copy=False) - # Fix numeric instabilities - if sparse.issparse(X): - X.data[X.data < 0] = 0 - X.data[X.data > 1] = 1 - else: - X[X < 0] = 0 - X[X > 1] = 1 elif self.strategy == "standard": self.preprocessor = StandardScaler(copy=False) else: diff --git a/ParamSklearn/components/preprocessing/select_percentile.py b/ParamSklearn/components/preprocessing/select_percentile.py index e89f1e2687..0d1b86f11a 100644 --- a/ParamSklearn/components/preprocessing/select_percentile.py +++ b/ParamSklearn/components/preprocessing/select_percentile.py @@ -7,6 +7,7 @@ def fit(self, X, Y): self.preprocessor = sklearn.feature_selection.SelectPercentile( score_func=self.score_func, percentile=self.percentile) + self.preprocessor.fit(X, Y) return self diff --git a/ParamSklearn/implementations/MinMaxScaler.py b/ParamSklearn/implementations/MinMaxScaler.py index 32d8765c1d..e491266867 100644 --- a/ParamSklearn/implementations/MinMaxScaler.py +++ b/ParamSklearn/implementations/MinMaxScaler.py @@ -72,8 +72,8 @@ def fit(self, X, y=None): else: data_min.append(X.data[X.indptr[i]:X.indptr[i + 1]].min()) data_max.append(X.data[X.indptr[i]:X.indptr[i + 1]].max()) - data_min = np.array(data_min) - data_max = np.array(data_max) + data_min = np.array(data_min, dtype=np.float32) + data_max = np.array(data_max, dtype=np.float32) data_range = data_max - data_min else: diff --git a/tests/components/preprocessing/test_scaling.py b/tests/components/preprocessing/test_scaling.py index cd05dafde3..dd20d3208f 100644 --- a/tests/components/preprocessing/test_scaling.py +++ b/tests/components/preprocessing/test_scaling.py @@ -29,5 +29,5 @@ def test_default_configuration_with_sparse_data(self): preprocessing = _test_preprocessing(Rescaling, dataset='boston', make_sparse=True) transformation, original = preprocessing - self.assertAlmostEqual(transformation.max(), 1) + self.assertAlmostEqual(1, transformation.max(), places=6) self.assertTrue(all((original != transformation).data)) diff --git a/tests/test_classification.py b/tests/test_classification.py index 90210272bf..c1a1281772 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -66,7 +66,7 @@ def test_default_configuration(self): auto = ParamSklearnClassifier(default) auto = auto.fit(X_train, Y_train) predictions = auto.predict(X_test) - self.assertAlmostEqual(0.62, + self.assertAlmostEqual(0.95999999999999996, sklearn.metrics.accuracy_score(predictions, Y_test)) scores = auto.predict_proba(X_test) diff --git a/tests/test_regression.py b/tests/test_regression.py index b820513947..1707c55ca6 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -70,7 +70,7 @@ def test_default_configuration(self): predictions = auto.predict(copy.deepcopy(X_test)) # The lower the worse r2_score = sklearn.metrics.r2_score(Y_test, predictions) - self.assertAlmostEqual(0.36938041779824193, r2_score) + self.assertAlmostEqual(0.41142912213964022, r2_score) model_score = auto.score(copy.deepcopy(X_test), Y_test) self.assertEqual(model_score, r2_score) From 11912be8877d6c33b74301fe551759376744eadb Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 7 Apr 2015 13:03:21 +0200 Subject: [PATCH 200/352] Preprocessing: make y a keyword argument where possible --- ParamSklearn/components/preprocessing/densifier.py | 12 +++++++----- ParamSklearn/components/preprocessing/imputation.py | 4 ++-- .../components/preprocessing/kitchen_sinks.py | 4 ++-- .../components/preprocessing/no_preprocessing.py | 2 +- ParamSklearn/components/preprocessing/pca.py | 4 ++-- .../preprocessing/random_trees_embedding.py | 4 ++-- ParamSklearn/components/preprocessing/rescaling.py | 4 ++-- .../components/preprocessing/select_percentile.py | 4 ++-- .../components/preprocessing/sparse_filtering.py | 4 ++-- 9 files changed, 22 insertions(+), 20 deletions(-) diff --git a/ParamSklearn/components/preprocessing/densifier.py b/ParamSklearn/components/preprocessing/densifier.py index 069e8d6b5b..1a9109559b 100644 --- a/ParamSklearn/components/preprocessing/densifier.py +++ b/ParamSklearn/components/preprocessing/densifier.py @@ -1,7 +1,6 @@ from scipy import sparse from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import Constant from ParamSklearn.components.preprocessor_base import \ ParamSklearnPreprocessingAlgorithm @@ -9,14 +8,17 @@ class Densifier(ParamSklearnPreprocessingAlgorithm): - def __init__(self, random_state): - self.random_state = random_state + def __init__(self, random_state=None): + pass - def fit(self, X, Y): + def fit(self, X, y=None): return self def transform(self, X): - return X.todense() + if sparse.issparse(X): + return X.todense() + else: + return X @staticmethod def get_properties(): diff --git a/ParamSklearn/components/preprocessing/imputation.py b/ParamSklearn/components/preprocessing/imputation.py index fafd08b2e1..b197631ec0 100644 --- a/ParamSklearn/components/preprocessing/imputation.py +++ b/ParamSklearn/components/preprocessing/imputation.py @@ -12,10 +12,10 @@ def __init__(self, strategy, random_state=None): # TODO pay attention to the cases when a copy is made (CSR matrices) self.strategy = strategy - def fit(self, X, Y): + def fit(self, X, y=None): self.preprocessor = sklearn.preprocessing.Imputer( strategy=self.strategy, copy=False) - self.preprocessor.fit(X, Y) + self.preprocessor.fit(X) return self def transform(self, X): diff --git a/ParamSklearn/components/preprocessing/kitchen_sinks.py b/ParamSklearn/components/preprocessing/kitchen_sinks.py index ebab4bd265..9bfad4d6f7 100644 --- a/ParamSklearn/components/preprocessing/kitchen_sinks.py +++ b/ParamSklearn/components/preprocessing/kitchen_sinks.py @@ -21,9 +21,9 @@ def __init__(self, gamma, n_components, random_state = None): self.n_components = n_components self.random_state = random_state - def fit(self, X, Y): + def fit(self, X, Y=None): self.preprocessor = sklearn.kernel_approximation.RBFSampler(self.gamma, self.n_components, self.random_state) - self.preprocessor.fit(X, Y) + self.preprocessor.fit(X) return self def transform(self, X): diff --git a/ParamSklearn/components/preprocessing/no_preprocessing.py b/ParamSklearn/components/preprocessing/no_preprocessing.py index 31d721d29c..03579743cf 100644 --- a/ParamSklearn/components/preprocessing/no_preprocessing.py +++ b/ParamSklearn/components/preprocessing/no_preprocessing.py @@ -10,7 +10,7 @@ def __init__(self, random_state): """ This preprocessors does not change the data """ self.preprocessor = None - def fit(self, X, Y): + def fit(self, X, Y=None): self.preprocessor = 0 return self diff --git a/ParamSklearn/components/preprocessing/pca.py b/ParamSklearn/components/preprocessing/pca.py index 8411cdcfca..7455750a65 100644 --- a/ParamSklearn/components/preprocessing/pca.py +++ b/ParamSklearn/components/preprocessing/pca.py @@ -20,10 +20,10 @@ def __init__(self, keep_variance, whiten, random_state=None): self.whiten = whiten self.random_state = random_state - def fit(self, X, Y): + def fit(self, X, Y=None): self.preprocessor = sklearn.decomposition.PCA(whiten=self.whiten, copy=True) - self.preprocessor.fit(X, Y) + self.preprocessor.fit(X) sum_ = 0. idx = 0 diff --git a/ParamSklearn/components/preprocessing/random_trees_embedding.py b/ParamSklearn/components/preprocessing/random_trees_embedding.py index ef49b05071..6c6a0d9c76 100644 --- a/ParamSklearn/components/preprocessing/random_trees_embedding.py +++ b/ParamSklearn/components/preprocessing/random_trees_embedding.py @@ -28,7 +28,7 @@ def __init__(self, n_estimators, max_depth, min_samples_split, self.n_jobs = n_jobs self.random_state = random_state - def fit(self, X, Y): + def fit(self, X, Y=None): self.preprocessor = sklearn.ensemble.RandomTreesEmbedding( n_estimators=self.n_estimators, max_depth=self.max_depth, @@ -39,7 +39,7 @@ def fit(self, X, Y): n_jobs=self.n_jobs, random_state=self.random_state ) - self.preprocessor.fit(X, Y) + self.preprocessor.fit(X) return self def transform(self, X): diff --git a/ParamSklearn/components/preprocessing/rescaling.py b/ParamSklearn/components/preprocessing/rescaling.py index 269896d077..7d500ce1f8 100644 --- a/ParamSklearn/components/preprocessing/rescaling.py +++ b/ParamSklearn/components/preprocessing/rescaling.py @@ -14,14 +14,14 @@ def __init__(self, strategy, random_state=None): # TODO pay attention to the cases when a copy is made self.strategy = strategy - def fit(self, X, Y): + def fit(self, X, Y=None): if self.strategy == "min/max": self.preprocessor = MinMaxScaler(copy=False) elif self.strategy == "standard": self.preprocessor = StandardScaler(copy=False) else: raise ValueError(self.strategy) - self.preprocessor.fit(X, Y) + self.preprocessor.fit(X) return self def transform(self, X): diff --git a/ParamSklearn/components/preprocessing/select_percentile.py b/ParamSklearn/components/preprocessing/select_percentile.py index 0d1b86f11a..cf19ab39c3 100644 --- a/ParamSklearn/components/preprocessing/select_percentile.py +++ b/ParamSklearn/components/preprocessing/select_percentile.py @@ -3,12 +3,12 @@ class SelectPercentileBase(object): - def fit(self, X, Y): + def fit(self, X, y): self.preprocessor = sklearn.feature_selection.SelectPercentile( score_func=self.score_func, percentile=self.percentile) - self.preprocessor.fit(X, Y) + self.preprocessor.fit(X, y) return self def transform(self, X): diff --git a/ParamSklearn/components/preprocessing/sparse_filtering.py b/ParamSklearn/components/preprocessing/sparse_filtering.py index 026dcacfd1..6bbec7a954 100644 --- a/ParamSklearn/components/preprocessing/sparse_filtering.py +++ b/ParamSklearn/components/preprocessing/sparse_filtering.py @@ -13,9 +13,9 @@ def __init__(self, N, maxiter=100, random_state=None): self.maxiter = maxiter self.random_state = random_state - def fit(self, X, Y): + def fit(self, X, Y=None): self.preprocessor = SparseFilteringImpl(self.N, self.maxiter, random_state = self.random_state) - self.preprocessor.fit(X, Y) + self.preprocessor.fit(X) return self def transform(self, X): From aa4085a7b7b90d62abdaa13a9b92da0dd78daa09 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 7 Apr 2015 13:05:12 +0200 Subject: [PATCH 201/352] Update: use float32 for test data; test whether preprocessors copy the data --- ParamSklearn/util.py | 67 ++++++++++++++++++- .../preprocessing/test_NoPreprocessing.py | 7 +- .../preprocessing/test_densifier.py | 8 ++- .../preprocessing/test_imputation.py | 10 ++- .../preprocessing/test_kitchen_sinks.py | 63 ++++++++++++++++- tests/components/preprocessing/test_pca.py | 33 ++++++++- .../test_random_trees_embedding.py | 39 +++++++++-- .../components/preprocessing/test_scaling.py | 7 +- .../test_select_percentile_classification.py | 59 +++++++++++++++- .../test_select_percentile_regression.py | 29 +++++++- .../preprocessing/test_sparse_filtering.py | 8 ++- .../regression/test_gaussian_process.py | 7 +- tests/test_regression.py | 2 +- 13 files changed, 313 insertions(+), 26 deletions(-) diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index c9ac027ab3..4983369063 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -2,6 +2,7 @@ import inspect import os import pkgutil +import unittest import numpy as np import scipy.sparse @@ -48,10 +49,10 @@ def find_sklearn_classes(class_): print classifiers -def get_dataset(dataset='iris', make_sparse=False): +def get_dataset(dataset='iris', make_sparse=False, add_NaNs=False): iris = getattr(sklearn.datasets, "load_%s" % dataset)() - X = iris.data - Y = iris.target + X = iris.data.astype(np.float32) + Y = iris.target.astype(np.int32) rs = np.random.RandomState(42) indices = np.arange(X.shape[0]) train_size = min(int(len(indices) / 3. * 2.), 150) @@ -63,6 +64,10 @@ def get_dataset(dataset='iris', make_sparse=False): X_test = X[train_size:] Y_test = Y[train_size:] + if add_NaNs: + mask = np.random.choice([True, False], size=(X_train.shape)) + X_train[mask] = np.NaN + if make_sparse: X_train[:,0] = 0 X_train[np.random.random(X_train.shape) > 0.5] = 0 @@ -103,6 +108,62 @@ def _test_preprocessing(Preprocessor, dataset='iris', make_sparse=False): return transformer.transform(X_train), original_X_train +class PreprocessingTestCase(unittest.TestCase): + def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False): + # Dense + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris", add_NaNs=add_NaNs) + self.assertEqual(X_train.dtype, np.float32) + + configuration_space = Preprocessor.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = Preprocessor(random_state=1, + **{hp.hyperparameter.name: hp.value for hp in + default.values.values()}) + preprocessor.fit(X_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris", add_NaNs=add_NaNs) + X_train = X_train.astype(np.float64) + configuration_space = Preprocessor.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = Preprocessor(random_state=1, + **{hp.hyperparameter.name: hp.value for hp in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) + + # Sparse + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True, + add_NaNs=add_NaNs) + self.assertEqual(X_train.dtype, np.float32) + configuration_space = Preprocessor.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = Preprocessor(random_state=1, + **{hp.hyperparameter.name: hp.value for hp in + default.values.values()}) + preprocessor.fit(X_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True, + add_NaNs=add_NaNs) + X_train = X_train.astype(np.float64) + configuration_space = Preprocessor.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = Preprocessor(random_state=1, + **{hp.hyperparameter.name: hp.value for hp in + default.values.values()}) + preprocessor.fit(X_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) + + def _test_regressor(Regressor, dataset='diabetes'): X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, make_sparse=False) diff --git a/tests/components/preprocessing/test_NoPreprocessing.py b/tests/components/preprocessing/test_NoPreprocessing.py index 2c0d1edecb..c373382dd4 100644 --- a/tests/components/preprocessing/test_NoPreprocessing.py +++ b/tests/components/preprocessing/test_NoPreprocessing.py @@ -2,10 +2,10 @@ import unittest from ParamSklearn.components.preprocessing.no_preprocessing import NoPreprocessing -from ParamSklearn.util import _test_preprocessing +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase -class NoneComponentTest(unittest.TestCase): +class NoneComponentTest(PreprocessingTestCase): def test_default_configuration(self): transformation, original = _test_preprocessing(NoPreprocessing) self.assertEqual(transformation.shape[0], original.shape[0]) @@ -17,4 +17,7 @@ def test_default_configuration(self): self.assertEqual(np.std(original), np.std(transformation)) self.assertEqual(np.mean(original), np.mean(transformation)) + def test_preprocessing_dtype(self): + super(NoneComponentTest, self)._test_preprocessing_dtype(NoPreprocessing) + diff --git a/tests/components/preprocessing/test_densifier.py b/tests/components/preprocessing/test_densifier.py index 380d57ecdc..699f83b5e3 100644 --- a/tests/components/preprocessing/test_densifier.py +++ b/tests/components/preprocessing/test_densifier.py @@ -3,11 +3,15 @@ import numpy as np from ParamSklearn.components.preprocessing.densifier import Densifier -from ParamSklearn.util import _test_preprocessing +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase -class DensifierComponentTest(unittest.TestCase): +class DensifierComponentTest(PreprocessingTestCase): def test_default_configuration(self): transformation, original = _test_preprocessing(Densifier, make_sparse=True) self.assertEqual(transformation.shape, original.shape) self.assertIsInstance(transformation, np.ndarray) + + def test_preprocessing_dtype(self): + super(DensifierComponentTest, self)._test_preprocessing_dtype(Densifier) + diff --git a/tests/components/preprocessing/test_imputation.py b/tests/components/preprocessing/test_imputation.py index 532535cd1a..7caa6929e9 100644 --- a/tests/components/preprocessing/test_imputation.py +++ b/tests/components/preprocessing/test_imputation.py @@ -3,10 +3,10 @@ from scipy import sparse from ParamSklearn.components.preprocessing.imputation import Imputation -from ParamSklearn.util import _test_preprocessing +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase -class ImputationTest(unittest.TestCase): +class ImputationTest(PreprocessingTestCase): def test_default_configuration(self): transformations = [] for i in range(10): @@ -25,4 +25,8 @@ def test_default_configuration_sparse_data(self): self.assertEqual(transformation.shape, original.shape) self.assertTrue((transformation.data == original.data).all()) self.assertIsInstance(transformation, sparse.csc_matrix) - transformations.append(transformation) \ No newline at end of file + transformations.append(transformation) + + def test_preprocessing_dtype(self): + super(ImputationTest, self)._test_preprocessing_dtype(Imputation, + add_NaNs=True) \ No newline at end of file diff --git a/tests/components/preprocessing/test_kitchen_sinks.py b/tests/components/preprocessing/test_kitchen_sinks.py index 5fd5629c75..3e0552984d 100644 --- a/tests/components/preprocessing/test_kitchen_sinks.py +++ b/tests/components/preprocessing/test_kitchen_sinks.py @@ -1,7 +1,9 @@ import unittest +import numpy as np + from ParamSklearn.components.preprocessing.kitchen_sinks import RandomKitchenSinks -from ParamSklearn.util import _test_preprocessing +from ParamSklearn.util import _test_preprocessing, get_dataset class KitchenSinkComponent(unittest.TestCase): @@ -10,3 +12,62 @@ def test_default_configuration(self): self.assertEqual(transformation.shape[0], original.shape[0]) self.assertEqual(transformation.shape[1], 100) self.assertFalse((transformation == 0).all()) + + @unittest.skip("Right now, the RBFSampler returns a float64 array!") + def _test_preprocessing_dtype(self): + # Dense + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + self.assertEqual(X_train.dtype, np.float32) + + configuration_space = RandomKitchenSinks.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = RandomKitchenSinks(random_state=1, + **{hp.hyperparameter.name: hp.value for hp + in + default.values.values()}) + preprocessor.fit(X_train) + print id(X_train) + Xt = preprocessor.transform(X_train) + print id(Xt) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + X_train = X_train.astype(np.float64) + configuration_space = RandomKitchenSinks.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = RandomKitchenSinks(random_state=1, + **{hp.hyperparameter.name: hp.value for hp + in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) + + # Sparse + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) + self.assertEqual(X_train.dtype, np.float32) + configuration_space = RandomKitchenSinks.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = RandomKitchenSinks(random_state=1, + **{hp.hyperparameter.name: hp.value for hp + in + default.values.values()}) + preprocessor.fit(X_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) + X_train = X_train.astype(np.float64) + configuration_space = RandomKitchenSinks.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = RandomKitchenSinks(random_state=1, + **{hp.hyperparameter.name: hp.value for hp + in + default.values.values()}) + preprocessor.fit(X_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) diff --git a/tests/components/preprocessing/test_pca.py b/tests/components/preprocessing/test_pca.py index 092447eac7..bc7f3f7918 100644 --- a/tests/components/preprocessing/test_pca.py +++ b/tests/components/preprocessing/test_pca.py @@ -1,7 +1,9 @@ import unittest +import numpy as np + from ParamSklearn.components.preprocessing.pca import PCA -from ParamSklearn.util import _test_preprocessing +from ParamSklearn.util import _test_preprocessing, get_dataset class PCAComponentTest(unittest.TestCase): @@ -14,3 +16,32 @@ def test_default_configuration(self): transformations.append(transformation) if len(transformations) > 1: self.assertTrue((transformations[-1] == transformations[-2]).all()) + + def test_preprocessing_dtype(self): + # Dense + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + self.assertEqual(X_train.dtype, np.float32) + + configuration_space = PCA.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = PCA(random_state=1, + **{hp.hyperparameter.name: hp.value for hp + in + default.values.values()}) + preprocessor.fit(X_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + X_train = X_train.astype(np.float64) + configuration_space = PCA.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = PCA(random_state=1, + **{hp.hyperparameter.name: hp.value for hp + in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) \ No newline at end of file diff --git a/tests/components/preprocessing/test_random_trees_embedding.py b/tests/components/preprocessing/test_random_trees_embedding.py index c4f31bb4fe..b61325aeb8 100644 --- a/tests/components/preprocessing/test_random_trees_embedding.py +++ b/tests/components/preprocessing/test_random_trees_embedding.py @@ -1,11 +1,11 @@ import unittest -import numpy +import numpy as np import scipy.sparse from ParamSklearn.components.preprocessing.random_trees_embedding import \ RandomTreesEmbedding -from ParamSklearn.util import _test_preprocessing +from ParamSklearn.util import _test_preprocessing, get_dataset class RandomTreesEmbeddingComponentTest(unittest.TestCase): @@ -13,6 +13,37 @@ def test_default_configuration(self): transformation, original = _test_preprocessing(RandomTreesEmbedding) self.assertEqual(transformation.shape[0], original.shape[0]) self.assertEqual(transformation.shape[1], 213) - self.assertIsInstance(original, numpy.ndarray) + self.assertIsInstance(original, np.ndarray) self.assertTrue(scipy.sparse.issparse(transformation)) - self.assertTrue(all(transformation.data == 1)) \ No newline at end of file + self.assertTrue(all(transformation.data == 1)) + + @unittest.skip("Right now, the RTE returns a float64 array!") + def test_preprocessing_dtype(self): + # Dense + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + self.assertEqual(X_train.dtype, np.float32) + + configuration_space = RandomTreesEmbedding.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = RandomTreesEmbedding(random_state=1, + **{hp.hyperparameter.name: hp.value for hp + in + default.values.values()}) + preprocessor.fit(X_train) + Xt = preprocessor.transform(X_train) + print Xt + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + X_train = X_train.astype(np.float64) + configuration_space = RandomTreesEmbedding.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = RandomTreesEmbedding(random_state=1, + **{hp.hyperparameter.name: hp.value for hp + in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) \ No newline at end of file diff --git a/tests/components/preprocessing/test_scaling.py b/tests/components/preprocessing/test_scaling.py index dd20d3208f..c65a81189b 100644 --- a/tests/components/preprocessing/test_scaling.py +++ b/tests/components/preprocessing/test_scaling.py @@ -4,10 +4,10 @@ import sklearn.datasets from ParamSklearn.components.preprocessing.rescaling import Rescaling -from ParamSklearn.util import _test_preprocessing +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase -class ScalingComponentTest(unittest.TestCase): +class ScalingComponentTest(PreprocessingTestCase): def test_boston_is_not_scaled(self): data = sklearn.datasets.load_boston()['data'] self.assertGreaterEqual(np.max(data), 100) @@ -31,3 +31,6 @@ def test_default_configuration_with_sparse_data(self): transformation, original = preprocessing self.assertAlmostEqual(1, transformation.max(), places=6) self.assertTrue(all((original != transformation).data)) + + def test_preprocessing_dtype(self): + super(ScalingComponentTest, self)._test_preprocessing_dtype(Rescaling) diff --git a/tests/components/preprocessing/test_select_percentile_classification.py b/tests/components/preprocessing/test_select_percentile_classification.py index 7a75093830..29b8d02fe0 100644 --- a/tests/components/preprocessing/test_select_percentile_classification.py +++ b/tests/components/preprocessing/test_select_percentile_classification.py @@ -1,9 +1,10 @@ import unittest +import numpy as np import scipy.sparse from ParamSklearn.components.preprocessing.select_percentile_classification import SelectPercentileClassification -from ParamSklearn.util import _test_preprocessing +from ParamSklearn.util import _test_preprocessing, get_dataset class SelectPercentileClassificationTest(unittest.TestCase): @@ -17,3 +18,59 @@ def test_default_configuration(self): self.assertTrue(scipy.sparse.issparse(transformation)) self.assertEqual(transformation.shape[0], original.shape[0]) self.assertEqual(transformation.shape[1], int(original.shape[1]/2)) + + def test_preprocessing_dtype(self): + # Dense + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + self.assertEqual(X_train.dtype, np.float32) + + configuration_space = SelectPercentileClassification.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectPercentileClassification(random_state=1, + **{hp.hyperparameter.name: hp.value for hp + in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + X_train = X_train.astype(np.float64) + configuration_space = SelectPercentileClassification.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectPercentileClassification(random_state=1, + **{hp.hyperparameter.name: hp.value for hp + in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) + + # Sparse + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) + self.assertEqual(X_train.dtype, np.float32) + configuration_space = SelectPercentileClassification.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectPercentileClassification(random_state=1, + **{hp.hyperparameter.name: hp.value for hp + in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) + X_train = X_train.astype(np.float64) + configuration_space = SelectPercentileClassification.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectPercentileClassification(random_state=1, + **{hp.hyperparameter.name: hp.value for hp + in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) diff --git a/tests/components/preprocessing/test_select_percentile_regression.py b/tests/components/preprocessing/test_select_percentile_regression.py index 1b29349823..337837d2f5 100644 --- a/tests/components/preprocessing/test_select_percentile_regression.py +++ b/tests/components/preprocessing/test_select_percentile_regression.py @@ -1,7 +1,9 @@ import unittest +import numpy as np + from ParamSklearn.components.preprocessing.select_percentile_regression import SelectPercentileRegression -from ParamSklearn.util import _test_preprocessing +from ParamSklearn.util import _test_preprocessing, get_dataset class SelectPercentileRegressionTest(unittest.TestCase): @@ -10,3 +12,28 @@ def test_default_configuration(self): self.assertEqual(transformation.shape[0], original.shape[0]) self.assertEqual(transformation.shape[1], int(original.shape[1]/2)) self.assertFalse((transformation == 0).all()) + + def test_preprocessing_dtype(self): + # Dense + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + self.assertEqual(X_train.dtype, np.float32) + + configuration_space = SelectPercentileRegression.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectPercentileRegression(random_state=1, + **{hp.hyperparameter.name: hp.value for hp in default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + X_train = X_train.astype(np.float64) + configuration_space = SelectPercentileRegression.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectPercentileRegression(random_state=1, + **{hp.hyperparameter.name: hp.value for hp in default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) diff --git a/tests/components/preprocessing/test_sparse_filtering.py b/tests/components/preprocessing/test_sparse_filtering.py index ae78943ad8..a7ddc18981 100644 --- a/tests/components/preprocessing/test_sparse_filtering.py +++ b/tests/components/preprocessing/test_sparse_filtering.py @@ -1,11 +1,15 @@ import unittest from ParamSklearn.components.preprocessing.sparse_filtering import SparseFiltering -from ParamSklearn.util import _test_preprocessing +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase -class SparseFilteringComponentTest(unittest.TestCase): +class SparseFilteringComponentTest(PreprocessingTestCase): def test_default_configuration(self): transformation, original = _test_preprocessing(SparseFiltering) self.assertEqual(transformation.shape[0], original.shape[0]) self.assertFalse((transformation == 0).all()) + + @unittest.skip("Right now, the SparseFiltering returns a float64 array!") + def test_preprocessing_dtype(self): + super(SparseFilteringComponentTest, self)._test_preprocessing_dtype(SparseFiltering) \ No newline at end of file diff --git a/tests/components/regression/test_gaussian_process.py b/tests/components/regression/test_gaussian_process.py index 0a814a7717..af8093801a 100644 --- a/tests/components/regression/test_gaussian_process.py +++ b/tests/components/regression/test_gaussian_process.py @@ -9,8 +9,9 @@ class GaussianProcessComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): - + # Float32 leads to numeric instabilities predictions, targets = _test_regressor(GaussianProcess, dataset='diabetes') - self.assertAlmostEqual(0.23323928076000433, - sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) + self.assertAlmostEqual(0.2331, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions), + places=3) diff --git a/tests/test_regression.py b/tests/test_regression.py index 1707c55ca6..4a240b4e48 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -70,7 +70,7 @@ def test_default_configuration(self): predictions = auto.predict(copy.deepcopy(X_test)) # The lower the worse r2_score = sklearn.metrics.r2_score(Y_test, predictions) - self.assertAlmostEqual(0.41142912213964022, r2_score) + self.assertAlmostEqual(0.41211271098191482, r2_score) model_score = auto.score(copy.deepcopy(X_test), Y_test) self.assertEqual(model_score, r2_score) From 5078f5512fc84c1a6cdec30050f0ef57f0b1f1ed Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 7 Apr 2015 15:00:10 +0200 Subject: [PATCH 202/352] fix bugs --- tests/test_classification.py | 2 +- tests/test_textclassification.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_classification.py b/tests/test_classification.py index c1a1281772..af44ba278a 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -109,7 +109,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(82, len(hyperparameters)) + self.assertEqual(90, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 4, len(conditions)) diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index 1786592ccd..067712de55 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(81, len(hyperparameters)) + self.assertEqual(89, len(hyperparameters)) # The three parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) From a3f50c7e5c7e0be031a9c7c8744cd56e6554dc0d Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 7 Apr 2015 15:00:23 +0200 Subject: [PATCH 203/352] add gpy to deps --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 326c611233..e5ef5b3cf0 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,8 @@ "scipy==0.14.0", "scikit-learn==0.15.2", "nose", - "HPOlibConfigSpace"], + "HPOlibConfigSpace", + "GPy==0.6.0"], test_requires=["mock"], test_suite="nose.collector", package_data={'': ['*.txt', '*.md']}, From 24e026592b99bfdcdbed6fdff61aea388b182d01 Mon Sep 17 00:00:00 2001 From: Manuel Blum Date: Tue, 7 Apr 2015 16:52:33 +0200 Subject: [PATCH 204/352] argument bugfix in GP classify --- ParamSklearn/components/classification/gaussian_process.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/ParamSklearn/components/classification/gaussian_process.py b/ParamSklearn/components/classification/gaussian_process.py index 6a7c286b06..790d72d3ea 100644 --- a/ParamSklearn/components/classification/gaussian_process.py +++ b/ParamSklearn/components/classification/gaussian_process.py @@ -13,7 +13,7 @@ class GPyClassifier():#ParamSklearnClassificationAlgorithm): - def __init__(self, random_state=None, n_inducing=5, ard=False): + def __init__(self, random_state=None, n_inducing=20, ard=False): import GPy global GPy @@ -47,9 +47,7 @@ def fit(self, X, Y): model = GPy.models.SparseGPClassification(X, targets[:,i,None], kernel=kern, - num_inducing=self.n_inducing, - normalize_X=False, - normalize_Y=False) + num_inducing=self.n_inducing) # fit kernel hyperparameters model.optimize('bfgs', max_iters=100) # add to list of estimators From e6df45e2c980576ca96d84dbaa098de74076c720 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 7 Apr 2015 17:09:09 +0200 Subject: [PATCH 205/352] Do not cast targets of test data to int32 any more --- ParamSklearn/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index 4983369063..162556a176 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -52,7 +52,7 @@ def find_sklearn_classes(class_): def get_dataset(dataset='iris', make_sparse=False, add_NaNs=False): iris = getattr(sklearn.datasets, "load_%s" % dataset)() X = iris.data.astype(np.float32) - Y = iris.target.astype(np.int32) + Y = iris.target rs = np.random.RandomState(42) indices = np.arange(X.shape[0]) train_size = min(int(len(indices) / 3. * 2.), 150) From 0980f63b5984f8ac1266e1129df58a3c8c4ede75 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 7 Apr 2015 17:09:32 +0200 Subject: [PATCH 206/352] Make imputation less memory consuming --- .../components/preprocessing/imputation.py | 6 +- ParamSklearn/implementations/Imputation.py | 402 ++++++++++++++++++ tests/implementations/test_imputation.py | 352 +++++++++++++++ 3 files changed, 757 insertions(+), 3 deletions(-) create mode 100644 ParamSklearn/implementations/Imputation.py create mode 100644 tests/implementations/test_imputation.py diff --git a/ParamSklearn/components/preprocessing/imputation.py b/ParamSklearn/components/preprocessing/imputation.py index b197631ec0..df2005f021 100644 --- a/ParamSklearn/components/preprocessing/imputation.py +++ b/ParamSklearn/components/preprocessing/imputation.py @@ -1,4 +1,4 @@ -import sklearn.preprocessing +import ParamSklearn.implementations.Imputation from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter @@ -13,8 +13,8 @@ def __init__(self, strategy, random_state=None): self.strategy = strategy def fit(self, X, y=None): - self.preprocessor = sklearn.preprocessing.Imputer( - strategy=self.strategy, copy=False) + self.preprocessor = ParamSklearn.implementations.Imputation.Imputer( + strategy=self.strategy, copy=False, dtype=X.dtype) self.preprocessor.fit(X) return self diff --git a/ParamSklearn/implementations/Imputation.py b/ParamSklearn/implementations/Imputation.py new file mode 100644 index 0000000000..1115f2c2a5 --- /dev/null +++ b/ParamSklearn/implementations/Imputation.py @@ -0,0 +1,402 @@ +# Authors: Nicolas Tresegnie +# License: BSD 3 clause + +import warnings + +import numpy as np +import numpy.ma as ma +from scipy import sparse +from scipy import stats + +from sklearn.base import BaseEstimator, TransformerMixin +from sklearn.utils import array2d +from sklearn.utils import atleast2d_or_csr +from sklearn.utils import atleast2d_or_csc +from sklearn.utils import as_float_array +from sklearn.utils.fixes import astype + +from sklearn.externals import six + +zip = six.moves.zip +map = six.moves.map + +__all__ = [ + 'Imputer', +] + + +def _get_mask(X, value_to_mask): + """Compute the boolean mask X == missing_values.""" + if value_to_mask == "NaN" or np.isnan(value_to_mask): + return np.isnan(X) + else: + return X == value_to_mask + + +def _get_median(data, n_zeros): + """Compute the median of data with n_zeros additional zeros. + + This function is used to support sparse matrices; it modifies data in-place + """ + n_elems = len(data) + n_zeros + if not n_elems: + return np.nan + n_negative = np.count_nonzero(data < 0) + middle, is_odd = divmod(n_elems, 2) + data.sort() + + if is_odd: + return _get_elem_at_rank(middle, data, n_negative, n_zeros) + + return (_get_elem_at_rank(middle - 1, data, n_negative, n_zeros) + + _get_elem_at_rank(middle, data, n_negative, n_zeros)) / 2. + + +def _get_elem_at_rank(rank, data, n_negative, n_zeros): + """Find the value in data augmented with n_zeros for the given rank""" + if rank < n_negative: + return data[rank] + if rank - n_negative < n_zeros: + return 0 + return data[rank - n_zeros] + + +def _most_frequent(array, extra_value, n_repeat): + """Compute the most frequent value in a 1d array extended with + [extra_value] * n_repeat, where extra_value is assumed to be not part + of the array.""" + # Compute the most frequent value in array only + if array.size > 0: + mode = stats.mode(array) + most_frequent_value = mode[0][0] + most_frequent_count = mode[1][0] + else: + most_frequent_value = 0 + most_frequent_count = 0 + + # Compare to array + [extra_value] * n_repeat + if most_frequent_count == 0 and n_repeat == 0: + return np.nan + elif most_frequent_count < n_repeat: + return extra_value + elif most_frequent_count > n_repeat: + return most_frequent_value + elif most_frequent_count == n_repeat: + # Ties the breaks. Copy the behaviour of scipy.stats.mode + if most_frequent_value < extra_value: + return most_frequent_value + else: + return extra_value + + +class Imputer(BaseEstimator, TransformerMixin): + """Imputation transformer for completing missing values. + + Parameters + ---------- + missing_values : integer or "NaN", optional (default="NaN") + The placeholder for the missing values. All occurrences of + `missing_values` will be imputed. For missing values encoded as np.nan, + use the string value "NaN". + + strategy : string, optional (default="mean") + The imputation strategy. + + - If "mean", then replace missing values using the mean along + the axis. + - If "median", then replace missing values using the median along + the axis. + - If "most_frequent", then replace missing using the most frequent + value along the axis. + + axis : integer, optional (default=0) + The axis along which to impute. + + - If `axis=0`, then impute along columns. + - If `axis=1`, then impute along rows. + + dtype : np.dtype + Determines the dtype of the transformed array. + + verbose : integer, optional (default=0) + Controls the verbosity of the imputer. + + copy : boolean, optional (default=True) + If True, a copy of X will be created. If False, imputation will + be done in-place whenever possible. Note that, in the following cases, + a new copy will always be made, even if `copy=False`: + + - If X is not an array of floating values; + - If X is sparse and `missing_values=0`; + - If `axis=0` and X is encoded as a CSR matrix; + - If `axis=1` and X is encoded as a CSC matrix. + + Attributes + ---------- + `statistics_` : array of shape (n_features,) + The imputation fill value for each feature if axis == 0. + + Notes + ----- + - When ``axis=0``, columns which only contained missing values at `fit` + are discarded upon `transform`. + - When ``axis=1``, an exception is raised if there are rows for which it is + not possible to fill in the missing values (e.g., because they only + contain missing values). + """ + + def __init__(self, missing_values="NaN", strategy="mean", + axis=0, dtype=np.float64, verbose=0, copy=True): + self.missing_values = missing_values + self.strategy = strategy + self.axis = axis + self.dtype=dtype + self.verbose = verbose + self.copy = copy + + def fit(self, X, y=None): + """Fit the imputer on X. + + Parameters + ---------- + X : {array-like, sparse matrix}, shape (n_samples, n_features) + Input data, where ``n_samples`` is the number of samples and + ``n_features`` is the number of features. + + Returns + ------- + self : object + Returns self. + """ + # Check parameters + allowed_strategies = ["mean", "median", "most_frequent"] + if self.strategy not in allowed_strategies: + raise ValueError("Can only use these strategies: {0} " + " got strategy={1}".format(allowed_strategies, + self.strategy)) + + if self.axis not in [0, 1]: + raise ValueError("Can only impute missing values on axis 0 and 1, " + " got axis={0}".format(self.axis)) + + # Since two different arrays can be provided in fit(X) and + # transform(X), the imputation data will be computed in transform() + # when the imputation is done per sample (i.e., when axis=1). + if self.axis == 0: + X = atleast2d_or_csc(X, dtype=self.dtype, force_all_finite=False) + + if sparse.issparse(X): + self.statistics_ = self._sparse_fit(X, + self.strategy, + self.missing_values, + self.axis) + else: + self.statistics_ = self._dense_fit(X, + self.strategy, + self.missing_values, + self.axis) + + return self + + def _sparse_fit(self, X, strategy, missing_values, axis): + """Fit the transformer on sparse data.""" + # Imputation is done "by column", so if we want to do it + # by row we only need to convert the matrix to csr format. + if axis == 1: + X = X.tocsr() + else: + X = X.tocsc() + + # Count the zeros + if missing_values == 0: + n_zeros_axis = np.zeros(X.shape[not axis], dtype=int) + else: + n_zeros_axis = X.shape[axis] - np.diff(X.indptr) + + # Mean + if strategy == "mean": + if missing_values != 0: + n_non_missing = n_zeros_axis + + # Mask the missing elements + mask_missing_values = _get_mask(X.data, missing_values) + mask_valids = np.logical_not(mask_missing_values) + + # Sum only the valid elements + new_data = X.data.copy() + new_data[mask_missing_values] = 0 + X = sparse.csc_matrix((new_data, X.indices, X.indptr), + copy=False) + sums = X.sum(axis=0) + + # Count the elements != 0 + mask_non_zeros = sparse.csc_matrix( + (mask_valids.astype(np.float64), + X.indices, + X.indptr), copy=False) + s = mask_non_zeros.sum(axis=0) + n_non_missing = np.add(n_non_missing, s) + + else: + sums = X.sum(axis=axis) + n_non_missing = np.diff(X.indptr) + + # Ignore the error, columns with a np.nan statistics_ + # are not an error at this point. These columns will + # be removed in transform + with np.errstate(all="ignore"): + return np.ravel(sums) / np.ravel(n_non_missing) + + # Median + Most frequent + else: + # Remove the missing values, for each column + columns_all = np.hsplit(X.data, X.indptr[1:-1]) + mask_missing_values = _get_mask(X.data, missing_values) + mask_valids = np.hsplit(np.logical_not(mask_missing_values), + X.indptr[1:-1]) + + # astype necessary for bug in numpy.hsplit before v1.9 + columns = [col[astype(mask, bool, copy=False)] + for col, mask in zip(columns_all, mask_valids)] + + # Median + if strategy == "median": + median = np.empty(len(columns)) + for i, column in enumerate(columns): + median[i] = _get_median(column, n_zeros_axis[i]) + + return median + + # Most frequent + elif strategy == "most_frequent": + most_frequent = np.empty(len(columns)) + + for i, column in enumerate(columns): + most_frequent[i] = _most_frequent(column, + 0, + n_zeros_axis[i]) + + return most_frequent + + def _dense_fit(self, X, strategy, missing_values, axis): + """Fit the transformer on dense data.""" + X = array2d(X, force_all_finite=False) + mask = _get_mask(X, missing_values) + masked_X = ma.masked_array(X, mask=mask) + + # Mean + if strategy == "mean": + mean_masked = np.ma.mean(masked_X, axis=axis) + # Avoid the warning "Warning: converting a masked element to nan." + mean = np.ma.getdata(mean_masked) + mean[np.ma.getmask(mean_masked)] = np.nan + + return mean + + # Median + elif strategy == "median": + if tuple(int(v) for v in np.__version__.split('.')[:2]) < (1, 5): + # In old versions of numpy, calling a median on an array + # containing nans returns nan. This is different is + # recent versions of numpy, which we want to mimic + masked_X.mask = np.logical_or(masked_X.mask, + np.isnan(X)) + median_masked = np.ma.median(masked_X, axis=axis) + # Avoid the warning "Warning: converting a masked element to nan." + median = np.ma.getdata(median_masked) + median[np.ma.getmaskarray(median_masked)] = np.nan + + return median + + # Most frequent + elif strategy == "most_frequent": + # scipy.stats.mstats.mode cannot be used because it will no work + # properly if the first element is masked and if it's frequency + # is equal to the frequency of the most frequent valid element + # See https://github.com/scipy/scipy/issues/2636 + + # To be able access the elements by columns + if axis == 0: + X = X.transpose() + mask = mask.transpose() + + most_frequent = np.empty(X.shape[0]) + + for i, (row, row_mask) in enumerate(zip(X[:], mask[:])): + row_mask = np.logical_not(row_mask).astype(np.bool) + row = row[row_mask] + most_frequent[i] = _most_frequent(row, np.nan, 0) + + return most_frequent + + def transform(self, X): + """Impute all missing values in X. + + Parameters + ---------- + X : {array-like, sparse matrix}, shape = [n_samples, n_features] + The input data to complete. + """ + # Copy just once + X = as_float_array(X, copy=self.copy, force_all_finite=False) + + # Since two different arrays can be provided in fit(X) and + # transform(X), the imputation data need to be recomputed + # when the imputation is done per sample + if self.axis == 1: + X = atleast2d_or_csr(X, force_all_finite=False, copy=False) + + if sparse.issparse(X): + statistics = self._sparse_fit(X, + self.strategy, + self.missing_values, + self.axis) + + else: + statistics = self._dense_fit(X, + self.strategy, + self.missing_values, + self.axis) + else: + X = atleast2d_or_csc(X, force_all_finite=False, copy=False) + statistics = self.statistics_ + + # Delete the invalid rows/columns + invalid_mask = np.isnan(statistics) + valid_mask = np.logical_not(invalid_mask) + valid_statistics = statistics[valid_mask] + valid_statistics_indexes = np.where(valid_mask)[0] + missing = np.arange(X.shape[not self.axis])[invalid_mask] + + if self.axis == 0 and invalid_mask.any(): + if self.verbose: + warnings.warn("Deleting features without " + "observed values: %s" % missing) + X = X[:, valid_statistics_indexes] + elif self.axis == 1 and invalid_mask.any(): + raise ValueError("Some rows only contain " + "missing values: %s" % missing) + + # Do actual imputation + if sparse.issparse(X) and self.missing_values != 0: + mask = _get_mask(X.data, self.missing_values) + indexes = np.repeat(np.arange(len(X.indptr) - 1, dtype=np.int), + np.diff(X.indptr))[mask] + + X.data[mask] = valid_statistics[indexes].astype(X.dtype) + else: + if sparse.issparse(X): + X = X.toarray() + + mask = _get_mask(X, self.missing_values) + n_missing = np.sum(mask, axis=self.axis) + values = np.repeat(valid_statistics, n_missing) + + if self.axis == 0: + coordinates = np.where(mask.transpose())[::-1] + else: + coordinates = mask + + X[coordinates] = values + + return X diff --git a/tests/implementations/test_imputation.py b/tests/implementations/test_imputation.py new file mode 100644 index 0000000000..e3616349b3 --- /dev/null +++ b/tests/implementations/test_imputation.py @@ -0,0 +1,352 @@ +import unittest + +import numpy as np +from scipy import sparse + +from sklearn.utils.testing import assert_equal +from sklearn.utils.testing import assert_array_equal +from sklearn.utils.testing import assert_raises +from sklearn.utils.testing import assert_false +from sklearn.utils.testing import assert_true + +from ParamSklearn.implementations.Imputation import Imputer +from sklearn.pipeline import Pipeline +from sklearn import grid_search +from sklearn import tree +from sklearn.random_projection import sparse_random_matrix + + +class ImputationTest(unittest.TestCase): + def _check_statistics(self, X, X_true, + strategy, statistics, missing_values): + """Utility function for testing imputation for a given strategy. + + Test: + - along the two axes + - with dense and sparse arrays + + Check that: + - the statistics (mean, median, mode) are correct + - the missing values are imputed correctly""" + + err_msg = "Parameters: strategy = %s, missing_values = %s, " \ + "axis = {0}, sparse = {1}" % (strategy, missing_values) + + # Normal matrix, axis = 0 + imputer = Imputer(missing_values, strategy=strategy, axis=0) + X_trans = imputer.fit(X).transform(X.copy()) + assert_array_equal(imputer.statistics_, statistics, + err_msg.format(0, False)) + assert_array_equal(X_trans, X_true, err_msg.format(0, False)) + + # Normal matrix, axis = 1 + imputer = Imputer(missing_values, strategy=strategy, axis=1) + imputer.fit(X.transpose()) + if np.isnan(statistics).any(): + assert_raises(ValueError, imputer.transform, X.copy().transpose()) + else: + X_trans = imputer.transform(X.copy().transpose()) + assert_array_equal(X_trans, X_true.transpose(), + err_msg.format(1, False)) + + # Sparse matrix, axis = 0 + imputer = Imputer(missing_values, strategy=strategy, axis=0) + imputer.fit(sparse.csc_matrix(X)) + X_trans = imputer.transform(sparse.csc_matrix(X.copy())) + + if sparse.issparse(X_trans): + X_trans = X_trans.toarray() + + assert_array_equal(imputer.statistics_, statistics, + err_msg.format(0, True)) + assert_array_equal(X_trans, X_true, err_msg.format(0, True)) + + # Sparse matrix, axis = 1 + imputer = Imputer(missing_values, strategy=strategy, axis=1) + imputer.fit(sparse.csc_matrix(X.transpose())) + if np.isnan(statistics).any(): + assert_raises(ValueError, imputer.transform, + sparse.csc_matrix(X.copy().transpose())) + else: + X_trans = imputer.transform(sparse.csc_matrix(X.copy().transpose())) + + if sparse.issparse(X_trans): + X_trans = X_trans.toarray() + + assert_array_equal(X_trans, X_true.transpose(), + err_msg.format(1, True)) + + + def test_imputation_shape(self): + """Verify the shapes of the imputed matrix for different strategies.""" + X = np.random.randn(10, 2) + X[::2] = np.nan + + for strategy in ['mean', 'median', 'most_frequent']: + imputer = Imputer(strategy=strategy) + X_imputed = imputer.fit_transform(X) + assert_equal(X_imputed.shape, (10, 2)) + X_imputed = imputer.fit_transform(sparse.csr_matrix(X)) + assert_equal(X_imputed.shape, (10, 2)) + + + def test_imputation_mean_median_only_zero(self): + """Test imputation using the mean and median strategies, when + missing_values == 0.""" + X = np.array([ + [np.nan, 0, 0, 0, 5], + [np.nan, 1, 0, np.nan, 3], + [np.nan, 2, 0, 0, 0], + [np.nan, 6, 0, 5, 13], + ]) + + X_imputed_mean = np.array([ + [3, 5], + [1, 3], + [2, 7], + [6, 13], + ]) + statistics_mean = [np.nan, 3, np.nan, np.nan, 7] + + # Behaviour of median with NaN is undefined, e.g. different results in + # np.median and np.ma.median + X_for_median = X[:, [0, 1, 2, 4]] + X_imputed_median = np.array([ + [2, 5], + [1, 3], + [2, 5], + [6, 13], + ]) + statistics_median = [np.nan, 2, np.nan, 5] + + self._check_statistics(X, X_imputed_mean, "mean", statistics_mean, 0) + self._check_statistics(X_for_median, X_imputed_median, "median", + statistics_median, 0) + + + def test_imputation_mean_median(self): + """Test imputation using the mean and median strategies, when + missing_values != 0.""" + rng = np.random.RandomState(0) + + dim = 10 + dec = 10 + shape = (dim * dim, dim + dec) + + zeros = np.zeros(shape[0]) + values = np.arange(1, shape[0] + 1) + values[4::2] = - values[4::2] + + tests = [("mean", "NaN", lambda z, v, p: np.mean(np.hstack((z, v)))), + ("mean", 0, lambda z, v, p: np.mean(v)), + ("median", "NaN", lambda z, v, p: np.median(np.hstack((z, v)))), + ("median", 0, lambda z, v, p: np.median(v))] + + for strategy, test_missing_values, true_value_fun in tests: + X = np.empty(shape) + X_true = np.empty(shape) + true_statistics = np.empty(shape[1]) + + # Create a matrix X with columns + # - with only zeros, + # - with only missing values + # - with zeros, missing values and values + # And a matrix X_true containing all true values + for j in range(shape[1]): + nb_zeros = (j - dec + 1 > 0) * (j - dec + 1) * (j - dec + 1) + nb_missing_values = max(shape[0] + dec * dec + - (j + dec) * (j + dec), 0) + nb_values = shape[0] - nb_zeros - nb_missing_values + + z = zeros[:nb_zeros] + p = np.repeat(test_missing_values, nb_missing_values) + v = values[rng.permutation(len(values))[:nb_values]] + + true_statistics[j] = true_value_fun(z, v, p) + + # Create the columns + X[:, j] = np.hstack((v, z, p)) + + if 0 == test_missing_values: + X_true[:, j] = np.hstack((v, + np.repeat( + true_statistics[j], + nb_missing_values + nb_zeros))) + else: + X_true[:, j] = np.hstack((v, + z, + np.repeat(true_statistics[j], + nb_missing_values))) + + # Shuffle them the same way + np.random.RandomState(j).shuffle(X[:, j]) + np.random.RandomState(j).shuffle(X_true[:, j]) + + # Mean doesn't support columns containing NaNs, median does + if strategy == "median": + cols_to_keep = ~np.isnan(X_true).any(axis=0) + else: + cols_to_keep = ~np.isnan(X_true).all(axis=0) + + X_true = X_true[:, cols_to_keep] + + self._check_statistics(X, X_true, strategy, + true_statistics, test_missing_values) + + + def test_imputation_median_special_cases(self): + """Test median imputation with sparse boundary cases + """ + X = np.array([ + [0, np.nan, np.nan], # odd: implicit zero + [5, np.nan, np.nan], # odd: explicit nonzero + [0, 0, np.nan], # even: average two zeros + [-5, 0, np.nan], # even: avg zero and neg + [0, 5, np.nan], # even: avg zero and pos + [4, 5, np.nan], # even: avg nonzeros + [-4, -5, np.nan], # even: avg negatives + [-1, 2, np.nan], # even: crossing neg and pos + ]).transpose() + + X_imputed_median = np.array([ + [0, 0, 0], + [5, 5, 5], + [0, 0, 0], + [-5, 0, -2.5], + [0, 5, 2.5], + [4, 5, 4.5], + [-4, -5, -4.5], + [-1, 2, .5], + ]).transpose() + statistics_median = [0, 5, 0, -2.5, 2.5, 4.5, -4.5, .5] + + self._check_statistics(X, X_imputed_median, "median", + statistics_median, 'NaN') + + + def test_imputation_most_frequent(self): + """Test imputation using the most-frequent strategy.""" + X = np.array([ + [-1, -1, 0, 5], + [-1, 2, -1, 3], + [-1, 1, 3, -1], + [-1, 2, 3, 7], + ]) + + X_true = np.array([ + [2, 0, 5], + [2, 3, 3], + [1, 3, 3], + [2, 3, 7], + ]) + + # scipy.stats.mode, used in Imputer, doesn't return the first most + # frequent as promised in the doc but the lowest most frequent. When this + # test will fail after an update of scipy, Imputer will need to be updated + # to be consistent with the new (correct) behaviour + self._check_statistics(X, X_true, "most_frequent", [np.nan, 2, 3, 3], + -1) + + + def test_imputation_pipeline_grid_search(self): + """Test imputation within a pipeline + gridsearch.""" + pipeline = Pipeline([('imputer', Imputer(missing_values=0)), + ('tree', tree.DecisionTreeRegressor(random_state=0))]) + + parameters = { + 'imputer__strategy': ["mean", "median", "most_frequent"], + 'imputer__axis': [0, 1] + } + + l = 100 + X = sparse_random_matrix(l, l, density=0.10) + Y = sparse_random_matrix(l, 1, density=0.10).toarray() + gs = grid_search.GridSearchCV(pipeline, parameters) + gs.fit(X, Y) + + + def test_imputation_pickle(self): + """Test for pickling imputers.""" + import pickle + + l = 100 + X = sparse_random_matrix(l, l, density=0.10) + + for strategy in ["mean", "median", "most_frequent"]: + imputer = Imputer(missing_values=0, strategy=strategy) + imputer.fit(X) + + imputer_pickled = pickle.loads(pickle.dumps(imputer)) + + assert_array_equal(imputer.transform(X.copy()), + imputer_pickled.transform(X.copy()), + "Fail to transform the data after pickling " + "(strategy = %s)" % (strategy)) + + + def test_imputation_copy(self): + """Test imputation with copy""" + X_orig = sparse_random_matrix(5, 5, density=0.75, random_state=0) + + # copy=True, dense => copy + X = X_orig.copy().toarray() + imputer = Imputer(missing_values=0, strategy="mean", copy=True) + Xt = imputer.fit(X).transform(X) + Xt[0, 0] = -1 + assert_false(np.all(X == Xt)) + + # copy=True, sparse csr => copy + X = X_orig.copy() + imputer = Imputer(missing_values=X.data[0], strategy="mean", copy=True) + Xt = imputer.fit(X).transform(X) + Xt.data[0] = -1 + assert_false(np.all(X.data == Xt.data)) + + # copy=False, dense => no copy + X = X_orig.copy().toarray() + imputer = Imputer(missing_values=0, strategy="mean", copy=False) + Xt = imputer.fit(X).transform(X) + Xt[0, 0] = -1 + assert_true(np.all(X == Xt)) + + # copy=False, sparse csr, axis=1 => no copy + X = X_orig.copy() + imputer = Imputer(missing_values=X.data[0], strategy="mean", + copy=False, axis=1) + Xt = imputer.fit(X).transform(X) + Xt.data[0] = -1 + assert_true(np.all(X.data == Xt.data)) + + # copy=False, sparse csc, axis=0 => no copy + X = X_orig.copy().tocsc() + imputer = Imputer(missing_values=X.data[0], strategy="mean", + copy=False, axis=0) + Xt = imputer.fit(X).transform(X) + Xt.data[0] = -1 + assert_true(np.all(X.data == Xt.data)) + + # copy=False, sparse csr, axis=0 => copy + X = X_orig.copy() + imputer = Imputer(missing_values=X.data[0], strategy="mean", + copy=False, axis=0) + Xt = imputer.fit(X).transform(X) + Xt.data[0] = -1 + assert_false(np.all(X.data == Xt.data)) + + # copy=False, sparse csc, axis=1 => copy + X = X_orig.copy().tocsc() + imputer = Imputer(missing_values=X.data[0], strategy="mean", + copy=False, axis=1) + Xt = imputer.fit(X).transform(X) + Xt.data[0] = -1 + assert_false(np.all(X.data == Xt.data)) + + # copy=False, sparse csr, axis=1, missing_values=0 => copy + X = X_orig.copy() + imputer = Imputer(missing_values=0, strategy="mean", + copy=False, axis=1) + Xt = imputer.fit(X).transform(X) + assert_false(sparse.issparse(Xt)) + + # Note: If X is sparse and if missing_values=0, then a (dense) copy of X is + # made, even if copy=False. From 433b03072378b7719a75277d81447cac5b169f64 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 7 Apr 2015 19:27:01 +0200 Subject: [PATCH 207/352] Readd GP to the configuration space --- .../components/classification/gaussian_process.py | 2 +- tests/components/classification/test_gaussian_process.py | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/ParamSklearn/components/classification/gaussian_process.py b/ParamSklearn/components/classification/gaussian_process.py index 790d72d3ea..7a00e6128f 100644 --- a/ParamSklearn/components/classification/gaussian_process.py +++ b/ParamSklearn/components/classification/gaussian_process.py @@ -12,7 +12,7 @@ -class GPyClassifier():#ParamSklearnClassificationAlgorithm): +class GPyClassifier(ParamSklearnClassificationAlgorithm): def __init__(self, random_state=None, n_inducing=20, ard=False): import GPy global GPy diff --git a/tests/components/classification/test_gaussian_process.py b/tests/components/classification/test_gaussian_process.py index 46fe0887aa..55ee01a818 100644 --- a/tests/components/classification/test_gaussian_process.py +++ b/tests/components/classification/test_gaussian_process.py @@ -8,8 +8,10 @@ class GPyClassifierComponentTest(unittest.TestCase): def test_default_configuration(self): - for i in range(10): + for i in range(2): predictions, targets = _test_classifier(GPyClassifier) - self.assertAlmostEqual(0.95999999999999996, - sklearn.metrics.accuracy_score(predictions, targets)) + self.assertGreaterEqual( + sklearn.metrics.accuracy_score(predictions, targets), 0.958) + self.assertLessEqual( + sklearn.metrics.accuracy_score(predictions, targets), 0.98) From 958eceabd9c5c68fc19a19cd76b5e7a4c9206e17 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 7 Apr 2015 19:27:27 +0200 Subject: [PATCH 208/352] FIX: densifier returns array instead of matrix --- ParamSklearn/components/preprocessing/densifier.py | 2 +- tests/components/preprocessing/test_densifier.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/ParamSklearn/components/preprocessing/densifier.py b/ParamSklearn/components/preprocessing/densifier.py index 1a9109559b..10d9c45cca 100644 --- a/ParamSklearn/components/preprocessing/densifier.py +++ b/ParamSklearn/components/preprocessing/densifier.py @@ -16,7 +16,7 @@ def fit(self, X, y=None): def transform(self, X): if sparse.issparse(X): - return X.todense() + return X.todense().getA() else: return X diff --git a/tests/components/preprocessing/test_densifier.py b/tests/components/preprocessing/test_densifier.py index 699f83b5e3..3f0d21386e 100644 --- a/tests/components/preprocessing/test_densifier.py +++ b/tests/components/preprocessing/test_densifier.py @@ -9,6 +9,7 @@ class DensifierComponentTest(PreprocessingTestCase): def test_default_configuration(self): transformation, original = _test_preprocessing(Densifier, make_sparse=True) + self.assertIsInstance(transformation, np.ndarray) self.assertEqual(transformation.shape, original.shape) self.assertIsInstance(transformation, np.ndarray) From 9935f3880cd8c761a141eb75a6e6755f799475da Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 7 Apr 2015 19:31:51 +0200 Subject: [PATCH 209/352] Adapt test fixtures --- source/first_steps.rst | 2 +- tests/test_classification.py | 2 +- tests/test_textclassification.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/source/first_steps.rst b/source/first_steps.rst index 654e1d8ce2..9bb492f11c 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -24,4 +24,4 @@ configuration on the iris dataset. >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.90000000000000002 + 0.93999999999999995 diff --git a/tests/test_classification.py b/tests/test_classification.py index af44ba278a..c14e6054f6 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -109,7 +109,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(90, len(hyperparameters)) + self.assertEqual(83, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 4, len(conditions)) diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index 067712de55..b868b73abe 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(89, len(hyperparameters)) + self.assertEqual(82, len(hyperparameters)) # The three parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) From 83a1404244e13324b7af74fc270125a4af58cfd5 Mon Sep 17 00:00:00 2001 From: Jost Tobias Springenberg Date: Thu, 9 Apr 2015 13:54:20 +0200 Subject: [PATCH 210/352] add logistic regression according to the least squares revisited paper --- .../components/classification/proj_logit.py | 62 +++++++++++++ ParamSklearn/implementations/ProjLogit.py | 87 +++++++++++++++++++ .../classification/test_proj_logit.py | 14 +++ tests/implementations/test_ProjLogit.py | 39 +++++++++ 4 files changed, 202 insertions(+) create mode 100644 ParamSklearn/components/classification/proj_logit.py create mode 100644 ParamSklearn/implementations/ProjLogit.py create mode 100644 tests/components/classification/test_proj_logit.py create mode 100644 tests/implementations/test_ProjLogit.py diff --git a/ParamSklearn/components/classification/proj_logit.py b/ParamSklearn/components/classification/proj_logit.py new file mode 100644 index 0000000000..52e7b375f9 --- /dev/null +++ b/ParamSklearn/components/classification/proj_logit.py @@ -0,0 +1,62 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.implementations import ProjLogit + + + +class ProjLogitCLassifier(ParamSklearnClassificationAlgorithm): + + def __init__(self, max_epochs = 10, random_state=None, n_jobs=1): + self.max_epochs = max_epochs + self.estimator = None + + + def fit(self, X, Y): + self.estimator = ProjLogit.ProjLogit(max_epochs = int(self.max_epochs)) + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(): + return {'shortname': 'PLogit', + 'name': 'Logistic Regresion using Least Squares', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': True, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, + 'preferred_dtype': np.float32} + + + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + max_epochs = UniformIntegerHyperparameter("max_epochs", 2, 100, default=10) + cs = ConfigurationSpace() + cs.add_hyperparameter(max_epochs) + return cs diff --git a/ParamSklearn/implementations/ProjLogit.py b/ParamSklearn/implementations/ProjLogit.py new file mode 100644 index 0000000000..b9ab9c8e60 --- /dev/null +++ b/ParamSklearn/implementations/ProjLogit.py @@ -0,0 +1,87 @@ +import numpy as np +import numpy.random as npr + +# from http://arxiv.org/pdf/1309.1541v1.pdf +def proj_simplex(Y): + N,D = np.shape(Y) + # sort in descending order + X = -np.sort(-Y) + Xsum = np.cumsum(X, axis = 1) - 1 + Xsum = Xsum * (1./np.arange(1,D+1)) + biggest = np.sum(X > Xsum, axis = 1) + # TODO last step could be made faster + # via ravel / linear indexing + subtract = np.zeros((N, 1)) + for i in range(N): + subtract[i] = Xsum[i, biggest[i]-1] + return np.maximum(Y - subtract, 0) + + +class ProjLogit(object): + + def __init__(self, max_epochs = 10, verbose = False): + self.w = None + self.ws = None + self.max_epochs = max_epochs + self.verbose = verbose + + def fit(self, X, Y): + # get one hot encoding and add a bias + n = X.shape[0] + trainx = np.hstack([np.ones((n, 1)), X]) + k = np.max(Y) + 1 + if self.verbose: + print("Using {} samples of {} classes".format(n,k)) + yt = np.zeros((n, k)) + for i in range(n): + yt[i, Y[i]] = 1 + # initialize with linear regression + precond = np.eye(trainx.shape[1]) * np.sqrt(n) + C = np.linalg.cholesky(0.5 * np.dot(trainx.T,trainx) + precond) + wp = np.linalg.solve(C, np.dot(trainx.T, yt)) + w = np.linalg.solve(C.T, wp) + pred_train = np.dot(trainx, w) + for i in range(self.max_epochs): + # expand prediction + res = np.hstack([pred_train, np.power(pred_train, 2) / 2., np.power(pred_train, 3) / 6., np.power(pred_train, 4) / 24.]) + # solve with linear regression + precond = np.eye(res.shape[1]) * np.sqrt(n) + Cp = np.linalg.cholesky(np.dot(res.T,res) + precond) + ws = np.linalg.solve(Cp.T, np.linalg.solve(Cp, np.dot(res.T, yt))) + # project to probability simplex + p_res = proj_simplex(np.dot(res, ws)) + # and solve again with updated residual + wp = np.linalg.solve(C, np.dot(trainx.T, (yt - p_res))) + w = np.linalg.solve(C.T, wp) + pred_train = p_res + np.dot(trainx, w) + obj = np.linalg.norm(yt - pred_train) + + # compute train error + errort = np.sum(np.argmax(pred_train, axis = 1) != Y) + # print training error + if self.verbose: + print("Epoch {} obj: {} train error: {}".format(i,obj,1.*errort/n)) + self.ws = ws + self.w = w + return self + + def predict(self, X): + testx = np.hstack([np.ones((X.shape[0], 1)), X]) + pred = np.dot(testx, self.w) + res = np.argmax(pred, axis = 1) + return res + + def predict_proba(self, X): + if self.w == None: + return np.zeros(X.shape[0]) + testx = np.hstack([np.ones((X.shape[0], 1)), X]) + pred = np.dot(testx, self.w) + #print(pred) + p_res = proj_simplex(pred) + return p_res + + def predict_log_proba(self, X): + if self.w == None: + return np.zeros(X.shape[0]) + res = np.log(self.predict_proba(X)) + return res diff --git a/tests/components/classification/test_proj_logit.py b/tests/components/classification/test_proj_logit.py new file mode 100644 index 0000000000..4568d67b2b --- /dev/null +++ b/tests/components/classification/test_proj_logit.py @@ -0,0 +1,14 @@ +import unittest + +from ParamSklearn.components.classification.proj_logit import ProjLogitCLassifier +from ParamSklearn.util import _test_classifier + +import sklearn.metrics + + +class ProjLogitComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_classifier(ProjLogitCLassifier, dataset='iris') + self.assertAlmostEqual(0.85999999999999996, + sklearn.metrics.accuracy_score(predictions, targets)) diff --git a/tests/implementations/test_ProjLogit.py b/tests/implementations/test_ProjLogit.py new file mode 100644 index 0000000000..b1e4ff0abe --- /dev/null +++ b/tests/implementations/test_ProjLogit.py @@ -0,0 +1,39 @@ +import unittest +import os +import numpy as np +#import scipy.io + +from ParamSklearn.implementations.ProjLogit import ProjLogit + + +class TestProjLogit(unittest.TestCase): + def test_sparse_filtering(self): + """Test logistic regression implementation based on least squares""" + + # simple test that should work out + trainx = np.random.rand(100,3) + trainy = np.zeros(10000) + testx = np.random.rand(100,3) + testy = np.zeros(100) + for i in range(100): + if trainx[i, 2] > 0.5: + trainy[i] = 1 + for i in range(100): + if testx[i, 2] > 0.5: + testy[i] = 1 + + model = ProjLogit(max_epochs = 10, verbose = True) + model.fit(trainx, trainy) + print("weights:") + print(model.w) + predicted_prob = model.predict_proba(testx) + predicted2 = np.argmax(predicted_prob, axis = 1) + predicted = model.predict(testx) + + #print(predicted) + #print(testy) + #print((predicted != testy).sum()) + #print((predicted2 != testy).sum()) + self.assertTrue((predicted == predicted2).all()) + self.assertTrue(((1 - predicted_prob.sum(axis=1)) < 1e-3).all()) + self.assertTrue((predicted != testy).sum() < 20) From 1ec4287799a1ccfc019f564f3ce0de468de6e313 Mon Sep 17 00:00:00 2001 From: Jost Tobias Springenberg Date: Thu, 9 Apr 2015 14:16:14 +0200 Subject: [PATCH 211/352] fix predict method for projlogit, I completely skrewed that one up ;) --- .../components/classification/proj_logit.py | 2 +- ParamSklearn/implementations/ProjLogit.py | 33 ++++++++++--------- .../classification/test_proj_logit.py | 2 +- tests/implementations/test_ProjLogit.py | 4 +-- 4 files changed, 22 insertions(+), 19 deletions(-) diff --git a/ParamSklearn/components/classification/proj_logit.py b/ParamSklearn/components/classification/proj_logit.py index 52e7b375f9..f9a40ed45d 100644 --- a/ParamSklearn/components/classification/proj_logit.py +++ b/ParamSklearn/components/classification/proj_logit.py @@ -56,7 +56,7 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - max_epochs = UniformIntegerHyperparameter("max_epochs", 2, 100, default=10) + max_epochs = UniformIntegerHyperparameter("max_epochs", 1, 20, default=2) cs = ConfigurationSpace() cs.add_hyperparameter(max_epochs) return cs diff --git a/ParamSklearn/implementations/ProjLogit.py b/ParamSklearn/implementations/ProjLogit.py index b9ab9c8e60..cf12df75d9 100644 --- a/ParamSklearn/implementations/ProjLogit.py +++ b/ParamSklearn/implementations/ProjLogit.py @@ -20,8 +20,9 @@ def proj_simplex(Y): class ProjLogit(object): def __init__(self, max_epochs = 10, verbose = False): - self.w = None - self.ws = None + self.w0 = None + self.ws_all = [] + self.w_all = [] self.max_epochs = max_epochs self.verbose = verbose @@ -40,6 +41,7 @@ def fit(self, X, Y): C = np.linalg.cholesky(0.5 * np.dot(trainx.T,trainx) + precond) wp = np.linalg.solve(C, np.dot(trainx.T, yt)) w = np.linalg.solve(C.T, wp) + self.w0 = np.copy(w) pred_train = np.dot(trainx, w) for i in range(self.max_epochs): # expand prediction @@ -48,11 +50,13 @@ def fit(self, X, Y): precond = np.eye(res.shape[1]) * np.sqrt(n) Cp = np.linalg.cholesky(np.dot(res.T,res) + precond) ws = np.linalg.solve(Cp.T, np.linalg.solve(Cp, np.dot(res.T, yt))) + self.ws_all.append(np.copy(ws)) # project to probability simplex p_res = proj_simplex(np.dot(res, ws)) # and solve again with updated residual wp = np.linalg.solve(C, np.dot(trainx.T, (yt - p_res))) w = np.linalg.solve(C.T, wp) + self.w_all.append(np.copy(w)) pred_train = p_res + np.dot(trainx, w) obj = np.linalg.norm(yt - pred_train) @@ -61,24 +65,23 @@ def fit(self, X, Y): # print training error if self.verbose: print("Epoch {} obj: {} train error: {}".format(i,obj,1.*errort/n)) - self.ws = ws - self.w = w return self + def predict(self, X): - testx = np.hstack([np.ones((X.shape[0], 1)), X]) - pred = np.dot(testx, self.w) - res = np.argmax(pred, axis = 1) - return res - + res = self.predict_proba(X) + return np.argmax(res, axis = 1) + def predict_proba(self, X): - if self.w == None: - return np.zeros(X.shape[0]) + if self.w0 == None: + raise NotImplementedError testx = np.hstack([np.ones((X.shape[0], 1)), X]) - pred = np.dot(testx, self.w) - #print(pred) - p_res = proj_simplex(pred) - return p_res + pred = np.dot(testx, self.w0) + for ws, w in zip(self.ws_all, self.w_all): + res = np.hstack([pred, np.power(pred, 2) / 2., np.power(pred, 3) / 6., np.power(pred, 4) / 24.]) + p_res = proj_simplex(np.dot(res, ws)) + pred = p_res + np.dot(testx, w) + return proj_simplex(pred) def predict_log_proba(self, X): if self.w == None: diff --git a/tests/components/classification/test_proj_logit.py b/tests/components/classification/test_proj_logit.py index 4568d67b2b..ab706f4ee1 100644 --- a/tests/components/classification/test_proj_logit.py +++ b/tests/components/classification/test_proj_logit.py @@ -10,5 +10,5 @@ class ProjLogitComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): predictions, targets = _test_classifier(ProjLogitCLassifier, dataset='iris') - self.assertAlmostEqual(0.85999999999999996, + self.assertAlmostEqual(0.98, sklearn.metrics.accuracy_score(predictions, targets)) diff --git a/tests/implementations/test_ProjLogit.py b/tests/implementations/test_ProjLogit.py index b1e4ff0abe..5b9dc0442c 100644 --- a/tests/implementations/test_ProjLogit.py +++ b/tests/implementations/test_ProjLogit.py @@ -24,8 +24,8 @@ def test_sparse_filtering(self): model = ProjLogit(max_epochs = 10, verbose = True) model.fit(trainx, trainy) - print("weights:") - print(model.w) + print("weights 0:") + print(model.w0) predicted_prob = model.predict_proba(testx) predicted2 = np.argmax(predicted_prob, axis = 1) predicted = model.predict(testx) From db130677d80b9a544f8faf686520163576e6bc67 Mon Sep 17 00:00:00 2001 From: Jost Tobias Springenberg Date: Thu, 9 Apr 2015 14:17:40 +0200 Subject: [PATCH 212/352] make constructor match the default --- ParamSklearn/components/classification/proj_logit.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/components/classification/proj_logit.py b/ParamSklearn/components/classification/proj_logit.py index f9a40ed45d..e0b1cbeb4a 100644 --- a/ParamSklearn/components/classification/proj_logit.py +++ b/ParamSklearn/components/classification/proj_logit.py @@ -13,7 +13,7 @@ class ProjLogitCLassifier(ParamSklearnClassificationAlgorithm): - def __init__(self, max_epochs = 10, random_state=None, n_jobs=1): + def __init__(self, max_epochs = 2, random_state=None, n_jobs=1): self.max_epochs = max_epochs self.estimator = None From e7c2ea090be8dbf1c058bbade389c4912caed32e Mon Sep 17 00:00:00 2001 From: Jost Tobias Springenberg Date: Thu, 9 Apr 2015 15:30:53 +0200 Subject: [PATCH 213/352] add new preprocessing method and fix a bug in testing different preprocessors --- ParamSklearn/components/preprocessing/gem.py | 57 ++++++++++++++++++++ ParamSklearn/implementations/gem.py | 46 ++++++++++++++++ ParamSklearn/util.py | 6 +-- tests/components/preprocessing/test_gem.py | 36 +++++++++++++ 4 files changed, 142 insertions(+), 3 deletions(-) create mode 100644 ParamSklearn/components/preprocessing/gem.py create mode 100644 ParamSklearn/implementations/gem.py create mode 100644 tests/components/preprocessing/test_gem.py diff --git a/ParamSklearn/components/preprocessing/gem.py b/ParamSklearn/components/preprocessing/gem.py new file mode 100644 index 0000000000..7906283cd2 --- /dev/null +++ b/ParamSklearn/components/preprocessing/gem.py @@ -0,0 +1,57 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter, UniformFloatHyperparameter + +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.implementations.gem import GEM as GEMImpl +from ParamSklearn.util import DENSE + +class GEM(ParamSklearnPreprocessingAlgorithm): + + def __init__(self, N, precond, random_state=None): + self.N = N + self.precond = precond + + def fit(self, X, Y): + self.preprocessor = GEMImpl(self.N, self.precond) + self.preprocessor.fit(X, Y) + return self + + + def transform(self, X): + return self.preprocessor.transform(X) + + + @staticmethod + def get_properties(): + return {'shortname': 'GEM', + 'name': 'Generalized Eigenvector extraction', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': False, + 'handles_dense': True, + 'input': (DENSE, ), + 'output': DENSE, + 'preferred_dtype': None} + + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + N = UniformIntegerHyperparameter("N", 5, 20, default=10) + precond = UniformFloatHyperparameter("precond", 0, 0.5, default=0.1) + cs = ConfigurationSpace() + cs.add_hyperparameter(N) + cs.add_hyperparameter(precond) + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "ParamSklearn %s" % name + diff --git a/ParamSklearn/implementations/gem.py b/ParamSklearn/implementations/gem.py new file mode 100644 index 0000000000..90b353a87c --- /dev/null +++ b/ParamSklearn/implementations/gem.py @@ -0,0 +1,46 @@ +import numpy as np +from scipy.sparse.linalg import eigs + + +class GEM(object): + + + def __init__(self, N, precond): + self.N = N + self.precond = precond + self.W = None + self.verbose = True + + + def fit(self, X, Y): + self.N = min(self.N, X.shape[1]-2) + y_max = np.max(Y) + 1 + self.W = np.zeros((X.shape[1], self.N*y_max*(y_max-1)), dtype=X.dtype) + off = 0 + for i in range(y_max): + Xi = X[Y == i] + covi = np.dot(Xi.T, Xi) + covi /= np.float32(Xi.shape[0]) + for j in range(y_max): + if j == i: + continue + if self.verbose: + print("Finding eigenvectors for pair ({}/{})".format(i,j)) + Xj = X[Y == j] + covj = np.dot(Xj.T, Xj) / np.float32(Xj.shape[0]) + E = np.linalg.pinv(np.linalg.cholesky(covj + np.eye(covj.shape[0]) * self.precond).T) + C = np.dot(np.dot(E.T, covi), E) + C2 = 0.5 * (C + C.T) + S,U = eigs(C2, self.N) + gev = np.dot(E, U[:, :self.N]) + self.W[:, off:off+self.N] = gev + off += self.N + print("DONE") + return self + + + def transform(self, X, Y=None): + features = np.maximum(np.dot(X, self.W), 0) + return features + + diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index 162556a176..f6c7aa5ccc 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -120,7 +120,7 @@ def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False): preprocessor = Preprocessor(random_state=1, **{hp.hyperparameter.name: hp.value for hp in default.values.values()}) - preprocessor.fit(X_train) + preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float32) @@ -146,7 +146,7 @@ def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False): preprocessor = Preprocessor(random_state=1, **{hp.hyperparameter.name: hp.value for hp in default.values.values()}) - preprocessor.fit(X_train) + preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float32) @@ -188,4 +188,4 @@ def _test_regressor(Regressor, dataset='diabetes'): if __name__ == "__main__": find_sklearn_classes(sklearn.base.ClassifierMixin) find_sklearn_classes(sklearn.base.RegressorMixin) - find_sklearn_classes(sklearn.base.TransformerMixin) \ No newline at end of file + find_sklearn_classes(sklearn.base.TransformerMixin) diff --git a/tests/components/preprocessing/test_gem.py b/tests/components/preprocessing/test_gem.py new file mode 100644 index 0000000000..733a985801 --- /dev/null +++ b/tests/components/preprocessing/test_gem.py @@ -0,0 +1,36 @@ +import unittest + +from ParamSklearn.components.classification.proj_logit import ProjLogitCLassifier +from ParamSklearn.components.preprocessing.gem import GEM +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, get_dataset +import sklearn.metrics + + +class GEMComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(GEM) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_classify(self): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris', + make_sparse=False) + configuration_space = GEM.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = GEM(random_state=1, + **{hp.hyperparameter.name: hp.value for hp in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + classifier = ProjLogitCLassifier(max_epochs = 5, random_state=1) + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(0.98, accuracy) + + @unittest.skip("Right now GEM cannot handle sparse arrays!") + def test_preprocessing_dtype(self): + super(GEMComponentTest, self)._test_preprocessing_dtype(GEM) From d73e490122b6945fc89f8b73fb7aa16c46b9f105 Mon Sep 17 00:00:00 2001 From: Jost Tobias Springenberg Date: Thu, 9 Apr 2015 15:40:16 +0200 Subject: [PATCH 214/352] fix a few tests --- ParamSklearn/components/preprocessing/gem.py | 2 +- ParamSklearn/implementations/gem.py | 2 +- tests/test_regression.py | 4 ++-- tests/test_textclassification.py | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/ParamSklearn/components/preprocessing/gem.py b/ParamSklearn/components/preprocessing/gem.py index 7906283cd2..72c010bef1 100644 --- a/ParamSklearn/components/preprocessing/gem.py +++ b/ParamSklearn/components/preprocessing/gem.py @@ -30,7 +30,7 @@ def get_properties(): 'handles_numerical_features': True, 'prefers_data_scaled': True, 'prefers_data_normalized': True, - 'handles_regression': True, + 'handles_regression': False, 'handles_classification': True, 'handles_multiclass': True, 'handles_multilabel': True, diff --git a/ParamSklearn/implementations/gem.py b/ParamSklearn/implementations/gem.py index 90b353a87c..edfd9f5880 100644 --- a/ParamSklearn/implementations/gem.py +++ b/ParamSklearn/implementations/gem.py @@ -9,7 +9,7 @@ def __init__(self, N, precond): self.N = N self.precond = precond self.W = None - self.verbose = True + self.verbose = False def fit(self, X, Y): diff --git a/tests/test_regression.py b/tests/test_regression.py index 4a240b4e48..adf5fe7ac9 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -79,7 +79,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(36, len(hyperparameters)) + self.assertEqual(38, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): @@ -221,4 +221,4 @@ def test_set_params(self): pass def test_get_params(self): - pass \ No newline at end of file + pass diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index b868b73abe..b6c166061f 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(82, len(hyperparameters)) + self.assertEqual(85, len(hyperparameters)) # The three parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) @@ -21,4 +21,4 @@ def test_get_hyperparameter_search_space(self): "'rescaling:strategy' does not " "exist in this configuration " "space.", cs.get_hyperparameter, - "rescaling:strategy") \ No newline at end of file + "rescaling:strategy") From c61ccf60037cb380d54fb554ec91691c328ae09b Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 10 Apr 2015 10:18:24 +0200 Subject: [PATCH 215/352] GEM doesn't seem to work for multilabel --- ParamSklearn/components/preprocessing/gem.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/components/preprocessing/gem.py b/ParamSklearn/components/preprocessing/gem.py index 72c010bef1..8c7deac191 100644 --- a/ParamSklearn/components/preprocessing/gem.py +++ b/ParamSklearn/components/preprocessing/gem.py @@ -33,7 +33,7 @@ def get_properties(): 'handles_regression': False, 'handles_classification': True, 'handles_multiclass': True, - 'handles_multilabel': True, + 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, 'handles_dense': True, From 17ac2fe325b3783b917dc184a8743154f53743b0 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 10 Apr 2015 10:18:50 +0200 Subject: [PATCH 216/352] Remove combination of gaussian_nb and feature learning from the configuration space --- ParamSklearn/classification.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index b6a3b00874..5ebd5f3e69 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -296,7 +296,8 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # which would take too long # Combinations of non-linear models with feature learning: classifiers_ = ["adaboost", "extra_trees", "gradient_boosting", - "k_nearest_neighbors", "libsvm_svc", "random_forest"] + "k_nearest_neighbors", "libsvm_svc", "random_forest", + "gaussian_nb"] feature_learning = ["kitchen_sinks", "sparse_filtering"] for c, f in product(classifiers_, feature_learning): From 2138c37b7742ec90c5dfae8640d4dcf4449bffb2 Mon Sep 17 00:00:00 2001 From: Jost Tobias Springenberg Date: Fri, 10 Apr 2015 13:02:08 +0200 Subject: [PATCH 217/352] fix potential bug if labels are not int --- ParamSklearn/implementations/gem.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/implementations/gem.py b/ParamSklearn/implementations/gem.py index edfd9f5880..c220ffd70d 100644 --- a/ParamSklearn/implementations/gem.py +++ b/ParamSklearn/implementations/gem.py @@ -14,7 +14,7 @@ def __init__(self, N, precond): def fit(self, X, Y): self.N = min(self.N, X.shape[1]-2) - y_max = np.max(Y) + 1 + y_max = int(np.max(Y) + 1) self.W = np.zeros((X.shape[1], self.N*y_max*(y_max-1)), dtype=X.dtype) off = 0 for i in range(y_max): From 75f809ac062a2ed5a721f36dd6060b7fa154d7f7 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 16 Apr 2015 16:45:03 +0200 Subject: [PATCH 218/352] Adaboost: increase n_estimators, but make them less deep --- .../components/classification/adaboost.py | 27 ++++++++++--------- .../classification/test_adaboost.py | 2 +- 2 files changed, 16 insertions(+), 13 deletions(-) diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index 806855d383..b8381ff082 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -1,5 +1,6 @@ import numpy as np import sklearn.ensemble +import sklearn.tree from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -12,7 +13,7 @@ class AdaboostClassifier(ParamSklearnClassificationAlgorithm): def __init__(self, n_estimators, learning_rate, algorithm='SAMME.R', - base_estimator=None, random_state=None): + max_depth=1, random_state=None): self.n_estimators = int(n_estimators) self.learning_rate = float(learning_rate) @@ -20,23 +21,20 @@ def __init__(self, n_estimators, learning_rate, algorithm='SAMME.R', raise ValueError("Illegal 'algorithm': %s" % algorithm) self.algorithm = algorithm self.random_state = random_state - - if base_estimator is None: - self.base_estimator = base_estimator - elif base_estimator == "None": - self.base_estimator = None - else: - raise ValueError("Illegal ") + self.max_depth = max_depth self.estimator = None def fit(self, X, Y): + base_estimator = sklearn.tree.DecisionTreeClassifier(max_depth=self.max_depth) + self.estimator = sklearn.ensemble.AdaBoostClassifier( - base_estimator=self.base_estimator, + base_estimator=base_estimator, n_estimators=self.n_estimators, learning_rate=self.learning_rate, algorithm=self.algorithm, random_state=self.random_state + ) self.estimator.fit(X, Y) return self @@ -78,15 +76,20 @@ def get_hyperparameter_search_space(dataset_properties=None): learning_rate = UniformFloatHyperparameter( name="learning_rate", lower=0.0001, upper=1, default=0.1, log=True) algorithm = Constant(name="algorithm", value="SAMME.R") - base_estimator = Constant(name="base_estimator", value="None") + #base_estimator = Constant(name="base_estimator", value="None") n_estimators = UniformIntegerHyperparameter( - name="n_estimators", lower=5, upper=50, default=10, log=False) + name="n_estimators", lower=50, upper=500, default=50, log=False) + + max_depth = UniformIntegerHyperparameter( + name="max_depth", lower=1, upper=10, default=1, log=False) + cs = ConfigurationSpace() cs.add_hyperparameter(n_estimators) cs.add_hyperparameter(learning_rate) - cs.add_hyperparameter(base_estimator) + #cs.add_hyperparameter(base_estimator) + cs.add_hyperparameter(max_depth) cs.add_hyperparameter(algorithm) return cs diff --git a/tests/components/classification/test_adaboost.py b/tests/components/classification/test_adaboost.py index 84d797f3ad..459b92c4c4 100644 --- a/tests/components/classification/test_adaboost.py +++ b/tests/components/classification/test_adaboost.py @@ -20,5 +20,5 @@ def test_default_configuration_digits(self): predictions, targets = \ _test_classifier(classifier=AdaboostClassifier, dataset='digits') - self.assertAlmostEqual(0.56527018822100794, + self.assertAlmostEqual(0.6915604128718883, sklearn.metrics.accuracy_score(predictions, targets)) From 271d3c66d8383bb4c22d6e899152aab5f12247c0 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 16 Apr 2015 16:45:33 +0200 Subject: [PATCH 219/352] kNN: fix bug that kNN does not use its hyperparameters --- .../classification/k_nearest_neighbors.py | 25 +++++++++++-------- .../classification/test_k_nearest_neighbor.py | 11 ++++++-- 2 files changed, 23 insertions(+), 13 deletions(-) diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index 5b72adde09..5df578e4b6 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -11,7 +11,7 @@ class KNearestNeighborsClassifier(ParamSklearnClassificationAlgorithm): - def __init__(self, n_neighbors, weights, metric, algorithm='auto', p=2, + def __init__(self, n_neighbors, weights, algorithm='auto', p=2, leaf_size=30, random_state=None): self.n_neighbors = int(n_neighbors) @@ -19,10 +19,11 @@ def __init__(self, n_neighbors, weights, metric, algorithm='auto', p=2, raise ValueError("'weights' should be in ('uniform', 'distance'): " "%s" % weights) self.weights = weights - if metric not in ("euclidean", "manhattan", "chebyshev", "minkowski"): - raise ValueError("'metric' should be in ('euclidean', 'chebyshev', " - "'manhattan', 'minkowski'): %s" % metric) - self.metric = metric + #if metric not in ("euclidean", "manhattan", "chebyshev", "minkowski"): + # raise ValueError("'metric' should be in ('euclidean', + # 'chebyshev', " + # "'manhattan', 'minkowski'): %s" % metric) + #self.metric = metric self.algorithm = algorithm self.p = int(p) self.leaf_size = int(leaf_size) @@ -30,7 +31,10 @@ def __init__(self, n_neighbors, weights, metric, algorithm='auto', p=2, def fit(self, X, Y): self.estimator = \ - sklearn.neighbors.KNeighborsClassifier() + sklearn.neighbors.KNeighborsClassifier( + n_neighbors=self.n_neighbors, weights=self.weights, + p=self.p, algorithm=self.algorithm, + leaf_size=self.leaf_size) self.estimator.fit(X, Y) return self @@ -72,7 +76,6 @@ def get_hyperparameter_search_space(dataset_properties=None): name="n_neighbors", lower=1, upper=100, default=1) weights = CategoricalHyperparameter( name="weights", choices=["uniform", "distance"], default="uniform") - metric = UnParametrizedHyperparameter(name="metric", value="minkowski") algorithm = Constant(name='algorithm', value="auto") p = CategoricalHyperparameter( name="p", choices=[1, 2, 5], default=2) @@ -80,18 +83,18 @@ def get_hyperparameter_search_space(dataset_properties=None): # Unparametrized # TODO: If we further parametrize 'metric' we need more metric params - metric = UnParametrizedHyperparameter(name="metric", value="minkowski") + #metric = UnParametrizedHyperparameter(name="metric", value="minkowski") cs = ConfigurationSpace() cs.add_hyperparameter(n_neighbors) cs.add_hyperparameter(weights) - cs.add_hyperparameter(metric) + #cs.add_hyperparameter(metric) cs.add_hyperparameter(algorithm) cs.add_hyperparameter(p) cs.add_hyperparameter(leaf_size) # Conditions - metric_p = EqualsCondition(parent=metric, child=p, value="minkowski") - cs.add_condition(metric_p) + #metric_p = EqualsCondition(parent=metric, child=p, value="minkowski") + #cs.add_condition(metric_p) return cs diff --git a/tests/components/classification/test_k_nearest_neighbor.py b/tests/components/classification/test_k_nearest_neighbor.py index 47c924c6e3..8c10c3ecb7 100644 --- a/tests/components/classification/test_k_nearest_neighbor.py +++ b/tests/components/classification/test_k_nearest_neighbor.py @@ -2,7 +2,7 @@ from ParamSklearn.components.classification.k_nearest_neighbors import \ KNearestNeighborsClassifier -from ParamSklearn.util import _test_classifier +from ParamSklearn.util import _test_classifier, _test_classifier_predict_proba import sklearn.metrics @@ -13,4 +13,11 @@ def test_default_configuration(self): predictions, targets = \ _test_classifier(KNearestNeighborsClassifier) self.assertAlmostEqual(0.959999999999999, - sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file + sklearn.metrics.accuracy_score(predictions, targets)) + + def test_default_configuration_predict_proba(self): + for i in range(10): + predictions, targets = \ + _test_classifier_predict_proba(KNearestNeighborsClassifier) + self.assertAlmostEqual(1.381551055796429, + sklearn.metrics.log_loss(targets, predictions)) \ No newline at end of file From bd4c33344e39a74300b105a78100f8c5c603ada5 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 16 Apr 2015 16:46:26 +0200 Subject: [PATCH 220/352] Improve some tests --- .../classification/test_proj_logit.py | 7 ++++ tests/components/classification/test_sgd.py | 10 +++++- tests/components/preprocessing/test_gem.py | 33 ++++++++++--------- 3 files changed, 33 insertions(+), 17 deletions(-) diff --git a/tests/components/classification/test_proj_logit.py b/tests/components/classification/test_proj_logit.py index ab706f4ee1..bae277679d 100644 --- a/tests/components/classification/test_proj_logit.py +++ b/tests/components/classification/test_proj_logit.py @@ -12,3 +12,10 @@ def test_default_configuration(self): predictions, targets = _test_classifier(ProjLogitCLassifier, dataset='iris') self.assertAlmostEqual(0.98, sklearn.metrics.accuracy_score(predictions, targets)) + + def test_default_configuration_digits(self): + for i in range(10): + predictions, targets = _test_classifier(ProjLogitCLassifier, + dataset='digits') + self.assertAlmostEqual(0.8986035215543412, + sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file diff --git a/tests/components/classification/test_sgd.py b/tests/components/classification/test_sgd.py index be81c9ae62..62b19a3ce6 100644 --- a/tests/components/classification/test_sgd.py +++ b/tests/components/classification/test_sgd.py @@ -6,10 +6,18 @@ import sklearn.metrics -class RandomForestComponentTest(unittest.TestCase): +class SGDComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): predictions, targets = _test_classifier(SGD, dataset='iris') self.assertAlmostEqual(0.96, + sklearn.metrics.accuracy_score(predictions, + targets)) + + def test_default_configuration_digits(self): + for i in range(10): + predictions, targets = \ + _test_classifier(classifier=SGD, dataset='digits') + self.assertAlmostEqual(0.89313904068002425, sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file diff --git a/tests/components/preprocessing/test_gem.py b/tests/components/preprocessing/test_gem.py index 733a985801..6263e3b0e2 100644 --- a/tests/components/preprocessing/test_gem.py +++ b/tests/components/preprocessing/test_gem.py @@ -13,23 +13,24 @@ def test_default_configuration(self): self.assertFalse((transformation == 0).all()) def test_default_configuration_classify(self): - X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris', - make_sparse=False) - configuration_space = GEM.get_hyperparameter_search_space() - default = configuration_space.get_default_configuration() - preprocessor = GEM(random_state=1, - **{hp.hyperparameter.name: hp.value for hp in - default.values.values()}) - preprocessor.fit(X_train, Y_train) - X_train_trans = preprocessor.transform(X_train) - X_test_trans = preprocessor.transform(X_test) + for i in range(3): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=False) + configuration_space = GEM.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = GEM(random_state=1, + **{hp.hyperparameter.name: hp.value for hp in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) - # fit a classifier on top - classifier = ProjLogitCLassifier(max_epochs = 5, random_state=1) - predictor = classifier.fit(X_train_trans, Y_train) - predictions = predictor.predict(X_test_trans) - accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) - self.assertAlmostEqual(0.98, accuracy) + # fit a classifier on top + classifier = ProjLogitCLassifier(max_epochs = 5, random_state=1) + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertGreaterEqual(accuracy, 0.94) @unittest.skip("Right now GEM cannot handle sparse arrays!") def test_preprocessing_dtype(self): From 428540eff587473b200a6a485f5029b4b1e9d1af Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 16 Apr 2015 16:46:59 +0200 Subject: [PATCH 221/352] mend --- ParamSklearn/util.py | 13 +++++++++++++ misc/classifiers.csv | 20 ++++++++++---------- source/first_steps.rst | 4 ++-- tests/test_classification.py | 11 ++++++++++- tests/test_regression.py | 2 +- tests/test_textclassification.py | 2 +- 6 files changed, 37 insertions(+), 15 deletions(-) diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index f6c7aa5ccc..c0f5375327 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -94,6 +94,19 @@ def _test_classifier(classifier, dataset='iris'): return predictions, Y_test +def _test_classifier_predict_proba(classifier, dataset='iris'): + X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, + make_sparse=False) + configuration_space = classifier.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + classifier = classifier(random_state=1, + **{hp.hyperparameter.name: hp.value for hp in + default.values.values()}) + predictor = classifier.fit(X_train, Y_train) + predictions = predictor.predict_proba(X_test) + return predictions, Y_test + + def _test_preprocessing(Preprocessor, dataset='iris', make_sparse=False): X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, make_sparse=make_sparse) diff --git a/misc/classifiers.csv b/misc/classifiers.csv index 3a4053209e..22b0103946 100644 --- a/misc/classifiers.csv +++ b/misc/classifiers.csv @@ -7,13 +7,13 @@ class,added,comment ,False,Scikit-learn source code says: This class should not be used directly ,True, ,True, -,FALSE,This can blow up the configuration space; because we need to define a configured base object. Maybe consider later. -,, +,True,Added with decision stumps(up to depth ten) +,True, ,False,Mixin but no full model -,, -,, -,, -,, +,False,Is implemented using LibLinear +,True,I don't know how similar to SGD this one is +,False,n fact, Perceptron() is equivalent to SGDClassifier(...) +,True, ,, ,False,This class has abstract methods ,True, @@ -28,15 +28,15 @@ class,added,comment ,True, ,True, ,True, -,,Can crash when there is no neighbour within the radius -,, -,, +,False,Has no predict_proba method, method cannot be easily added +,False,Has no predict_proba method, method cannot be easily added +,True, ,False,semi-supervised learning ,False,semi-supervised learning ,False,semi-supervised learning ,False,ABC for LibSVM-based classifiers ,True, -,, +,False,Equivalent to SVC ,True, ,False,This classifier is in a test module ,FALSE,Rfs are considered better (and are most likely faster to train) diff --git a/source/first_steps.rst b/source/first_steps.rst index 9bb492f11c..5b349fe57b 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -18,10 +18,10 @@ configuration on the iris dataset. >>> np.random.seed(1) >>> np.random.shuffle(indices) >>> configuration_space = ParamSklearnClassifier.get_hyperparameter_search_space() - >>> sampler = RandomSampler(configuration_space, 5) + >>> sampler = RandomSampler(configuration_space, 4) >>> configuration = sampler.sample_configuration() >>> cls = ParamSklearnClassifier(configuration, random_state=1) >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.93999999999999995 + 0.59999999999999998 diff --git a/tests/test_classification.py b/tests/test_classification.py index c14e6054f6..e49cb55a7f 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -84,6 +84,7 @@ def test_configurations(self): if "Floating-point under-/overflow occurred at epoch" in e.message: continue else: + print config raise e def test_configurations_sparse(self): @@ -102,6 +103,14 @@ def test_configurations_sparse(self): if "Floating-point under-/overflow occurred at epoch" in e.message: continue else: + print config + raise e + except AttributeError as e: + # Some error in QDA + if "log" == e.message: + continue + else: + print config raise e def test_get_hyperparameter_search_space(self): @@ -109,7 +118,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(83, len(hyperparameters)) + self.assertEqual(96, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 4, len(conditions)) diff --git a/tests/test_regression.py b/tests/test_regression.py index adf5fe7ac9..f3e139c47c 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -79,7 +79,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(38, len(hyperparameters)) + self.assertEqual(36, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index b6c166061f..a07984a5cf 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(85, len(hyperparameters)) + self.assertEqual(95, len(hyperparameters)) # The three parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) From 3ecb8722577c5cbd38912b9bcce837771f4723d2 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 16 Apr 2015 16:47:43 +0200 Subject: [PATCH 222/352] Add missing classifiers --- ParamSklearn/components/classification/lda.py | 65 ++++++++++++++++ .../classification/passive_aggresive.py | 77 ++++++++++++++++++ ParamSklearn/components/classification/qda.py | 64 +++++++++++++++ .../components/classification/ridge.py | 78 +++++++++++++++++++ tests/components/classification/test_lda.py | 24 ++++++ .../classification/test_passive_aggressive.py | 23 ++++++ tests/components/classification/test_qda.py | 25 ++++++ tests/components/classification/test_ridge.py | 23 ++++++ 8 files changed, 379 insertions(+) create mode 100644 ParamSklearn/components/classification/lda.py create mode 100644 ParamSklearn/components/classification/passive_aggresive.py create mode 100644 ParamSklearn/components/classification/qda.py create mode 100644 ParamSklearn/components/classification/ridge.py create mode 100644 tests/components/classification/test_lda.py create mode 100644 tests/components/classification/test_passive_aggressive.py create mode 100644 tests/components/classification/test_qda.py create mode 100644 tests/components/classification/test_ridge.py diff --git a/ParamSklearn/components/classification/lda.py b/ParamSklearn/components/classification/lda.py new file mode 100644 index 0000000000..842047f2b0 --- /dev/null +++ b/ParamSklearn/components/classification/lda.py @@ -0,0 +1,65 @@ +import sklearn.lda + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter + +from ParamSklearn.components.classification_base import \ + ParamSklearnClassificationAlgorithm +from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS + + +class LDA(ParamSklearnClassificationAlgorithm): + def __init__(self, n_components, tol, random_state=None): + self.n_components = int(n_components) + self.tol = float(tol) + self.estimator = None + + def fit(self, X, Y): + + self.estimator = sklearn.lda.LDA(n_components=self.n_components) + self.estimator.fit(X, Y, tol=self.tol) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + + df = self.estimator.predict_proba(X) + + @staticmethod + def get_properties(): + return {'shortname': 'LDA', + 'name': 'Linear Discriminant Analysis', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # Find out if this is good because of sparsity + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, + # TODO find out what is best used here! + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + n_components = UniformIntegerHyperparameter('n_components', 1, 250, + default=10) + tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default=1e-4, + log=True) + cs = ConfigurationSpace() + cs.add_hyperparameter(n_components) + cs.add_hyperparameter(tol) + return cs diff --git a/ParamSklearn/components/classification/passive_aggresive.py b/ParamSklearn/components/classification/passive_aggresive.py new file mode 100644 index 0000000000..070845bfd7 --- /dev/null +++ b/ParamSklearn/components/classification/passive_aggresive.py @@ -0,0 +1,77 @@ +from sklearn.linear_model.passive_aggressive import PassiveAggressiveClassifier + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter, UnParametrizedHyperparameter, \ + UniformIntegerHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition + +from ParamSklearn.components.classification_base import \ + ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.implementations.util import softmax + + +class PassiveAggressive(ParamSklearnClassificationAlgorithm): + def __init__(self, C, fit_intercept, n_iter, loss, random_state=None): + self.C = float(C) + self.fit_intercept = bool(fit_intercept) + self.n_iter = int(n_iter) + self.loss = loss + self.random_state = random_state + self.estimator = None + + def fit(self, X, Y): + self.estimator = PassiveAggressiveClassifier( + C=self.C, fit_intercept=self.fit_intercept, n_iter=self.n_iter, + loss=self.loss, shuffle=True, random_state=self.random_state) + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + + df = self.estimator.decision_function(X) + return softmax(df) + + @staticmethod + def get_properties(): + return {'shortname': 'PassiveAggressive Classifier', + 'name': 'Passive Aggressive Stochastic Gradient Descent ' + 'Classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': True, + 'input': (DENSE, SPARSE), + 'output': PREDICTIONS, + # TODO find out what is best used here! + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + loss = CategoricalHyperparameter("loss", + ["hinge", "squared_hinge"], + default="hinge") + fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True") + n_iter = UniformIntegerHyperparameter("n_iter", 5, 1000, default=20) + C = UniformFloatHyperparameter("C", 1e-5, 10, 1, log=True) + cs = ConfigurationSpace() + cs.add_hyperparameter(loss) + cs.add_hyperparameter(fit_intercept) + cs.add_hyperparameter(n_iter) + cs.add_hyperparameter(C) + return cs diff --git a/ParamSklearn/components/classification/qda.py b/ParamSklearn/components/classification/qda.py new file mode 100644 index 0000000000..5a4f7700c4 --- /dev/null +++ b/ParamSklearn/components/classification/qda.py @@ -0,0 +1,64 @@ +import sklearn.qda + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter + +from ParamSklearn.components.classification_base import \ + ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS + + +class QDA(ParamSklearnClassificationAlgorithm): + def __init__(self, reg_param, tol, random_state=None): + self.reg_param = float(reg_param) + self.tol = float(tol) + self.estimator = None + + def fit(self, X, Y): + + self.estimator = sklearn.qda.QDA(self.reg_param) + self.estimator.fit(X, Y, tol=self.tol) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + + df = self.estimator.predict_proba(X) + + @staticmethod + def get_properties(): + return {'shortname': 'QDA', + 'name': 'Quadratic Discriminant Analysis', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # Find out if this is good because of sparsity + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, + # TODO find out what is best used here! + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + n_components = UniformFloatHyperparameter('reg_param', 0.0, 10.0, + default=0.5) + tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default=1e-4, + log=True) + cs = ConfigurationSpace() + cs.add_hyperparameter(n_components) + cs.add_hyperparameter(tol) + return cs diff --git a/ParamSklearn/components/classification/ridge.py b/ParamSklearn/components/classification/ridge.py new file mode 100644 index 0000000000..ac7d4966a2 --- /dev/null +++ b/ParamSklearn/components/classification/ridge.py @@ -0,0 +1,78 @@ +from sklearn.linear_model.ridge import RidgeClassifier + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter, UnParametrizedHyperparameter, \ + UniformIntegerHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition + +from ParamSklearn.components.classification_base import \ + ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.implementations.util import softmax + + +class Ridge(ParamSklearnClassificationAlgorithm): + def __init__(self, alpha, fit_intercept, tol, random_state=None): + self.alpha = float(alpha) + self.fit_intercept = bool(fit_intercept) + self.tol = float(tol) + self.random_state = random_state + self.estimator = None + + def fit(self, X, Y): + self.estimator = RidgeClassifier(alpha=self.alpha, + fit_intercept=self.fit_intercept, + tol=self.tol) + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + + df = self.estimator.decision_function(X) + return softmax(df) + + @staticmethod + def get_properties(): + return {'shortname': 'Rigde Classifier', + 'name': 'Rigde Classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': True, + 'input': (DENSE, SPARSE), + 'output': PREDICTIONS, + # TODO find out what is best used here! + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + alpha = UniformFloatHyperparameter("alpha", 10 ** -7, 10 ** -1, + log=True, default=0.0001) + fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True") + tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default=1e-4, + log=True) + + cs = ConfigurationSpace() + cs.add_hyperparameter(alpha) + cs.add_hyperparameter(fit_intercept) + cs.add_hyperparameter(tol) + + return cs + + def __str__(self): + return "ParamSklearn Ridge Classifier" diff --git a/tests/components/classification/test_lda.py b/tests/components/classification/test_lda.py new file mode 100644 index 0000000000..e76c3523a1 --- /dev/null +++ b/tests/components/classification/test_lda.py @@ -0,0 +1,24 @@ +import unittest + +from ParamSklearn.components.classification.lda import LDA +from ParamSklearn.util import _test_classifier + +import sklearn.metrics + + +class LDAComponentTest(unittest.TestCase): + def test_default_configuration_iris(self): + for i in range(10): + predictions, targets = \ + _test_classifier(LDA) + self.assertAlmostEqual(1.0, + sklearn.metrics.accuracy_score(predictions, + targets)) + + def test_default_configuration_digits(self): + for i in range(10): + predictions, targets = \ + _test_classifier(classifier=LDA, dataset='digits') + self.assertAlmostEqual(0.88585306618093507, + sklearn.metrics.accuracy_score(predictions, + targets)) diff --git a/tests/components/classification/test_passive_aggressive.py b/tests/components/classification/test_passive_aggressive.py new file mode 100644 index 0000000000..d3e00f9400 --- /dev/null +++ b/tests/components/classification/test_passive_aggressive.py @@ -0,0 +1,23 @@ +import unittest + +from ParamSklearn.components.classification.passive_aggresive import PassiveAggressive +from ParamSklearn.util import _test_classifier + +import sklearn.metrics + + +class SGDComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_classifier(PassiveAggressive, dataset='iris') + self.assertAlmostEqual(0.92, + sklearn.metrics.accuracy_score(predictions, + targets)) + + def test_default_configuration_digits(self): + for i in range(10): + predictions, targets = \ + _test_classifier(classifier=PassiveAggressive, dataset='digits') + self.assertAlmostEqual(0.91317547055251969, + sklearn.metrics.accuracy_score(predictions, + targets)) \ No newline at end of file diff --git a/tests/components/classification/test_qda.py b/tests/components/classification/test_qda.py new file mode 100644 index 0000000000..4dc16e4619 --- /dev/null +++ b/tests/components/classification/test_qda.py @@ -0,0 +1,25 @@ +import unittest + +from ParamSklearn.components.classification.qda import QDA +from ParamSklearn.util import _test_classifier + +import sklearn.metrics + + +class QDAComponentTest(unittest.TestCase): + def test_default_configuration_iris(self): + for i in range(10): + predictions, targets = \ + _test_classifier(QDA) + self.assertAlmostEqual(1.0, + sklearn.metrics.accuracy_score(predictions, + targets)) + + @unittest.skip("QDA fails on this one") + def test_default_configuration_digits(self): + for i in range(10): + predictions, targets = \ + _test_classifier(classifier=QDA, dataset='digits') + self.assertAlmostEqual(0.88585306618093507, + sklearn.metrics.accuracy_score(predictions, + targets)) diff --git a/tests/components/classification/test_ridge.py b/tests/components/classification/test_ridge.py new file mode 100644 index 0000000000..99986a5da8 --- /dev/null +++ b/tests/components/classification/test_ridge.py @@ -0,0 +1,23 @@ +import unittest + +from ParamSklearn.components.classification.ridge import Ridge +from ParamSklearn.util import _test_classifier + +import sklearn.metrics + + +class SGDComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_classifier(Ridge, dataset='iris') + self.assertAlmostEqual(0.9, + sklearn.metrics.accuracy_score(predictions, + targets)) + + def test_default_configuration_digits(self): + for i in range(10): + predictions, targets = \ + _test_classifier(classifier=Ridge, dataset='digits') + self.assertAlmostEqual(0.8682452944748027, + sklearn.metrics.accuracy_score(predictions, + targets)) \ No newline at end of file From 2b82a0f66a851603acf9b510265be9b3814143bb Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 17 Apr 2015 09:16:17 +0200 Subject: [PATCH 223/352] kNN: remove illegal configuration for sparse data --- .../components/classification/k_nearest_neighbors.py | 7 +++++-- .../components/classification/test_k_nearest_neighbor.py | 8 ++++++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index 5df578e4b6..f5f9eeb6f4 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -71,14 +71,17 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - n_neighbors = UniformIntegerHyperparameter( name="n_neighbors", lower=1, upper=100, default=1) weights = CategoricalHyperparameter( name="weights", choices=["uniform", "distance"], default="uniform") algorithm = Constant(name='algorithm', value="auto") + if dataset_properties.get('sparse'): + p_choices = [1, 2, 5] + else: + p_choices = [1, 2] p = CategoricalHyperparameter( - name="p", choices=[1, 2, 5], default=2) + name="p", choices=p_choices, default=2) leaf_size = Constant(name="leaf_size", value=30) # Unparametrized diff --git a/tests/components/classification/test_k_nearest_neighbor.py b/tests/components/classification/test_k_nearest_neighbor.py index 8c10c3ecb7..0c62dc6a81 100644 --- a/tests/components/classification/test_k_nearest_neighbor.py +++ b/tests/components/classification/test_k_nearest_neighbor.py @@ -15,6 +15,14 @@ def test_default_configuration(self): self.assertAlmostEqual(0.959999999999999, sklearn.metrics.accuracy_score(predictions, targets)) + def test_default_configuration_sparse_data(self): + for i in range(10): + predictions, targets = \ + _test_classifier(KNearestNeighborsClassifier, sparse=True) + self.assertAlmostEqual(0.71999999999999997, + sklearn.metrics.accuracy_score(predictions, + targets)) + def test_default_configuration_predict_proba(self): for i in range(10): predictions, targets = \ From b9e83ac2541e9c95ddfc68c2591a09130408bdd2 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 17 Apr 2015 09:22:15 +0200 Subject: [PATCH 224/352] Fix the previous commit --- .../classification/k_nearest_neighbors.py | 6 +++--- ParamSklearn/util.py | 17 +++++++++-------- .../classification/test_k_nearest_neighbor.py | 2 +- 3 files changed, 13 insertions(+), 12 deletions(-) diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index f5f9eeb6f4..b468639ad5 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -76,10 +76,10 @@ def get_hyperparameter_search_space(dataset_properties=None): weights = CategoricalHyperparameter( name="weights", choices=["uniform", "distance"], default="uniform") algorithm = Constant(name='algorithm', value="auto") - if dataset_properties.get('sparse'): - p_choices = [1, 2, 5] - else: + if dataset_properties is not None and dataset_properties.get('sparse'): p_choices = [1, 2] + else: + p_choices = [1, 2, 5] p = CategoricalHyperparameter( name="p", choices=p_choices, default=2) leaf_size = Constant(name="leaf_size", value=30) diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index c0f5375327..9ad20c5e1b 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -65,26 +65,27 @@ def get_dataset(dataset='iris', make_sparse=False, add_NaNs=False): Y_test = Y[train_size:] if add_NaNs: - mask = np.random.choice([True, False], size=(X_train.shape)) + mask = rs.choice([True, False], size=(X_train.shape)) X_train[mask] = np.NaN if make_sparse: X_train[:,0] = 0 - X_train[np.random.random(X_train.shape) > 0.5] = 0 + X_train[rs.random_sample(X_train.shape) > 0.5] = 0 X_train = scipy.sparse.csc_matrix(X_train) X_train.eliminate_zeros() X_test[:,0] = 0 - X_test[np.random.random(X_test.shape) > 0.5] = 0 + X_test[rs.random_sample(X_test.shape) > 0.5] = 0 X_test = scipy.sparse.csc_matrix(X_test) X_test.eliminate_zeros() return X_train, Y_train, X_test, Y_test -def _test_classifier(classifier, dataset='iris'): +def _test_classifier(classifier, dataset='iris', sparse=False): X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, - make_sparse=False) - configuration_space = classifier.get_hyperparameter_search_space() + make_sparse=sparse) + configuration_space = classifier.get_hyperparameter_search_space( + dataset_properties={'sparse': sparse}) default = configuration_space.get_default_configuration() classifier = classifier(random_state=1, **{hp.hyperparameter.name: hp.value for hp in @@ -94,9 +95,9 @@ def _test_classifier(classifier, dataset='iris'): return predictions, Y_test -def _test_classifier_predict_proba(classifier, dataset='iris'): +def _test_classifier_predict_proba(classifier, dataset='iris', sparse=False): X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, - make_sparse=False) + make_sparse=sparse) configuration_space = classifier.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() classifier = classifier(random_state=1, diff --git a/tests/components/classification/test_k_nearest_neighbor.py b/tests/components/classification/test_k_nearest_neighbor.py index 0c62dc6a81..ccb7e96091 100644 --- a/tests/components/classification/test_k_nearest_neighbor.py +++ b/tests/components/classification/test_k_nearest_neighbor.py @@ -19,7 +19,7 @@ def test_default_configuration_sparse_data(self): for i in range(10): predictions, targets = \ _test_classifier(KNearestNeighborsClassifier, sparse=True) - self.assertAlmostEqual(0.71999999999999997, + self.assertAlmostEqual(0.82, sklearn.metrics.accuracy_score(predictions, targets)) From 7152f483a244331e38b36275c7d165dd0c2e6fc5 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 17 Apr 2015 09:24:35 +0200 Subject: [PATCH 225/352] Select percentile: loosen percentile hyperparameter --- .../preprocessing/select_percentile_classification.py | 2 +- .../components/preprocessing/select_percentile_regression.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ParamSklearn/components/preprocessing/select_percentile_classification.py b/ParamSklearn/components/preprocessing/select_percentile_classification.py index 7ab6d58a23..33718172cc 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_classification.py +++ b/ParamSklearn/components/preprocessing/select_percentile_classification.py @@ -51,7 +51,7 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): percentile = UniformFloatHyperparameter( - name="percentile", lower=10, upper=90, default=50) + name="percentile", lower=1, upper=99, default=50) score_func = CategoricalHyperparameter( name="score_func", choices=["chi2", "f_classif"], default="chi2") diff --git a/ParamSklearn/components/preprocessing/select_percentile_regression.py b/ParamSklearn/components/preprocessing/select_percentile_regression.py index 0ac5e668ce..808eb777c7 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_regression.py +++ b/ParamSklearn/components/preprocessing/select_percentile_regression.py @@ -49,7 +49,7 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): percentile = UniformFloatHyperparameter( - "percentile", lower=10, upper=90, default=50) + "percentile", lower=1, upper=99, default=50) score_func = UnParametrizedHyperparameter( name="score_func", value="f_regression") From 77cc0f3afd6778096a49820ae62bba0630481fc2 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 17 Apr 2015 09:25:00 +0200 Subject: [PATCH 226/352] Add: no_preprocessing and normalizer --- .../components/preprocessing/rescaling.py | 17 ++- ParamSklearn/implementations/Normalizer.py | 139 ++++++++++++++++++ 2 files changed, 154 insertions(+), 2 deletions(-) create mode 100644 ParamSklearn/implementations/Normalizer.py diff --git a/ParamSklearn/components/preprocessing/rescaling.py b/ParamSklearn/components/preprocessing/rescaling.py index 7d500ce1f8..43bf8fc67d 100644 --- a/ParamSklearn/components/preprocessing/rescaling.py +++ b/ParamSklearn/components/preprocessing/rescaling.py @@ -1,14 +1,22 @@ -from scipy import sparse from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from ParamSklearn.implementations.StandardScaler import StandardScaler from ParamSklearn.implementations.MinMaxScaler import MinMaxScaler +from ParamSklearn.implementations.Normalizer import Normalizer from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import DENSE, SPARSE, INPUT +class none(object): + def fit(self, X, y=None): + return self + + def transform(self, X): + return X + + class Rescaling(ParamSklearnPreprocessingAlgorithm): def __init__(self, strategy, random_state=None): # TODO pay attention to the cases when a copy is made @@ -19,6 +27,10 @@ def fit(self, X, Y=None): self.preprocessor = MinMaxScaler(copy=False) elif self.strategy == "standard": self.preprocessor = StandardScaler(copy=False) + elif self.strategy == 'none': + self.preprocessor = none() + elif self.strategy == 'normalize': + self.preprocessor = Normalizer(norm='l2', copy=True) else: raise ValueError(self.strategy) self.preprocessor.fit(X) @@ -55,7 +67,8 @@ def get_properties(): def get_hyperparameter_search_space(dataset_properties=None): # TODO add replace by zero! strategy = CategoricalHyperparameter( - "strategy", ["min/max", "standard"], default="min/max") + "strategy", ["min/max", "standard", "none", "normalize"], + default="min/max") cs = ConfigurationSpace() cs.add_hyperparameter(strategy) return cs diff --git a/ParamSklearn/implementations/Normalizer.py b/ParamSklearn/implementations/Normalizer.py new file mode 100644 index 0000000000..97bec74e72 --- /dev/null +++ b/ParamSklearn/implementations/Normalizer.py @@ -0,0 +1,139 @@ +from sklearn.base import BaseEstimator, TransformerMixin +from sklearn.utils import check_arrays +from sklearn.utils import atleast2d_or_csr +from sklearn.utils import warn_if_not_float +from sklearn.utils.extmath import row_norms +from sklearn.utils.sparsefuncs_fast import inplace_csr_row_normalize_l1 +from sklearn.utils.sparsefuncs_fast import inplace_csr_row_normalize_l2 + +import numpy as np +from scipy import sparse + +def normalize(X, norm='l2', axis=1, copy=True): + """Normalize a dataset along any axis + + Parameters + ---------- + X : array or scipy.sparse matrix with shape [n_samples, n_features] + The data to normalize, element by element. + scipy.sparse matrices should be in CSR format to avoid an + un-necessary copy. + + norm : 'l1' or 'l2', optional ('l2' by default) + The norm to use to normalize each non zero sample (or each non-zero + feature if axis is 0). + + axis : 0 or 1, optional (1 by default) + axis used to normalize the data along. If 1, independently normalize + each sample, otherwise (if 0) normalize each feature. + + copy : boolean, optional, default is True + set to False to perform inplace row normalization and avoid a + copy (if the input is already a numpy array or a scipy.sparse + CSR matrix and if axis is 1). + + See also + -------- + :class:`sklearn.preprocessing.Normalizer` to perform normalization + using the ``Transformer`` API (e.g. as part of a preprocessing + :class:`sklearn.pipeline.Pipeline`) + """ + if norm not in ('l1', 'l2'): + raise ValueError("'%s' is not a supported norm" % norm) + + if axis == 0: + sparse_format = 'csc' + elif axis == 1: + sparse_format = 'csr' + else: + raise ValueError("'%d' is not a supported axis" % axis) + + X = check_arrays(X, sparse_format=sparse_format, copy=copy, + dtype=np.float64)[0] + warn_if_not_float(X, 'The normalize function') + if axis == 0: + X = X.T + + if sparse.issparse(X): + if norm == 'l1': + inplace_csr_row_normalize_l1(X) + elif norm == 'l2': + inplace_csr_row_normalize_l2(X) + else: + if norm == 'l1': + norms = np.abs(X).sum(axis=1) + norms[norms == 0.0] = 1.0 + elif norm == 'l2': + norms = row_norms(X) + norms[norms == 0.0] = 1.0 + X /= norms[:, np.newaxis] + + if axis == 0: + X = X.T + + return X + + +class Normalizer(BaseEstimator, TransformerMixin): + """Normalize samples individually to unit norm + + Each sample (i.e. each row of the data matrix) with at least one + non zero component is rescaled independently of other samples so + that its norm (l1 or l2) equals one. + + This transformer is able to work both with dense numpy arrays and + scipy.sparse matrix (use CSR format if you want to avoid the burden of + a copy / conversion). + + Scaling inputs to unit norms is a common operation for text + classification or clustering for instance. For instance the dot + product of two l2-normalized TF-IDF vectors is the cosine similarity + of the vectors and is the base similarity metric for the Vector + Space Model commonly used by the Information Retrieval community. + + Parameters + ---------- + norm : 'l1' or 'l2', optional ('l2' by default) + The norm to use to normalize each non zero sample. + + copy : boolean, optional, default is True + set to False to perform inplace row normalization and avoid a + copy (if the input is already a numpy array or a scipy.sparse + CSR matrix). + + Notes + ----- + This estimator is stateless (besides constructor parameters), the + fit method does nothing but is useful when used in a pipeline. + + See also + -------- + :func:`sklearn.preprocessing.normalize` equivalent function + without the object oriented API + """ + + def __init__(self, norm='l2', copy=True): + self.norm = norm + self.copy = copy + + def fit(self, X, y=None): + """Do nothing and return the estimator unchanged + + This method is just there to implement the usual API and hence + work in pipelines. + """ + atleast2d_or_csr(X) + return self + + def transform(self, X, y=None, copy=None): + """Scale each non zero row of X to unit norm + + Parameters + ---------- + X : array or scipy.sparse matrix with shape [n_samples, n_features] + The data to normalize, row by row. scipy.sparse matrices should be + in CSR format to avoid an un-necessary copy. + """ + copy = copy if copy is not None else self.copy + atleast2d_or_csr(X) + return normalize(X, norm=self.norm, axis=1, copy=copy) From f5ed072ab460dfac92f1f492c480f493f838026d Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 17 Apr 2015 09:26:11 +0200 Subject: [PATCH 227/352] Add DecisionTreeClassifier --- .../classification/decision_tree.py | 99 +++++++++++++++++++ misc/classifiers.csv | 4 +- source/first_steps.rst | 2 +- .../classification/test_decision_tree.py | 23 +++++ tests/test_classification.py | 16 ++- tests/test_textclassification.py | 2 +- 6 files changed, 141 insertions(+), 5 deletions(-) create mode 100644 ParamSklearn/components/classification/decision_tree.py create mode 100644 tests/components/classification/test_decision_tree.py diff --git a/ParamSklearn/components/classification/decision_tree.py b/ParamSklearn/components/classification/decision_tree.py new file mode 100644 index 0000000000..1c6c9ff06f --- /dev/null +++ b/ParamSklearn/components/classification/decision_tree.py @@ -0,0 +1,99 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from ParamSklearn.components.classification_base import \ + ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS +# get our own forests to replace the sklearn ones +from sklearn.tree import DecisionTreeClassifier + + +class DecisionTree(ParamSklearnClassificationAlgorithm): + def __init__(self, criterion, max_features, max_depth, + min_samples_split, min_samples_leaf, + max_leaf_nodes, random_state=None): + self.criterion = criterion + self.max_features = float(max_features) + + if max_depth == "None": + self.max_depth = None + else: + self.max_depth = int(self.max_depth) + + self.min_samples_split = int(min_samples_split) + self.min_samples_leaf = int(min_samples_leaf) + + if max_leaf_nodes == "None": + self.max_leaf_nodes = None + else: + self.max_leaf_nodes = int(max_leaf_nodes) + + self.random_state = random_state + self.estimator = None + + def fit(self, X, y): + self.estimator = DecisionTreeClassifier( + criterion=self.criterion, + max_depth=self.max_depth, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + max_leaf_nodes=self.max_leaf_nodes, + random_state=self.random_state) + self.estimator.fit(X, y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(): + return {'shortname': 'DT', + 'name': 'Decision Tree Classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, + # TODO find out what is best used here! + # But rather fortran or C-contiguous? + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + criterion = CategoricalHyperparameter( + "criterion", ["gini", "entropy"], default="gini") + max_features = Constant('max_features', 1.0) + max_depth = UnParametrizedHyperparameter("max_depth", "None") + min_samples_split = UniformIntegerHyperparameter( + "min_samples_split", 2, 20, default=2) + min_samples_leaf = UniformIntegerHyperparameter( + "min_samples_leaf", 1, 20, default=1) + max_leaf_nodes = UnParametrizedHyperparameter("max_leaf_nodes", "None") + cs = ConfigurationSpace() + cs.add_hyperparameter(criterion) + cs.add_hyperparameter(max_features) + cs.add_hyperparameter(max_depth) + cs.add_hyperparameter(min_samples_split) + cs.add_hyperparameter(min_samples_leaf) + cs.add_hyperparameter(max_leaf_nodes) + return cs diff --git a/misc/classifiers.csv b/misc/classifiers.csv index 22b0103946..3911d1f4c5 100644 --- a/misc/classifiers.csv +++ b/misc/classifiers.csv @@ -39,5 +39,5 @@ class,added,comment ,False,Equivalent to SVC ,True, ,False,This classifier is in a test module -,FALSE,Rfs are considered better (and are most likely faster to train) -,FALSE,ExtraTreeForests are considered better +,True, +,FALSE,Extra-trees should only be used within ensemble methods. diff --git a/source/first_steps.rst b/source/first_steps.rst index 5b349fe57b..cdd81ff527 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -24,4 +24,4 @@ configuration on the iris dataset. >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.59999999999999998 + 0.93999999999999995 diff --git a/tests/components/classification/test_decision_tree.py b/tests/components/classification/test_decision_tree.py new file mode 100644 index 0000000000..a6daff0cea --- /dev/null +++ b/tests/components/classification/test_decision_tree.py @@ -0,0 +1,23 @@ +import unittest + +from ParamSklearn.components.classification.decision_tree import DecisionTree +from ParamSklearn.util import _test_classifier, _test_classifier_predict_proba + +import sklearn.metrics + + +class DecisionTreetComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_classifier(DecisionTree, + dataset='iris') + self.assertAlmostEqual(0.92, + sklearn.metrics.accuracy_score(predictions, + targets)) + + def test_default_configuration_predict_proba(self): + for i in range(10): + predictions, targets = _test_classifier_predict_proba( + DecisionTree, dataset='iris') + self.assertAlmostEqual(2.7631021115928571, + sklearn.metrics.log_loss(targets, predictions)) \ No newline at end of file diff --git a/tests/test_classification.py b/tests/test_classification.py index e49cb55a7f..73e1cedb94 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -4,6 +4,7 @@ import mock import numpy as np +from scipy.linalg import LinAlgError import sklearn.datasets import sklearn.decomposition import sklearn.ensemble @@ -86,6 +87,19 @@ def test_configurations(self): else: print config raise e + except LinAlgError as e: + if "not positive definite, even with jitter" in e.message: + continue + else: + print config + raise e + except AttributeError as e: + # Some error in QDA + if "log" == e.message: + continue + else: + print config + raise e def test_configurations_sparse(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space( @@ -118,7 +132,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(96, len(hyperparameters)) + self.assertEqual(102, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 4, len(conditions)) diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index a07984a5cf..219f116bed 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(95, len(hyperparameters)) + self.assertEqual(101, len(hyperparameters)) # The three parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) From f165ddf15914bd9b1c3720e99b3171fb73d331a3 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 17 Apr 2015 09:46:18 +0200 Subject: [PATCH 228/352] kNN remove l5 distance --- .../components/classification/k_nearest_neighbors.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index b468639ad5..7c1bb929e5 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -76,18 +76,10 @@ def get_hyperparameter_search_space(dataset_properties=None): weights = CategoricalHyperparameter( name="weights", choices=["uniform", "distance"], default="uniform") algorithm = Constant(name='algorithm', value="auto") - if dataset_properties is not None and dataset_properties.get('sparse'): - p_choices = [1, 2] - else: - p_choices = [1, 2, 5] p = CategoricalHyperparameter( - name="p", choices=p_choices, default=2) + name="p", choices=[1, 2], default=2) leaf_size = Constant(name="leaf_size", value=30) - # Unparametrized - # TODO: If we further parametrize 'metric' we need more metric params - #metric = UnParametrizedHyperparameter(name="metric", value="minkowski") - cs = ConfigurationSpace() cs.add_hyperparameter(n_neighbors) cs.add_hyperparameter(weights) From da4646780f706ed35f0cdea1740402213169464c Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 22 Apr 2015 13:33:50 +0200 Subject: [PATCH 229/352] Fix: lda and qda: return something in predict_proba --- ParamSklearn/components/classification/lda.py | 5 ++++- ParamSklearn/components/classification/qda.py | 2 ++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/ParamSklearn/components/classification/lda.py b/ParamSklearn/components/classification/lda.py index 842047f2b0..6093f3bc23 100644 --- a/ParamSklearn/components/classification/lda.py +++ b/ParamSklearn/components/classification/lda.py @@ -6,7 +6,9 @@ from ParamSklearn.components.classification_base import \ ParamSklearnClassificationAlgorithm -from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS +from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.implementations.util import softmax + class LDA(ParamSklearnClassificationAlgorithm): @@ -31,6 +33,7 @@ def predict_proba(self, X): raise NotImplementedError() df = self.estimator.predict_proba(X) + return softmax(df) @staticmethod def get_properties(): diff --git a/ParamSklearn/components/classification/qda.py b/ParamSklearn/components/classification/qda.py index 5a4f7700c4..79af01cd7f 100644 --- a/ParamSklearn/components/classification/qda.py +++ b/ParamSklearn/components/classification/qda.py @@ -6,6 +6,7 @@ from ParamSklearn.components.classification_base import \ ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.implementations.util import softmax class QDA(ParamSklearnClassificationAlgorithm): @@ -30,6 +31,7 @@ def predict_proba(self, X): raise NotImplementedError() df = self.estimator.predict_proba(X) + return softmax(df) @staticmethod def get_properties(): From 76f8336a41e6cda04e45656706af12c94f9eba5b Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 22 Apr 2015 13:40:04 +0200 Subject: [PATCH 230/352] Add missing preprocessors --- ParamSklearn/base.py | 17 +-- ParamSklearn/classification.py | 37 ++++- .../preprocessing/dictionary_learning.py | 91 ++++++++++++ .../components/preprocessing/extra_trees.py | 138 ++++++++++++++++++ .../components/preprocessing/fast_ica.py | 80 ++++++++++ .../preprocessing/feature_agglomeration.py | 71 +++++++++ .../components/preprocessing/kernel_pca.py | 88 +++++++++++ .../components/preprocessing/liblinear.py | 125 ++++++++++++++++ .../preprocessing/nystroem_sampler.py | 86 +++++++++++ .../components/preprocessing/polynomial.py | 73 +++++++++ .../components/preprocessing/select_rates.py | 89 +++++++++++ ParamSklearn/util.py | 56 +++---- misc/classifiers.csv | 2 +- misc/transformers.csv | 114 +++++++-------- .../preprocessing/test_dictionary_learning.py | 40 +++++ .../preprocessing/test_extra_trees.py | 42 ++++++ .../components/preprocessing/test_fast_ica.py | 43 ++++++ .../test_feature_agglomeration.py | 39 +++++ .../preprocessing/test_kernel_pca.py | 43 ++++++ .../preprocessing/test_liblinear.py | 42 ++++++ .../preprocessing/test_nystroem_sampler.py | 76 ++++++++++ .../preprocessing/test_polynomial.py | 43 ++++++ .../preprocessing/test_select_rates.py | 86 +++++++++++ tests/test_classification.py | 20 ++- tests/test_textclassification.py | 4 +- 25 files changed, 1440 insertions(+), 105 deletions(-) create mode 100644 ParamSklearn/components/preprocessing/dictionary_learning.py create mode 100644 ParamSklearn/components/preprocessing/extra_trees.py create mode 100644 ParamSklearn/components/preprocessing/fast_ica.py create mode 100644 ParamSklearn/components/preprocessing/feature_agglomeration.py create mode 100644 ParamSklearn/components/preprocessing/kernel_pca.py create mode 100644 ParamSklearn/components/preprocessing/liblinear.py create mode 100644 ParamSklearn/components/preprocessing/nystroem_sampler.py create mode 100644 ParamSklearn/components/preprocessing/polynomial.py create mode 100644 ParamSklearn/components/preprocessing/select_rates.py create mode 100644 tests/components/preprocessing/test_dictionary_learning.py create mode 100644 tests/components/preprocessing/test_extra_trees.py create mode 100644 tests/components/preprocessing/test_fast_ica.py create mode 100644 tests/components/preprocessing/test_feature_agglomeration.py create mode 100644 tests/components/preprocessing/test_kernel_pca.py create mode 100644 tests/components/preprocessing/test_liblinear.py create mode 100644 tests/components/preprocessing/test_nystroem_sampler.py create mode 100644 tests/components/preprocessing/test_polynomial.py create mode 100644 tests/components/preprocessing/test_select_rates.py diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index aa5c10c46d..a7fa596f5a 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -15,7 +15,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ InactiveHyperparameter -from HPOlibConfigSpace.conditions import EqualsCondition +from HPOlibConfigSpace.conditions import EqualsCondition, AbstractConjunction from HPOlibConfigSpace.forbidden import ForbiddenAndConjunction, \ ForbiddenEqualsClause @@ -371,6 +371,8 @@ def get_hyperparameter_search_space(cls, estimator_name, for condition in available_preprocessors[name]. \ get_hyperparameter_search_space(dataset_properties).get_conditions(): + if not isinstance(condition, AbstractConjunction): + continue dlcs = condition.get_descendent_literal_conditions() for dlc in dlcs: if not dlc.child.name.startswith(name): @@ -383,22 +385,11 @@ def get_hyperparameter_search_space(cls, estimator_name, get_hyperparameter_search_space(dataset_properties).forbidden_clauses: dlcs = forbidden_clause.get_descendant_literal_clauses() for dlc in dlcs: - if not dlc.hyperparameter.startwith(name): + if not dlc.hyperparameter.name.startswith(name): dlc.hyperparameter.name = "%s:%s" % (name, dlc.hyperparameter.name) cs.add_forbidden_clause(forbidden_clause) - # Now try to add things for which we know that they don't work - try: - cs.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(cs.get_hyperparameter( - "select_percentile_classification:score_func"), "chi2"), - ForbiddenEqualsClause(cs.get_hyperparameter( - "rescaling:strategy"), "standard") - )) - except: - pass - return cs @staticmethod diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 5ebd5f3e69..467f72e2de 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -297,8 +297,9 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # Combinations of non-linear models with feature learning: classifiers_ = ["adaboost", "extra_trees", "gradient_boosting", "k_nearest_neighbors", "libsvm_svc", "random_forest", - "gaussian_nb"] - feature_learning = ["kitchen_sinks", "sparse_filtering"] + "gaussian_nb", "gaussian_process", "decision_tree"] + feature_learning = ["kitchen_sinks", "sparse_filtering", + "nystroem_sampler", "dictionary_learning"] for c, f in product(classifiers_, feature_learning): if c not in classifiers_list: @@ -316,7 +317,8 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # We have seen empirically that tree-based models together with PCA # don't work better than tree-based models without preprocessing - classifiers_ = ["random_forest", "extra_trees", "gradient_boosting"] + classifiers_ = ["random_forest", "extra_trees", "gradient_boosting", + "decision_tree"] for c in classifiers_: if c not in classifiers_list: continue @@ -345,7 +347,8 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # it with standardization, features learning, pca classifiers_ = ["multinomial_nb", "bernoulli_nb"] preproc_with_negative_X = ["kitchen_sinks", "sparse_filtering", - "pca", "truncatedSVD"] + "pca", "truncatedSVD", "fast_ica", + "kernel_pca"] for c in classifiers_: if c not in classifiers_list: continue @@ -372,6 +375,32 @@ def get_hyperparameter_search_space(cls, include_estimators=None, except KeyError: pass + # Now try to add things for which we know that they don't work + forbidden_hyperparameter_combinations = \ + [("select_percentile_classification:score_func", "chi2", + "rescaling:strategy", "standard"), + ("select_percentile_classification:score_func", "chi2", + "rescaling:strategy", "none"), + ("select_rates:score_func", "chi2", + "rescaling:strategy", "standard"), + ("select_rates:score_func", "chi2", + "rescaling:strategy", "none"), + ("nystroem_sampler:kernel", 'chi2', "rescaling:strategy", + "standard"), + ("nystroem_sampler:kernel", 'chi2', "rescaling:strategy", + "none")] + for hp_name_1, hp_value_1, hp_name_2, hp_value_2 in \ + forbidden_hyperparameter_combinations: + try: + configuration_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + hp_name_1), hp_value_1), + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + hp_name_2), hp_value_2) + )) + except: + pass + return configuration_space @staticmethod diff --git a/ParamSklearn/components/preprocessing/dictionary_learning.py b/ParamSklearn/components/preprocessing/dictionary_learning.py new file mode 100644 index 0000000000..6fa0fb64ba --- /dev/null +++ b/ParamSklearn/components/preprocessing/dictionary_learning.py @@ -0,0 +1,91 @@ +import sklearn.decomposition + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ + UniformIntegerHyperparameter, UniformFloatHyperparameter, Constant + +from ParamSklearn.components.preprocessor_base import \ + ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import SPARSE, DENSE, INPUT + + +class DictionaryLearning(ParamSklearnPreprocessingAlgorithm): + def __init__(self, n_components, alpha, max_iter, tol, fit_algorithm, + transform_algorithm, transform_alpha, split_sign, + random_state=None): + self.n_components = int(n_components) + self.alpha = float(alpha) + self.max_iter = int(max_iter) + self.tol = float(tol) + self.fit_algorithm = fit_algorithm + self.transform_algorithm = transform_algorithm + self.transform_alpha = bool(transform_alpha) + self.split_sign = bool(split_sign) + self.random_state = random_state + + def fit(self, X, Y=None): + self.preprocessor = sklearn.decomposition.DictionaryLearning( + n_components=self.n_components, alpha=self.alpha, + max_iter=self.max_iter, tol=self.tol, + fit_algorithm=self.fit_algorithm, + transform_algorithm=self.transform_algorithm, + transform_alpha=self.transform_alpha, + split_sign=self.split_sign, random_state=self.random_state + ) + self.preprocessor.fit(X) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(): + return {'shortname': 'Dictionary Learning', + 'name': 'Dictionary Learning', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': False, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE), + 'output': INPUT, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + n_components = UniformIntegerHyperparameter( + "n_components", 50, 2000, default=100) + alpha = UniformFloatHyperparameter( + "alpha", 1e-5, 10, 1, log=True) + max_iter = UniformIntegerHyperparameter( + "max_iter", 50, 500, default=100) + tol = UniformFloatHyperparameter('tol', 1e-9, 1e-3, 1e-8, log=True) + # CD causes problems here + fit_algorithm = Constant('fit_algorithm', 'lars') + transform_algorithm = CategoricalHyperparameter('transform_algorithm', + ['lasso_lars', 'lasso_cd', 'lars', 'omp', 'threshold'], 'omp') + transform_alpha = UniformFloatHyperparameter('transform_alpha', + 0.1, 10., 1., log=True) + split_sign = CategoricalHyperparameter('split_sign', ['False', + 'True'], 'False') + cs = ConfigurationSpace() + cs.add_hyperparameter(n_components) + cs.add_hyperparameter(alpha) + cs.add_hyperparameter(max_iter) + cs.add_hyperparameter(tol) + cs.add_hyperparameter(fit_algorithm) + cs.add_hyperparameter(transform_algorithm) + cs.add_hyperparameter(transform_alpha) + cs.add_hyperparameter(split_sign) + return cs + + diff --git a/ParamSklearn/components/preprocessing/extra_trees.py b/ParamSklearn/components/preprocessing/extra_trees.py new file mode 100644 index 0000000000..f0787dca7f --- /dev/null +++ b/ParamSklearn/components/preprocessing/extra_trees.py @@ -0,0 +1,138 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from ParamSklearn.components.preprocessor_base import \ + ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS + +# get our own forests to replace the sklearn ones +from ParamSklearn.implementations import forest + + +class ExtraTreesPreprocessor(ParamSklearnPreprocessingAlgorithm): + def __init__(self, n_estimators, criterion, min_samples_leaf, + min_samples_split, max_features, + max_leaf_nodes_or_max_depth="max_depth", + bootstrap=False, max_leaf_nodes=None, max_depth="None", + oob_score=False, n_jobs=1, random_state=None, verbose=0): + + self.n_estimators = int(n_estimators) + self.estimator_increment = 10 + if criterion not in ("gini", "entropy"): + raise ValueError("'criterion' is not in ('gini', 'entropy'): " + "%s" % criterion) + self.criterion = criterion + + if max_leaf_nodes_or_max_depth == "max_depth": + self.max_leaf_nodes = None + if max_depth == "None": + self.max_depth = None + else: + self.max_depth = int(max_depth) + # if use_max_depth == "True": + # self.max_depth = int(max_depth) + #elif use_max_depth == "False": + # self.max_depth = None + else: + if max_leaf_nodes == "None": + self.max_leaf_nodes = None + else: + self.max_leaf_nodes = int(max_leaf_nodes) + self.max_depth = None + + self.min_samples_leaf = int(min_samples_leaf) + self.min_samples_split = int(min_samples_split) + + self.max_features = float(max_features) + + if bootstrap == "True": + self.bootstrap = True + elif bootstrap == "False": + self.bootstrap = False + + self.oob_score = oob_score + self.n_jobs = int(n_jobs) + self.random_state = random_state + self.verbose = int(verbose) + self.preprocessor = None + + def fit(self, X, Y): + num_features = X.shape[1] + max_features = int( + float(self.max_features) * (np.log(num_features) + 1)) + # Use at most half of the features + max_features = max(1, min(int(X.shape[1] / 2), max_features)) + self.preprocessor = forest.ExtraTreesClassifier( + n_estimators=0, criterion=self.criterion, + max_depth=self.max_depth, min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, bootstrap=self.bootstrap, + max_features=max_features, max_leaf_nodes=self.max_leaf_nodes, + oob_score=self.oob_score, n_jobs=self.n_jobs, verbose=self.verbose, + random_state=self.random_state, + warm_start=True + ) + # JTS TODO: I think we might have to copy here if we want self.estimator + # to always be consistent on sigabort + while len(self.preprocessor.estimators_) < self.n_estimators: + tmp = self.preprocessor # TODO copy ? + tmp.n_estimators += self.estimator_increment + tmp.fit(X, Y) + self.preprocessor = tmp + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(): + return {'shortname': 'ET', + 'name': 'Extra Trees Classifier Preprocessing', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, + # TODO find out what is best used here! + # But rather fortran or C-contiguous? + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + bootstrap = CategoricalHyperparameter( + "bootstrap", ["True", "False"], default="False") + n_estimators = Constant("n_estimators", 100) + criterion = CategoricalHyperparameter( + "criterion", ["gini", "entropy"], default="gini") + max_features = UniformFloatHyperparameter( + "max_features", 0.5, 5, default=1) + min_samples_split = UniformIntegerHyperparameter( + "min_samples_split", 2, 20, default=2) + min_samples_leaf = UniformIntegerHyperparameter( + "min_samples_leaf", 1, 20, default=1) + + max_depth = UnParametrizedHyperparameter(name="max_depth", value="None") + + cs = ConfigurationSpace() + cs.add_hyperparameter(n_estimators) + cs.add_hyperparameter(criterion) + cs.add_hyperparameter(max_features) + cs.add_hyperparameter(max_depth) + cs.add_hyperparameter(min_samples_split) + cs.add_hyperparameter(min_samples_leaf) + cs.add_hyperparameter(bootstrap) + return cs diff --git a/ParamSklearn/components/preprocessing/fast_ica.py b/ParamSklearn/components/preprocessing/fast_ica.py new file mode 100644 index 0000000000..233c524204 --- /dev/null +++ b/ParamSklearn/components/preprocessing/fast_ica.py @@ -0,0 +1,80 @@ +import warnings + +import sklearn.decomposition + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ + UniformIntegerHyperparameter, UniformFloatHyperparameter +from HPOlibConfigSpace.forbidden import ForbiddenInClause, \ + ForbiddenAndConjunction, ForbiddenEqualsClause + +from ParamSklearn.components.preprocessor_base import \ + ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import SPARSE, DENSE, INPUT + +import numpy as np + + +class FastICA(ParamSklearnPreprocessingAlgorithm): + def __init__(self, n_components, algorithm, whiten, fun, + random_state=None): + self.n_components = int(n_components) + self.algorithm = algorithm + self.whiten = bool(whiten) + self.fun = fun + self.random_state = random_state + + def fit(self, X, Y=None): + self.preprocessor = sklearn.decomposition.FastICA( + n_components=self.n_components, algorithm=self.algorithm, + fun=self.fun, whiten=self.whiten, random_state=self.random_state + ) + # Make the RuntimeWarning an Exception! + with warnings.catch_warnings(): + warnings.filterwarnings("error") + self.preprocessor.fit(X) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(): + return {'shortname': 'FastICA', + 'name': 'Fast Independent Component Analysis', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': False, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE), + 'output': INPUT, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + n_components = UniformIntegerHyperparameter( + "n_components", 50, 2000, default=100) + algorithm = CategoricalHyperparameter('algorithm', + ['parallel', 'deflation'], 'parallel') + whiten = CategoricalHyperparameter('whiten', + ['False', 'True'], 'False') + fun = CategoricalHyperparameter('fun', ['logcosh', 'exp', 'cube'], + 'logcosh') + cs = ConfigurationSpace() + cs.add_hyperparameter(n_components) + cs.add_hyperparameter(algorithm) + cs.add_hyperparameter(whiten) + cs.add_hyperparameter(fun) + return cs + + diff --git a/ParamSklearn/components/preprocessing/feature_agglomeration.py b/ParamSklearn/components/preprocessing/feature_agglomeration.py new file mode 100644 index 0000000000..cb769a844c --- /dev/null +++ b/ParamSklearn/components/preprocessing/feature_agglomeration.py @@ -0,0 +1,71 @@ +import sklearn.cluster + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ + UniformIntegerHyperparameter +from HPOlibConfigSpace.forbidden import ForbiddenInClause, \ + ForbiddenAndConjunction, ForbiddenEqualsClause + +from ParamSklearn.components.preprocessor_base import \ + ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import SPARSE, DENSE, INPUT + + +class FeatureAgglomeration(ParamSklearnPreprocessingAlgorithm): + def __init__(self, n_clusters, affinity, linkage, random_state=None): + self.n_clusters = int(n_clusters) + self.affinity = affinity + self.linkage = linkage + self.random_state = random_state + + def fit(self, X, Y=None): + n_clusters = min(self.n_clusters, X.shape[1]) + + self.preprocessor = sklearn.cluster.FeatureAgglomeration( + n_clusters=n_clusters, affinity=self.affinity, + linkage=self.linkage) + self.preprocessor.fit(X) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(): + return {'shortname': 'Feature Agglomeration', + 'name': 'Feature Agglomeration', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE), + 'output': INPUT, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + n_clusters = UniformIntegerHyperparameter("n_clusters", 2, 400, 25) + affinity = CategoricalHyperparameter("affinity", + ["euclidean", "manhattan", "cosine"], "euclidean") + linkage = CategoricalHyperparameter("linkage", + ["ward", "complete", "average"], "ward") + cs = ConfigurationSpace() + cs.add_hyperparameter(n_clusters) + cs.add_hyperparameter(affinity) + cs.add_hyperparameter(linkage) + affinity_and_linkage = ForbiddenAndConjunction( + ForbiddenInClause(affinity, ["manhattan", "cosine"]), + ForbiddenEqualsClause(linkage, "ward")) + cs.add_forbidden_clause(affinity_and_linkage) + return cs + diff --git a/ParamSklearn/components/preprocessing/kernel_pca.py b/ParamSklearn/components/preprocessing/kernel_pca.py new file mode 100644 index 0000000000..00a0567295 --- /dev/null +++ b/ParamSklearn/components/preprocessing/kernel_pca.py @@ -0,0 +1,88 @@ +import warnings + +import sklearn.decomposition + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ + UniformIntegerHyperparameter, UniformFloatHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition, InCondition + +from ParamSklearn.components.preprocessor_base import \ + ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import SPARSE, DENSE, INPUT + + +class KernelPCA(ParamSklearnPreprocessingAlgorithm): + def __init__(self, n_components, kernel, degree=3, gamma=0.25, coef0=0.0, + random_state=None): + self.n_components = int(n_components) + self.kernel = kernel + self.degree = int(degree) + self.gamma = float(gamma) + self.coef0 = float(coef0) + self.random_state = random_state + + def fit(self, X, Y=None): + self.preprocessor = sklearn.decomposition.KernelPCA( + n_components=self.n_components, kernel=self.kernel, + degree=self.degree, gamma=self.gamma, coef0=self.coef0) + # Make the RuntimeWarning an Exception! + with warnings.catch_warnings(): + warnings.filterwarnings("error") + self.preprocessor.fit(X) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + with warnings.catch_warnings(): + warnings.filterwarnings("error") + X_new = self.preprocessor.transform(X) + return X_new + + @staticmethod + def get_properties(): + return {'shortname': 'KernelPCA', + 'name': 'Kernel Principal Component Analysis', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': False, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE), + 'output': INPUT, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + n_components = UniformIntegerHyperparameter( + "n_components", 50, 2000, default=100) + kernel = CategoricalHyperparameter('kernel', + ['poly', 'rbf', 'sigmoid', 'cosine'], 'rbf') + degree = UniformIntegerHyperparameter('degree', 2, 5, 3) + gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8, + log=True, default=0.1) + coef0 = UniformFloatHyperparameter("coef0", -1, 1, default=0) + cs = ConfigurationSpace() + cs.add_hyperparameter(n_components) + cs.add_hyperparameter(kernel) + cs.add_hyperparameter(degree) + cs.add_hyperparameter(gamma) + cs.add_hyperparameter(coef0) + + degree_depends_on_poly = EqualsCondition(degree, kernel, "poly") + coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"]) + gamma_condition = InCondition(gamma, kernel, ["poly", "rbf"]) + cs.add_condition(degree_depends_on_poly) + cs.add_condition(coef0_condition) + cs.add_condition(gamma_condition) + return cs + + diff --git a/ParamSklearn/components/preprocessing/liblinear.py b/ParamSklearn/components/preprocessing/liblinear.py new file mode 100644 index 0000000000..d6f03b131a --- /dev/null +++ b/ParamSklearn/components/preprocessing/liblinear.py @@ -0,0 +1,125 @@ +import sklearn.svm + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter, Constant, UnParametrizedHyperparameter +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ + ForbiddenAndConjunction + +from ParamSklearn.components.preprocessor_base import \ + ParamSklearnPreprocessingAlgorithm +from ParamSklearn.implementations.util import softmax +from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS + + +class LibLinear_Preprocessor(ParamSklearnPreprocessingAlgorithm): + def __init__(self, penalty, loss, dual, tol, C, multi_class, + fit_intercept, intercept_scaling, class_weight, + random_state=None): + self.penalty = penalty + self.loss = loss + self.dual = dual + self.tol = tol + self.C = C + self.multi_class = multi_class + self.fit_intercept = fit_intercept + self.intercept_scaling = intercept_scaling + self.class_weight = class_weight + self.random_state = random_state + self.preprocessor = None + + def fit(self, X, Y): + self.C = float(self.C) + self.tol = float(self.tol) + + self.dual = bool(self.dual) + self.fit_intercept = bool(self.fit_intercept) + self.intercept_scaling = float(self.intercept_scaling) + + if self.class_weight == "None": + self.class_weight = None + + self.preprocessor = sklearn.svm.LinearSVC(penalty=self.penalty, + loss=self.loss, + dual=self.dual, + tol=self.tol, + C=self.C, + class_weight=self.class_weight, + random_state=self.random_state) + self.preprocessor.fit(X, Y) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(): + return {'shortname': 'Liblinear-Preprocessor', + 'name': 'Liblinear Support Vector Preprocessing', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # Find out if this is good because of sparsity + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': False, + # TODO find out of this is right! + # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use + 'handles_sparse': True, + 'input': (SPARSE, DENSE), + 'output': PREDICTIONS, + # TODO find out what is best used here! + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + penalty = CategoricalHyperparameter("penalty", ["l1", "l2"], + default="l2") + loss = CategoricalHyperparameter("loss", ["l1", "l2"], default="l2") + dual = Constant("dual", "False") + # This is set ad-how + tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default=1e-4, + log=True) + C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True, + default=1.0) + multi_class = UnParametrizedHyperparameter("multi_class", "ovr") + # These are set ad-hoc + fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True") + intercept_scaling = UnParametrizedHyperparameter("intercept_scaling", 1) + # This does not allow for other resampling methods! + class_weight = CategoricalHyperparameter("class_weight", + ["None", "auto"], + default="None") + cs = ConfigurationSpace() + cs.add_hyperparameter(penalty) + cs.add_hyperparameter(loss) + cs.add_hyperparameter(dual) + cs.add_hyperparameter(tol) + cs.add_hyperparameter(C) + cs.add_hyperparameter(multi_class) + cs.add_hyperparameter(fit_intercept) + cs.add_hyperparameter(intercept_scaling) + cs.add_hyperparameter(class_weight) + penalty_and_loss = ForbiddenAndConjunction( + ForbiddenEqualsClause(penalty, "l1"), + ForbiddenEqualsClause(loss, "l1") + ) + constant_penalty_and_loss = ForbiddenAndConjunction( + ForbiddenEqualsClause(dual, "False"), + ForbiddenEqualsClause(penalty, "l2"), + ForbiddenEqualsClause(loss, "l1") + ) + penalty_and_dual = ForbiddenAndConjunction( + ForbiddenEqualsClause(dual, "False"), + ForbiddenEqualsClause(penalty, "l1") + ) + cs.add_forbidden_clause(penalty_and_loss) + cs.add_forbidden_clause(constant_penalty_and_loss) + cs.add_forbidden_clause(penalty_and_dual) + return cs diff --git a/ParamSklearn/components/preprocessing/nystroem_sampler.py b/ParamSklearn/components/preprocessing/nystroem_sampler.py new file mode 100644 index 0000000000..5c70d7312a --- /dev/null +++ b/ParamSklearn/components/preprocessing/nystroem_sampler.py @@ -0,0 +1,86 @@ +import sklearn.kernel_approximation + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter +from HPOlibConfigSpace.conditions import InCondition, EqualsCondition, AndConjunction + +from ParamSklearn.components.preprocessor_base import \ + ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import SPARSE, DENSE, INPUT + + +class Nystroem(ParamSklearnPreprocessingAlgorithm): + def __init__(self, kernel, n_components, gamma=None, degree=3, + coef0=1, random_state=None): + self.kernel = kernel + self.n_components = int(n_components) + self.gamma = float(gamma) + self.degree = int(degree) + self.coef0 = float(coef0) + self.random_state = random_state + + def fit(self, X, Y=None): + self.preprocessor = sklearn.kernel_approximation.Nystroem( + kernel=self.kernel, n_components=self.n_components, + gamma=self.gamma, degree=self.degree, coef0=self.coef0, + random_state=self.random_state) + self.preprocessor.fit(X) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(): + return {'shortname': 'Nystroem', + 'name': 'Nystroem kernel approximation', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE), + 'output': INPUT, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + kernel = CategoricalHyperparameter('kernel', + ['chi2', 'poly', 'rbf', 'sigmoid', 'cosine'], 'rbf') + degree = UniformIntegerHyperparameter('degree', 2, 5, 3) + gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8, + log=True, default=0.1) + coef0 = UniformFloatHyperparameter("coef0", -1, 1, default=0) + n_components = UniformIntegerHyperparameter( + "n_components", 50, 10000, default=100, log=True) + + cs = ConfigurationSpace() + cs.add_hyperparameter(kernel) + cs.add_hyperparameter(degree) + cs.add_hyperparameter(gamma) + cs.add_hyperparameter(coef0) + cs.add_hyperparameter(n_components) + + degree_depends_on_poly = EqualsCondition(degree, kernel, "poly") + coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"]) + gamma_condition = InCondition(gamma, kernel, ["poly", "rbf", "chi2", + "sigmoid"]) + cs.add_condition(degree_depends_on_poly) + cs.add_condition(coef0_condition) + cs.add_condition(gamma_condition) + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "ParamSklearn %s" % name + diff --git a/ParamSklearn/components/preprocessing/polynomial.py b/ParamSklearn/components/preprocessing/polynomial.py new file mode 100644 index 0000000000..5559ebbc7c --- /dev/null +++ b/ParamSklearn/components/preprocessing/polynomial.py @@ -0,0 +1,73 @@ +import sklearn.preprocessing + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter, Constant, UnParametrizedHyperparameter, \ + UniformIntegerHyperparameter +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ + ForbiddenAndConjunction + +from ParamSklearn.components.preprocessor_base import \ + ParamSklearnPreprocessingAlgorithm +from ParamSklearn.implementations.util import softmax +from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS + + +class PolynomialFeatures(ParamSklearnPreprocessingAlgorithm): + def __init__(self, degree, interaction_only, include_bias, random_state=None): + self.degree = int(degree) + self.interaction_only = bool(interaction_only) + self.include_bias = bool(include_bias) + self.random_state = random_state + self.preprocessor = None + + def fit(self, X, Y): + self.preprocessor = sklearn.preprocessing.PolynomialFeatures( + degree=self.degree, interaction_only=self.interaction_only, + include_bias=self.include_bias) + self.preprocessor.fit(X, Y) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(): + return {'shortname': 'PolynomialFeatures', + 'name': 'PolynomialFeatures', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # Find out if this is good because of sparsity + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': False, + # TODO find out of this is right! + # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use + 'handles_sparse': True, + 'input': (DENSE,), + 'output': PREDICTIONS, + # TODO find out what is best used here! + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + # More than degree 3 is too expensive! + degree = UniformIntegerHyperparameter("degree", 2, 3, 2) + interaction_only = CategoricalHyperparameter("interaction_only", + ["False", "True"], "False") + include_bias = CategoricalHyperparameter("include_bias", + ["True", "False"], "True") + + cs = ConfigurationSpace() + cs.add_hyperparameter(degree) + cs.add_hyperparameter(interaction_only) + cs.add_hyperparameter(include_bias) + + return cs diff --git a/ParamSklearn/components/preprocessing/select_rates.py b/ParamSklearn/components/preprocessing/select_rates.py new file mode 100644 index 0000000000..284b5c222b --- /dev/null +++ b/ParamSklearn/components/preprocessing/select_rates.py @@ -0,0 +1,89 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter, Constant + +import sklearn.feature_selection + +from ParamSklearn.components.preprocessor_base import \ + ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import DENSE, SPARSE, INPUT + + +class SelectRates(ParamSklearnPreprocessingAlgorithm): + def __init__(self, alpha, mode='fpr', + score_func="chi2", random_state=None): + self.random_state = random_state # We don't use this + self.alpha = float(alpha) + + if score_func == "chi2": + self.score_func = sklearn.feature_selection.chi2 + elif score_func == "f_classif": + self.score_func = sklearn.feature_selection.f_classif + else: + raise ValueError("score_func must be in ('chi2, 'f_classif'), " + "but is: %s" % score_func) + + self.mode = mode + + def fit(self, X, y): + self.preprocessor = sklearn.feature_selection.GenericUnivariateSelect( + score_func=self.score_func, param=self.alpha, mode=self.mode) + + self.preprocessor.fit(X, y) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + Xt = self.preprocessor.transform(X) + if Xt.shape[1] == 0: + raise ValueError( + "%s removed all features." % self.__class__.__name__) + return Xt + + @staticmethod + def get_properties(): + return {'shortname': 'SR', + 'name': 'Univariate Feature Selection based on rates', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE), + 'output': INPUT, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + alpha = UniformFloatHyperparameter( + name="alpha", lower=0.01, upper=0.5, default=0.1) + + score_func = CategoricalHyperparameter( + name="score_func", choices=["chi2", "f_classif"], default="chi2") + if dataset_properties is not None: + # Chi2 can handle sparse data, so we respect this + if 'sparse' in dataset_properties and dataset_properties['sparse']: + score_func = Constant( + name="score_func", value="chi2") + + mode = CategoricalHyperparameter('mode', ['fpr', 'fdr', 'fwe'], 'fpr') + + cs = ConfigurationSpace() + cs.add_hyperparameter(alpha) + cs.add_hyperparameter(score_func) + cs.add_hyperparameter(mode) + + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "ParamSklearn %s" % name + diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index 9ad20c5e1b..b2b2562a2d 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -123,7 +123,8 @@ def _test_preprocessing(Preprocessor, dataset='iris', make_sparse=False): class PreprocessingTestCase(unittest.TestCase): - def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False): + def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False, + test_sparse=True): # Dense # np.float32 X_train, Y_train, X_test, Y_test = get_dataset("iris", add_NaNs=add_NaNs) @@ -150,32 +151,33 @@ def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False): Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float64) - # Sparse - # np.float32 - X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True, - add_NaNs=add_NaNs) - self.assertEqual(X_train.dtype, np.float32) - configuration_space = Preprocessor.get_hyperparameter_search_space() - default = configuration_space.get_default_configuration() - preprocessor = Preprocessor(random_state=1, - **{hp.hyperparameter.name: hp.value for hp in - default.values.values()}) - preprocessor.fit(X_train, Y_train) - Xt = preprocessor.transform(X_train) - self.assertEqual(Xt.dtype, np.float32) - - # np.float64 - X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True, - add_NaNs=add_NaNs) - X_train = X_train.astype(np.float64) - configuration_space = Preprocessor.get_hyperparameter_search_space() - default = configuration_space.get_default_configuration() - preprocessor = Preprocessor(random_state=1, - **{hp.hyperparameter.name: hp.value for hp in - default.values.values()}) - preprocessor.fit(X_train) - Xt = preprocessor.transform(X_train) - self.assertEqual(Xt.dtype, np.float64) + if test_sparse is True: + # Sparse + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True, + add_NaNs=add_NaNs) + self.assertEqual(X_train.dtype, np.float32) + configuration_space = Preprocessor.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = Preprocessor(random_state=1, + **{hp.hyperparameter.name: hp.value for hp in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True, + add_NaNs=add_NaNs) + X_train = X_train.astype(np.float64) + configuration_space = Preprocessor.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = Preprocessor(random_state=1, + **{hp.hyperparameter.name: hp.value for hp in + default.values.values()}) + preprocessor.fit(X_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) def _test_regressor(Regressor, dataset='diabetes'): diff --git a/misc/classifiers.csv b/misc/classifiers.csv index 3911d1f4c5..75b08491c3 100644 --- a/misc/classifiers.csv +++ b/misc/classifiers.csv @@ -14,7 +14,7 @@ class,added,comment ,True,I don't know how similar to SGD this one is ,False,n fact, Perceptron() is equivalent to SGDClassifier(...) ,True, -,, +,False,Contains CV ,False,This class has abstract methods ,True, ,False,This classifier is in a test module diff --git a/misc/transformers.csv b/misc/transformers.csv index 1ce1d9a86f..8bd506b5bf 100644 --- a/misc/transformers.csv +++ b/misc/transformers.csv @@ -1,91 +1,91 @@ class,added,comment ,FALSE,BaseClass ,FALSE,Mixin class for feature agglomeration. -,, -,, -,, -,, -,, -,,Base class; https://www.stat.washington.edu/research/reports/2000/tr371.pdf -,, -,, -,, -,, -,, -,,What is the difference to Tobis implementation? +,TRUE,try out +,FALSE,deprecated +,FALSE,We expect this to not work better than PCA +,False,BaseClass +,FALSE,works only if there are known correlated data points +,FALSE,Base class; https://www.stat.washington.edu/research/reports/2000/tr371.pdf +,FALSE,Regression only +,FALSE,Regression only +,FALSE,Regression only +,TRUE,try out +,FALSE, +,FALSE,What is the difference to Tobis implementation? ,FALSE,Mixin class for sparse coding -,, -,, -,, -,, -,, +,FALSE, +,TRUE,try out +,TRUE,try out +,FALSE,Special case of sparse coding +,FALSE,see above ,TRUE, -,, -,, -,, -,, +,FALSE,same output as above +,FALSE,? +,FALSE,? +,FALSE,should not use according to the scikit-learn docs ,True, ,FALSE,Base class -,FALSE,Prefer Forests -,FALSE,Prefer Forests +,TRUE,try out +,FALSE,Only classification so far ,FALSE,Base class ,FALSE,Base class -,, -,, +,FALSE,use ExtraTreesClassifier +,FALSE,Only classification so far ,True, ,FALSE,Base class -,, -,, +,FALSE,use ExtraTreesClassifier +,FALSE,use ExtraTreesClassifier ,FALSE,Similar to 1HotEncoding ,FALSE,Useful when working with strings -,, +,FALSE,no text classification atm ,FALSE,subclass of TransformerMixin ,FALSE,subclass of TransformerMixin ,FALSE,"Metaclass, can blow up the configuration space" ,FALSE,"Metaclass, with cross validation" -,, +,FALSE,lives in the test module ,FALSE,Base class -,, -,, -,, -,, -,, +,FALSE,Base class +,TRUE,try out +,TRUE,try out +,TRUE,try out +,FALSE,as as below but different formulation ,True, -,, -,, -,, -,, +,FALSE,Same as above but different formulation +,FALSE,output transformation +,FALSE,very special case +,TRUE,try out ,True, -,, -,, -,, -,, +,FALSE,very specia case +,FALSE,Special case of GEM +,FALSE,same as LibLinear +,FALSE,same as SGD ,FALSE,Base class -,, -,, -,, -,, -,, -,, -,, -,, -,, +,FALSE,not right now +,FALSE,not right now +,FALSE,not right now +,FALSE,not right now +,FALSE,test class +,FALSE,test class +,FALSE,only look at if clustering helps +,FALSE,only look at if clustering helps +,FALSE,handles only binary input ,FALSE,"Right now, we do not have different feature sources." ,FALSE,"Right now, we have no need to binarize data" ,FALSE,"Right now, we have no need to center a kernel" ,TRUE, -,, +,TRUE, ,TRUE, -,, +,TRUE, ,TRUE, ,TRUE, ,FALSE,"Right now, we have 1HotEncoding" ,FALSE,This should be done before passing data to scikit-learn and thus not configured. ,FALSE,… -,, -,, -,, -,, +,FALSE,Base class +,FALSE,use kitchen sinks instead +,FALSE,use kitchen sinks +,TRUE, ,FALSE,Is in a test package ,FALSE,Base class ,FALSE,Use forests diff --git a/tests/components/preprocessing/test_dictionary_learning.py b/tests/components/preprocessing/test_dictionary_learning.py new file mode 100644 index 0000000000..e64fb8019d --- /dev/null +++ b/tests/components/preprocessing/test_dictionary_learning.py @@ -0,0 +1,40 @@ +import unittest + +from sklearn.linear_model import RidgeClassifier +from ParamSklearn.components.preprocessing.dictionary_learning import \ + DictionaryLearning +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ + get_dataset +import sklearn.metrics + + +class DictionaryLearningComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(DictionaryLearning) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_classify(self): + for i in range(2): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=False) + configuration_space = DictionaryLearning.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = DictionaryLearning(random_state=1, + **{hp.hyperparameter.name: hp.value + for hp in default.values.values()}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + classifier = RidgeClassifier() + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(accuracy, 0.81, places=2) + + @unittest.skip("Always returns float64") + def test_preprocessing_dtype(self): + super(DictionaryLearningComponentTest, + self)._test_preprocessing_dtype(DictionaryLearning) diff --git a/tests/components/preprocessing/test_extra_trees.py b/tests/components/preprocessing/test_extra_trees.py new file mode 100644 index 0000000000..3826f45b0e --- /dev/null +++ b/tests/components/preprocessing/test_extra_trees.py @@ -0,0 +1,42 @@ +import unittest + +from sklearn.linear_model import RidgeClassifier +from ParamSklearn.components.preprocessing.extra_trees import \ + ExtraTreesPreprocessor +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ + get_dataset +import sklearn.metrics + + +class ExtreTreesComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(ExtraTreesPreprocessor) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_classify(self): + for i in range(2): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=False) + configuration_space = ExtraTreesPreprocessor.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = ExtraTreesPreprocessor(random_state=1, + **{ + hp.hyperparameter.name: hp.value + for hp in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + classifier = RidgeClassifier() + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(accuracy, 0.87310261080752882, places=2) + + def test_preprocessing_dtype(self): + super(ExtreTreesComponentTest, + self)._test_preprocessing_dtype(ExtraTreesPreprocessor, + test_sparse=False) diff --git a/tests/components/preprocessing/test_fast_ica.py b/tests/components/preprocessing/test_fast_ica.py new file mode 100644 index 0000000000..677f2cf8a8 --- /dev/null +++ b/tests/components/preprocessing/test_fast_ica.py @@ -0,0 +1,43 @@ +import unittest + +from sklearn.linear_model import RidgeClassifier +from ParamSklearn.components.preprocessing.fast_ica import \ + FastICA +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ + get_dataset +import sklearn.metrics + + +class FastICAComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(FastICA) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_classify(self): + for i in range(5): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=False) + configuration_space = FastICA.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = FastICA(random_state=1, + **{ + hp.hyperparameter.name: hp.value + for hp in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + classifier = RidgeClassifier() + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(accuracy, 0.83545840922890102) + + @unittest.skip("Always returns float64") + def test_preprocessing_dtype(self): + super(FastICAComponentTest, + self)._test_preprocessing_dtype(FastICA) + diff --git a/tests/components/preprocessing/test_feature_agglomeration.py b/tests/components/preprocessing/test_feature_agglomeration.py new file mode 100644 index 0000000000..2e6a9b1ca0 --- /dev/null +++ b/tests/components/preprocessing/test_feature_agglomeration.py @@ -0,0 +1,39 @@ +import unittest + +from sklearn.ensemble import RandomForestClassifier +from ParamSklearn.components.preprocessing.feature_agglomeration import FeatureAgglomeration +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ + get_dataset +import sklearn.metrics + + +class FeatureAgglomerationComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(FeatureAgglomeration) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_classify(self): + for i in range(3): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=False) + configuration_space = FeatureAgglomeration.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = FeatureAgglomeration(random_state=1, + **{hp.hyperparameter.name: hp.value for hp in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + classifier = RandomForestClassifier(random_state=1) + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(accuracy, 0.8026715) + + def test_preprocessing_dtype(self): + super(FeatureAgglomerationComponentTest, + self)._test_preprocessing_dtype(FeatureAgglomeration, + test_sparse=False) diff --git a/tests/components/preprocessing/test_kernel_pca.py b/tests/components/preprocessing/test_kernel_pca.py new file mode 100644 index 0000000000..1e8430e8de --- /dev/null +++ b/tests/components/preprocessing/test_kernel_pca.py @@ -0,0 +1,43 @@ +import unittest + +from sklearn.linear_model import RidgeClassifier +from ParamSklearn.components.preprocessing.kernel_pca import \ + KernelPCA +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ + get_dataset +import sklearn.metrics + + +class KernelPCAComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(KernelPCA) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_classify(self): + for i in range(5): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=False) + configuration_space = KernelPCA.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = KernelPCA(random_state=1, + **{ + hp.hyperparameter.name: hp.value + for hp in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + classifier = RidgeClassifier() + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(accuracy, 0.096539162112932606) + + @unittest.skip("Always returns float64") + def test_preprocessing_dtype(self): + super(KernelPCAComponentTest, + self)._test_preprocessing_dtype(KernelPCA) + diff --git a/tests/components/preprocessing/test_liblinear.py b/tests/components/preprocessing/test_liblinear.py new file mode 100644 index 0000000000..e7532d8aab --- /dev/null +++ b/tests/components/preprocessing/test_liblinear.py @@ -0,0 +1,42 @@ +import unittest + +from sklearn.linear_model import RidgeClassifier +from ParamSklearn.components.preprocessing.liblinear import \ + LibLinear_Preprocessor +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ + get_dataset +import sklearn.metrics + + +class LiblinearComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(LibLinear_Preprocessor) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_classify(self): + for i in range(2): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=False) + configuration_space = LibLinear_Preprocessor.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = LibLinear_Preprocessor(random_state=1, + **{ + hp.hyperparameter.name: hp.value + for hp in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + classifier = RidgeClassifier() + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(accuracy, 0.87310261080752882, places=2) + + def test_preprocessing_dtype(self): + super(LiblinearComponentTest, + self)._test_preprocessing_dtype(LibLinear_Preprocessor, + test_sparse=False) diff --git a/tests/components/preprocessing/test_nystroem_sampler.py b/tests/components/preprocessing/test_nystroem_sampler.py new file mode 100644 index 0000000000..9f06b8058c --- /dev/null +++ b/tests/components/preprocessing/test_nystroem_sampler.py @@ -0,0 +1,76 @@ +import unittest + +import numpy as np + +from ParamSklearn.components.preprocessing.nystroem_sampler import \ + Nystroem +from ParamSklearn.util import _test_preprocessing, get_dataset + + +class NystroemComponentTest(unittest.TestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(Nystroem) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], 100) + self.assertFalse((transformation == 0).all()) + + #@unittest.skip("Right now, the RBFSampler returns a float64 array!") + def _test_preprocessing_dtype(self): + # Dense + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + self.assertEqual(X_train.dtype, np.float32) + + configuration_space = Nystroem.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = Nystroem(random_state=1, + **{hp.hyperparameter.name: hp.value + for hp + in + default.values.values()}) + preprocessor.fit(X_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + X_train = X_train.astype(np.float64) + configuration_space = Nystroem.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = Nystroem(random_state=1, + **{hp.hyperparameter.name: hp.value + for hp + in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) + + # Sparse + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) + self.assertEqual(X_train.dtype, np.float32) + configuration_space = Nystroem.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = Nystroem(random_state=1, + **{hp.hyperparameter.name: hp.value + for hp + in + default.values.values()}) + preprocessor.fit(X_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) + X_train = X_train.astype(np.float64) + configuration_space = Nystroem.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = Nystroem(random_state=1, + **{hp.hyperparameter.name: hp.value + for hp + in + default.values.values()}) + preprocessor.fit(X_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) diff --git a/tests/components/preprocessing/test_polynomial.py b/tests/components/preprocessing/test_polynomial.py new file mode 100644 index 0000000000..c82e2a574b --- /dev/null +++ b/tests/components/preprocessing/test_polynomial.py @@ -0,0 +1,43 @@ +import unittest + +from sklearn.linear_model import RidgeClassifier +from ParamSklearn.components.preprocessing.polynomial import \ + PolynomialFeatures +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ + get_dataset +import sklearn.metrics + + +class PolynomialFeaturesComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(PolynomialFeatures) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_classify(self): + for i in range(2): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=False) + configuration_space = PolynomialFeatures.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = PolynomialFeatures(random_state=1, + **{ + hp.hyperparameter.name: hp.value + for hp in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + classifier = RidgeClassifier() + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(accuracy, 0.93564055859137829, places=2) + + @unittest.skip("Produces np.float64") + def test_preprocessing_dtype(self): + super(PolynomialFeaturesComponentTest, + self)._test_preprocessing_dtype(PolynomialFeatures, + test_sparse=False) diff --git a/tests/components/preprocessing/test_select_rates.py b/tests/components/preprocessing/test_select_rates.py new file mode 100644 index 0000000000..51684f4d7e --- /dev/null +++ b/tests/components/preprocessing/test_select_rates.py @@ -0,0 +1,86 @@ +import unittest + +import numpy as np +import scipy.sparse + +from ParamSklearn.components.preprocessing.select_rates import \ + SelectRates +from ParamSklearn.util import _test_preprocessing, get_dataset + + +class SelectRatesComponentTest(unittest.TestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(SelectRates) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], 3) + self.assertFalse((transformation == 0).all()) + + transformation, original = _test_preprocessing( + SelectRates, make_sparse=True) + self.assertTrue(scipy.sparse.issparse(transformation)) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], int(original.shape[1] / 2)) + + def test_preprocessing_dtype(self): + # Dense + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + self.assertEqual(X_train.dtype, np.float32) + + configuration_space = SelectRates.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectRates(random_state=1, + **{ + hp.hyperparameter.name: hp.value + for hp + in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + X_train = X_train.astype(np.float64) + configuration_space = SelectRates.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectRates(random_state=1, + **{ + hp.hyperparameter.name: hp.value + for hp + in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) + + # Sparse + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) + self.assertEqual(X_train.dtype, np.float32) + configuration_space = SelectRates.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectRates(random_state=1, + **{ + hp.hyperparameter.name: hp.value + for hp + in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) + X_train = X_train.astype(np.float64) + configuration_space = SelectRates.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectRates(random_state=1, + **{ + hp.hyperparameter.name: hp.value + for hp + in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) diff --git a/tests/test_classification.py b/tests/test_classification.py index 73e1cedb94..48927792d3 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -80,8 +80,12 @@ def test_configurations(self): cls = ParamSklearnClassifier(config, random_state=1) try: cls.fit(X_train, Y_train) + X_test_ = X_test.copy() predictions = cls.predict(X_test) - except ValueError as e: + self.assertIsInstance(predictions, np.ndarray) + predicted_probabiliets = cls.predict_proba(X_test_) + self.assertIsInstance(predicted_probabiliets, np.ndarray) + except KeyError as e: if "Floating-point under-/overflow occurred at epoch" in e.message: continue else: @@ -100,6 +104,20 @@ def test_configurations(self): else: print config raise e + except RuntimeWarning as e: + if "invalid value encountered in sqrt" in e.message: + continue + elif "divide by zero encountered in divide" in e.message: + continue + else: + print config + raise e + except UserWarning as e: + if "FastICA did not converge" in e.message: + continue + else: + print config + raise e def test_configurations_sparse(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space( diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index 219f116bed..c4cd1293e9 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,10 +11,10 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(101, len(hyperparameters)) + self.assertEqual(130, len(hyperparameters)) # The three parameters which are always active are classifier, # preprocessor and imputation strategy - self.assertEqual(len(hyperparameters) - 3, len(conditions)) + self.assertEqual(len(hyperparameters) - 9, len(conditions)) self.assertNotIn("rescaling", cs.get_hyperparameter( "preprocessor").choices) self.assertRaisesRegexp(KeyError, "Hyperparameter " From ff3bc3cee6ee7eb950bc6d0ac02d62c609ba37d9 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 22 Apr 2015 15:12:05 +0200 Subject: [PATCH 231/352] FIX: do not use nb without feature squashing --- ParamSklearn/classification.py | 18 ++++++++++-------- tests/test_classification.py | 2 +- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 467f72e2de..32ae104f89 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -349,17 +349,19 @@ def get_hyperparameter_search_space(cls, include_estimators=None, preproc_with_negative_X = ["kitchen_sinks", "sparse_filtering", "pca", "truncatedSVD", "fast_ica", "kernel_pca"] + scaling_strategies = ['standard', 'none'] for c in classifiers_: if c not in classifiers_list: continue - try: - configuration_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "rescaling:strategy"), "standard"), - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "classifier"), c))) - except KeyError: - pass + for scaling_strategy in scaling_strategies: + try: + configuration_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "rescaling:strategy"), scaling_strategy), + ForbiddenEqualsClause(configuration_space.get_hyperparameter( + "classifier"), c))) + except KeyError: + pass for c, f in product(classifiers_, preproc_with_negative_X): if c not in classifiers_list: diff --git a/tests/test_classification.py b/tests/test_classification.py index 48927792d3..1993546b0a 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -85,7 +85,7 @@ def test_configurations(self): self.assertIsInstance(predictions, np.ndarray) predicted_probabiliets = cls.predict_proba(X_test_) self.assertIsInstance(predicted_probabiliets, np.ndarray) - except KeyError as e: + except ValueError as e: if "Floating-point under-/overflow occurred at epoch" in e.message: continue else: From ba1cd329959a90b21cc1b10aa55e6e8d8098024f Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Apr 2015 10:12:10 +0200 Subject: [PATCH 232/352] Forbid nystroem sampler with nb --- ParamSklearn/classification.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 32ae104f89..f7851624b8 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -348,7 +348,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, classifiers_ = ["multinomial_nb", "bernoulli_nb"] preproc_with_negative_X = ["kitchen_sinks", "sparse_filtering", "pca", "truncatedSVD", "fast_ica", - "kernel_pca"] + "kernel_pca", "nystroem_sampler"] scaling_strategies = ['standard', 'none'] for c in classifiers_: if c not in classifiers_list: From f6c976dd645a892b93243a7b47ac9fd0a4414609 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Apr 2015 14:04:49 +0200 Subject: [PATCH 233/352] Fix bug and tests --- ParamSklearn/base.py | 5 +++-- .../components/preprocessing/dictionary_learning.py | 8 +++++--- ParamSklearn/components/preprocessing/fast_ica.py | 2 +- ParamSklearn/components/preprocessing/kernel_pca.py | 4 ++-- source/first_steps.rst | 4 ++-- tests/components/preprocessing/test_fast_ica.py | 4 ++-- tests/components/preprocessing/test_kernel_pca.py | 3 ++- tests/test_classification.py | 7 +++++-- tests/test_regression.py | 6 +++--- tests/test_textclassification.py | 6 +++--- 10 files changed, 28 insertions(+), 21 deletions(-) diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index a7fa596f5a..0b728faadc 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -372,8 +372,9 @@ def get_hyperparameter_search_space(cls, estimator_name, for condition in available_preprocessors[name]. \ get_hyperparameter_search_space(dataset_properties).get_conditions(): if not isinstance(condition, AbstractConjunction): - continue - dlcs = condition.get_descendent_literal_conditions() + dlcs = [condition] + else: + dlcs = condition.get_descendent_literal_conditions() for dlc in dlcs: if not dlc.child.name.startswith(name): dlc.child.name = "%s:%s" % (name, dlc.child.name) diff --git a/ParamSklearn/components/preprocessing/dictionary_learning.py b/ParamSklearn/components/preprocessing/dictionary_learning.py index 6fa0fb64ba..3b8e75dd0a 100644 --- a/ParamSklearn/components/preprocessing/dictionary_learning.py +++ b/ParamSklearn/components/preprocessing/dictionary_learning.py @@ -1,3 +1,4 @@ +import numpy as np import sklearn.decomposition from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -32,6 +33,7 @@ def fit(self, X, Y=None): transform_alpha=self.transform_alpha, split_sign=self.split_sign, random_state=self.random_state ) + X = X.astype(np.float64) self.preprocessor.fit(X) return self @@ -56,7 +58,7 @@ def get_properties(): 'is_deterministic': False, 'handles_sparse': True, 'handles_dense': True, - 'input': (SPARSE, DENSE), + 'input': (DENSE, ), 'output': INPUT, 'preferred_dtype': None} @@ -69,8 +71,8 @@ def get_hyperparameter_search_space(dataset_properties=None): max_iter = UniformIntegerHyperparameter( "max_iter", 50, 500, default=100) tol = UniformFloatHyperparameter('tol', 1e-9, 1e-3, 1e-8, log=True) - # CD causes problems here - fit_algorithm = Constant('fit_algorithm', 'lars') + fit_algorithm = CategoricalHyperparameter('fit_algorithm', + ['lars', 'cd'], 'lars') transform_algorithm = CategoricalHyperparameter('transform_algorithm', ['lasso_lars', 'lasso_cd', 'lars', 'omp', 'threshold'], 'omp') transform_alpha = UniformFloatHyperparameter('transform_alpha', diff --git a/ParamSklearn/components/preprocessing/fast_ica.py b/ParamSklearn/components/preprocessing/fast_ica.py index 233c524204..072df494d5 100644 --- a/ParamSklearn/components/preprocessing/fast_ica.py +++ b/ParamSklearn/components/preprocessing/fast_ica.py @@ -56,7 +56,7 @@ def get_properties(): 'is_deterministic': False, 'handles_sparse': True, 'handles_dense': True, - 'input': (SPARSE, DENSE), + 'input': (DENSE, ), 'output': INPUT, 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/kernel_pca.py b/ParamSklearn/components/preprocessing/kernel_pca.py index 00a0567295..16a47adc28 100644 --- a/ParamSklearn/components/preprocessing/kernel_pca.py +++ b/ParamSklearn/components/preprocessing/kernel_pca.py @@ -56,7 +56,7 @@ def get_properties(): 'is_deterministic': False, 'handles_sparse': True, 'handles_dense': True, - 'input': (SPARSE, DENSE), + 'input': (DENSE, ), 'output': INPUT, 'preferred_dtype': None} @@ -68,7 +68,7 @@ def get_hyperparameter_search_space(dataset_properties=None): ['poly', 'rbf', 'sigmoid', 'cosine'], 'rbf') degree = UniformIntegerHyperparameter('degree', 2, 5, 3) gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8, - log=True, default=0.1) + log=True, default=1.0) coef0 = UniformFloatHyperparameter("coef0", -1, 1, default=0) cs = ConfigurationSpace() cs.add_hyperparameter(n_components) diff --git a/source/first_steps.rst b/source/first_steps.rst index cdd81ff527..c1c06c9597 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -18,10 +18,10 @@ configuration on the iris dataset. >>> np.random.seed(1) >>> np.random.shuffle(indices) >>> configuration_space = ParamSklearnClassifier.get_hyperparameter_search_space() - >>> sampler = RandomSampler(configuration_space, 4) + >>> sampler = RandomSampler(configuration_space, 5) >>> configuration = sampler.sample_configuration() >>> cls = ParamSklearnClassifier(configuration, random_state=1) >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.93999999999999995 + 0.95999999999999996 diff --git a/tests/components/preprocessing/test_fast_ica.py b/tests/components/preprocessing/test_fast_ica.py index 677f2cf8a8..f0d521073a 100644 --- a/tests/components/preprocessing/test_fast_ica.py +++ b/tests/components/preprocessing/test_fast_ica.py @@ -16,7 +16,7 @@ def test_default_configuration(self): def test_default_configuration_classify(self): for i in range(5): - X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris', make_sparse=False) configuration_space = FastICA.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() @@ -34,7 +34,7 @@ def test_default_configuration_classify(self): predictor = classifier.fit(X_train_trans, Y_train) predictions = predictor.predict(X_test_trans) accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) - self.assertAlmostEqual(accuracy, 0.83545840922890102) + self.assertAlmostEqual(accuracy, 0.90000000000000002) @unittest.skip("Always returns float64") def test_preprocessing_dtype(self): diff --git a/tests/components/preprocessing/test_kernel_pca.py b/tests/components/preprocessing/test_kernel_pca.py index 1e8430e8de..9b00a92e61 100644 --- a/tests/components/preprocessing/test_kernel_pca.py +++ b/tests/components/preprocessing/test_kernel_pca.py @@ -10,7 +10,8 @@ class KernelPCAComponentTest(PreprocessingTestCase): def test_default_configuration(self): - transformation, original = _test_preprocessing(KernelPCA) + transformation, original = _test_preprocessing(KernelPCA, + dataset='digits') self.assertEqual(transformation.shape[0], original.shape[0]) self.assertFalse((transformation == 0).all()) diff --git a/tests/test_classification.py b/tests/test_classification.py index 1993546b0a..2d1f62e4c9 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -86,7 +86,9 @@ def test_configurations(self): predicted_probabiliets = cls.predict_proba(X_test_) self.assertIsInstance(predicted_probabiliets, np.ndarray) except ValueError as e: - if "Floating-point under-/overflow occurred at epoch" in e.message: + if "Floating-point under-/overflow occurred at epoch" in \ + e.message or \ + "removed all features" in e.message: continue else: print config @@ -147,10 +149,11 @@ def test_configurations_sparse(self): def test_get_hyperparameter_search_space(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space() + print cs self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(102, len(hyperparameters)) + self.assertEqual(130, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 4, len(conditions)) diff --git a/tests/test_regression.py b/tests/test_regression.py index f3e139c47c..9f5f85f584 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -79,7 +79,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(36, len(hyperparameters)) + self.assertEqual(61, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): @@ -99,8 +99,8 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): CategoricalHyperparameter('preprocessor', ['pca', ])) cs = ParamSklearnRegressor.get_hyperparameter_search_space( - exclude_preprocessors=['pca']) - self.assertNotIn('pca', str(cs)) + exclude_preprocessors=['no_preprocessing']) + self.assertNotIn('no_preprocessing', str(cs)) def test_get_hyperparameter_search_space_only_forbidden_combinations(self): self.assertRaisesRegexp(ValueError, "Configuration:\n" diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index c4cd1293e9..fd9427537f 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,10 +11,10 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(130, len(hyperparameters)) - # The three parameters which are always active are classifier, + self.assertEqual(129, len(hyperparameters)) + # The four parameters which are always active are classifier, # preprocessor and imputation strategy - self.assertEqual(len(hyperparameters) - 9, len(conditions)) + self.assertEqual(len(hyperparameters) - 3, len(conditions)) self.assertNotIn("rescaling", cs.get_hyperparameter( "preprocessor").choices) self.assertRaisesRegexp(KeyError, "Hyperparameter " From a9424721477b640dcd61435340bbc3e4d7e8902e Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Apr 2015 18:02:41 +0200 Subject: [PATCH 234/352] Improve stability with sparse datasets --- .../preprocessing/feature_agglomeration.py | 2 +- .../preprocessing/nystroem_sampler.py | 23 ++++++++++---- tests/test_classification.py | 31 +++++++++++++++++-- 3 files changed, 46 insertions(+), 10 deletions(-) diff --git a/ParamSklearn/components/preprocessing/feature_agglomeration.py b/ParamSklearn/components/preprocessing/feature_agglomeration.py index cb769a844c..1bcd2177b4 100644 --- a/ParamSklearn/components/preprocessing/feature_agglomeration.py +++ b/ParamSklearn/components/preprocessing/feature_agglomeration.py @@ -48,7 +48,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': True, - 'input': (SPARSE, DENSE), + 'input': (DENSE, ), 'output': INPUT, 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/nystroem_sampler.py b/ParamSklearn/components/preprocessing/nystroem_sampler.py index 5c70d7312a..cd30b7d595 100644 --- a/ParamSklearn/components/preprocessing/nystroem_sampler.py +++ b/ParamSklearn/components/preprocessing/nystroem_sampler.py @@ -1,3 +1,4 @@ +import numpy as np import sklearn.kernel_approximation from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -11,7 +12,7 @@ class Nystroem(ParamSklearnPreprocessingAlgorithm): - def __init__(self, kernel, n_components, gamma=None, degree=3, + def __init__(self, kernel, n_components, gamma=1.0, degree=3, coef0=1, random_state=None): self.kernel = kernel self.n_components = int(n_components) @@ -25,7 +26,7 @@ def fit(self, X, Y=None): kernel=self.kernel, n_components=self.n_components, gamma=self.gamma, degree=self.degree, coef0=self.coef0, random_state=self.random_state) - self.preprocessor.fit(X) + self.preprocessor.fit(X.astype(np.float64)) return self def transform(self, X): @@ -55,8 +56,15 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - kernel = CategoricalHyperparameter('kernel', - ['chi2', 'poly', 'rbf', 'sigmoid', 'cosine'], 'rbf') + if dataset_properties is not None and dataset_properties.get("sparse"): + allow_chi2 = False + else: + allow_chi2 = True + + possible_kernels = ['poly', 'rbf', 'sigmoid', 'cosine'] + if allow_chi2: + possible_kernels.append("chi2") + kernel = CategoricalHyperparameter('kernel', possible_kernels, 'rbf') degree = UniformIntegerHyperparameter('degree', 2, 5, 3) gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8, log=True, default=0.1) @@ -73,8 +81,11 @@ def get_hyperparameter_search_space(dataset_properties=None): degree_depends_on_poly = EqualsCondition(degree, kernel, "poly") coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"]) - gamma_condition = InCondition(gamma, kernel, ["poly", "rbf", "chi2", - "sigmoid"]) + + gamma_kernels = ["poly", "rbf", "sigmoid"] + if allow_chi2: + gamma_kernels.append("chi2") + gamma_condition = InCondition(gamma, kernel, gamma_kernels) cs.add_condition(degree_depends_on_poly) cs.add_condition(coef0_condition) cs.add_condition(gamma_condition) diff --git a/tests/test_classification.py b/tests/test_classification.py index 2d1f62e4c9..6514242851 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -1,5 +1,7 @@ __author__ = 'feurerm' +import sys +import traceback import unittest import mock @@ -124,8 +126,8 @@ def test_configurations(self): def test_configurations_sparse(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'sparse': True}) - sampler = RandomSampler(cs, 1) - for i in range(10): + sampler = RandomSampler(cs, 123456) + for i in range(1000): config = sampler.sample_configuration() X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', make_sparse=True) @@ -134,7 +136,16 @@ def test_configurations_sparse(self): cls.fit(X_train, Y_train) predictions = cls.predict(X_test) except ValueError as e: - if "Floating-point under-/overflow occurred at epoch" in e.message: + if "Floating-point under-/overflow occurred at epoch" in \ + e.message or \ + "removed all features" in e.message: + continue + else: + print config + traceback.print_tb(sys.exc_info()[2]) + raise e + except LinAlgError as e: + if "not positive definite, even with jitter" in e.message: continue else: print config @@ -146,6 +157,20 @@ def test_configurations_sparse(self): else: print config raise e + except RuntimeWarning as e: + if "invalid value encountered in sqrt" in e.message: + continue + elif "divide by zero encountered in divide" in e.message: + continue + else: + print config + raise e + except UserWarning as e: + if "FastICA did not converge" in e.message: + continue + else: + print config + raise e def test_get_hyperparameter_search_space(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space() From 6de26d7ed1eba30360b5e4a7f72d2dbeab47a072 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Sat, 2 May 2015 13:21:17 +0200 Subject: [PATCH 235/352] Feature: weighting for imbalanced classes --- ParamSklearn/base.py | 4 +- ParamSklearn/classification.py | 16 ++- .../components/classification/adaboost.py | 5 +- .../classification/decision_tree.py | 4 +- .../components/classification/extra_trees.py | 4 +- .../{liblinear.py => liblinear_svc.py} | 0 .../classification/passive_aggresive.py | 1 - .../classification/random_forest.py | 4 +- .../components/classification/ridge.py | 7 +- ParamSklearn/components/classification/sgd.py | 7 +- .../components/preprocessing/balancing.py | 113 +++++++++++++++++ ...extra_trees_preproc_for_classification.py} | 8 +- .../components/preprocessing/imputation.py | 9 +- ...inear.py => liblinear_svc_preprocessor.py} | 5 +- .../preprocessing/random_trees_embedding.py | 2 +- .../components/preprocessing/select_rates.py | 11 +- ParamSklearn/implementations/Imputation.py | 11 +- misc/support_for_imbalanced_classes.txt | 22 ++++ source/first_steps.rst | 4 +- .../classification/test_liblinear.py | 2 +- .../preprocessing/test_balancing.py | 117 ++++++++++++++++++ .../preprocessing/test_extra_trees.py | 2 +- .../preprocessing/test_liblinear.py | 2 +- tests/test_classification.py | 40 +++--- ..._create_searchspace_util_classification.py | 2 +- tests/test_textclassification.py | 2 +- 26 files changed, 341 insertions(+), 63 deletions(-) rename ParamSklearn/components/classification/{liblinear.py => liblinear_svc.py} (100%) create mode 100644 ParamSklearn/components/preprocessing/balancing.py rename ParamSklearn/components/preprocessing/{extra_trees.py => extra_trees_preproc_for_classification.py} (96%) rename ParamSklearn/components/preprocessing/{liblinear.py => liblinear_svc_preprocessor.py} (97%) create mode 100644 misc/support_for_imbalanced_classes.txt create mode 100644 tests/components/preprocessing/test_balancing.py diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index 0b728faadc..91b9651596 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -88,8 +88,8 @@ def fit(self, X, Y, fit_params=None, init_params=None): # seperate the init parameters for the single methods init_params_per_method = defaultdict(dict) - if init_params is not None: - for init_param, value in init_params: + if init_params is not None and len(init_params) != 0: + for init_param, value in init_params.items(): method, param = init_param.split(":") init_params_per_method[method][param] = value diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index f7851624b8..6f21555878 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -11,6 +11,7 @@ from ParamSklearn import components as components from ParamSklearn.base import ParamSklearnBaseEstimator from ParamSklearn.util import SPARSE +from ParamSklearn.components.preprocessing.balancing import Balancing import ParamSklearn.create_searchspace_util @@ -61,9 +62,19 @@ class ParamSklearnClassifier(ClassifierMixin, ParamSklearnBaseEstimator): """ def fit(self, X, Y, fit_params=None, init_params=None): + self.num_targets = 1 if len(Y.shape) == 1 else Y.shape[1] + + # Weighting samples has to be done here, not in the components + if self.configuration['balancing:strategy'].value == 'weighting': + balancing = Balancing(strategy='weighting') + init_params, fit_params = balancing.get_weights( + Y, self.configuration['classifier'].value, + self.configuration['preprocessor'].value, + init_params, fit_params) + super(ParamSklearnClassifier, self).fit(X, Y, fit_params=fit_params, init_params=init_params) - self.num_targets = 1 if len(Y.shape) == 1 else Y.shape[1] + return self def predict_proba(self, X, batch_size=None): @@ -415,4 +426,5 @@ def _get_estimator_components(): @staticmethod def _get_pipeline(): - return ["imputation", "rescaling", "__preprocessor__", "__estimator__"] \ No newline at end of file + return ["imputation", "rescaling", "balancing", "__preprocessor__", + "__estimator__"] \ No newline at end of file diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index b8381ff082..4974d2de75 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -25,7 +25,7 @@ def __init__(self, n_estimators, learning_rate, algorithm='SAMME.R', self.estimator = None - def fit(self, X, Y): + def fit(self, X, Y, sample_weight=None): base_estimator = sklearn.tree.DecisionTreeClassifier(max_depth=self.max_depth) self.estimator = sklearn.ensemble.AdaBoostClassifier( @@ -34,9 +34,8 @@ def fit(self, X, Y): learning_rate=self.learning_rate, algorithm=self.algorithm, random_state=self.random_state - ) - self.estimator.fit(X, Y) + self.estimator.fit(X, Y, sample_weight=sample_weight) return self def predict(self, X): diff --git a/ParamSklearn/components/classification/decision_tree.py b/ParamSklearn/components/classification/decision_tree.py index 1c6c9ff06f..d0f8a98357 100644 --- a/ParamSklearn/components/classification/decision_tree.py +++ b/ParamSklearn/components/classification/decision_tree.py @@ -35,7 +35,7 @@ def __init__(self, criterion, max_features, max_depth, self.random_state = random_state self.estimator = None - def fit(self, X, y): + def fit(self, X, y, sample_weight=None): self.estimator = DecisionTreeClassifier( criterion=self.criterion, max_depth=self.max_depth, @@ -43,7 +43,7 @@ def fit(self, X, y): min_samples_leaf=self.min_samples_leaf, max_leaf_nodes=self.max_leaf_nodes, random_state=self.random_state) - self.estimator.fit(X, y) + self.estimator.fit(X, y, sample_weight=sample_weight) return self def predict(self, X): diff --git a/ParamSklearn/components/classification/extra_trees.py b/ParamSklearn/components/classification/extra_trees.py index c4cf44a5af..1d923bc7d1 100644 --- a/ParamSklearn/components/classification/extra_trees.py +++ b/ParamSklearn/components/classification/extra_trees.py @@ -59,7 +59,7 @@ def __init__(self, n_estimators, criterion, min_samples_leaf, self.verbose = int(verbose) self.estimator = None - def fit(self, X, Y): + def fit(self, X, Y, sample_weight=None): num_features = X.shape[1] max_features = int(float(self.max_features) * (np.log(num_features) + 1)) # Use at most half of the features @@ -78,7 +78,7 @@ def fit(self, X, Y): while len(self.estimator.estimators_) < self.n_estimators: tmp = self.estimator # TODO copy ? tmp.n_estimators += self.estimator_increment - tmp.fit(X, Y) + tmp.fit(X, Y, sample_weight=sample_weight) self.estimator = tmp return self diff --git a/ParamSklearn/components/classification/liblinear.py b/ParamSklearn/components/classification/liblinear_svc.py similarity index 100% rename from ParamSklearn/components/classification/liblinear.py rename to ParamSklearn/components/classification/liblinear_svc.py diff --git a/ParamSklearn/components/classification/passive_aggresive.py b/ParamSklearn/components/classification/passive_aggresive.py index 070845bfd7..61d95dd617 100644 --- a/ParamSklearn/components/classification/passive_aggresive.py +++ b/ParamSklearn/components/classification/passive_aggresive.py @@ -4,7 +4,6 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter, UnParametrizedHyperparameter, \ UniformIntegerHyperparameter -from HPOlibConfigSpace.conditions import EqualsCondition from ParamSklearn.components.classification_base import \ ParamSklearnClassificationAlgorithm diff --git a/ParamSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py index 7c45259c94..e3f13e71a7 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -28,7 +28,7 @@ def __init__(self, n_estimators, criterion, max_features, self.n_jobs = n_jobs self.estimator = None - def fit(self, X, Y): + def fit(self, X, Y, sample_weight=None): self.n_estimators = int(self.n_estimators) if self.max_depth == "None": @@ -67,7 +67,7 @@ def fit(self, X, Y): while len(self.estimator.estimators_) < self.n_estimators: tmp = self.estimator # TODO I think we need to copy here! tmp.n_estimators += self.estimator_increment - tmp.fit(X, Y) + tmp.fit(X, Y, sample_weight=sample_weight) self.estimator = tmp return self diff --git a/ParamSklearn/components/classification/ridge.py b/ParamSklearn/components/classification/ridge.py index ac7d4966a2..6cfbc01be9 100644 --- a/ParamSklearn/components/classification/ridge.py +++ b/ParamSklearn/components/classification/ridge.py @@ -13,17 +13,20 @@ class Ridge(ParamSklearnClassificationAlgorithm): - def __init__(self, alpha, fit_intercept, tol, random_state=None): + def __init__(self, alpha, fit_intercept, tol, class_weight=None, + random_state=None): self.alpha = float(alpha) self.fit_intercept = bool(fit_intercept) self.tol = float(tol) + self.class_weight = class_weight self.random_state = random_state self.estimator = None def fit(self, X, Y): self.estimator = RidgeClassifier(alpha=self.alpha, fit_intercept=self.fit_intercept, - tol=self.tol) + tol=self.tol, + class_weight=self.class_weight) self.estimator.fit(X, Y) return self diff --git a/ParamSklearn/components/classification/sgd.py b/ParamSklearn/components/classification/sgd.py index d1aecf3547..38f673eaad 100644 --- a/ParamSklearn/components/classification/sgd.py +++ b/ParamSklearn/components/classification/sgd.py @@ -13,7 +13,7 @@ class SGD(ParamSklearnClassificationAlgorithm): def __init__(self, loss, penalty, alpha, fit_intercept, n_iter, - learning_rate, class_weight, l1_ratio=0.15, epsilon=0.1, + learning_rate, class_weight=None, l1_ratio=0.15, epsilon=0.1, eta0=0.01, power_t=0.5, random_state=None): self.loss = loss self.penalty = penalty @@ -111,10 +111,6 @@ def get_hyperparameter_search_space(dataset_properties=None): ["optimal", "invscaling", "constant"], default="optimal") eta0 = UniformFloatHyperparameter("eta0", 10**-7, 0.1, default=0.01) power_t = UniformFloatHyperparameter("power_t", 1e-5, 1, default=0.5) - # This does not allow for other resampling methods! - class_weight = CategoricalHyperparameter("class_weight", - ["None", "auto"], - default="None") cs = ConfigurationSpace() cs.add_hyperparameter(loss) cs.add_hyperparameter(penalty) @@ -126,7 +122,6 @@ def get_hyperparameter_search_space(dataset_properties=None): cs.add_hyperparameter(learning_rate) cs.add_hyperparameter(eta0) cs.add_hyperparameter(power_t) - cs.add_hyperparameter(class_weight) # TODO add passive/aggressive here, although not properly documented? elasticnet = EqualsCondition(l1_ratio, penalty, "elasticnet") diff --git a/ParamSklearn/components/preprocessing/balancing.py b/ParamSklearn/components/preprocessing/balancing.py new file mode 100644 index 0000000000..bde52ea0c5 --- /dev/null +++ b/ParamSklearn/components/preprocessing/balancing.py @@ -0,0 +1,113 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter + +from ParamSklearn.components.preprocessor_base import \ + ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import DENSE, SPARSE, INPUT + + +class Balancing(ParamSklearnPreprocessingAlgorithm): + def __init__(self, strategy, random_state=None): + self.strategy = strategy + + def fit(self, X, y=None): + raise NotImplementedError() + + def transform(self, X): + raise NotImplementedError() + + def get_weights(self, Y, classifier, preprocessor, init_params, fit_params): + if init_params is None: + init_params = {} + + if fit_params is None: + fit_params = {} + + # Classifiers which require sample weights: + # We can have adaboost in here, because in the fit method, + # the sample weights are normalized: + # https://github.com/scikit-learn/scikit-learn/blob/0.15.X/sklearn/ensemble/weight_boosting.py#L121 + clf_ = ['adaboost', 'decision_tree', 'extra_trees', 'random_forest', + 'gradient_boosting'] + pre_ = ['extra_trees_preproc_for_classification'] + if classifier in clf_ or preprocessor in pre_: + if len(Y.shape) > 1: + offsets = [2 ** i for i in range(Y.shape[1])] + Y_ = np.sum(Y * offsets, axis=1) + else: + Y_ = Y + + unique, counts = np.unique(Y_, return_counts=True) + cw = 1. / counts + cw = cw / np.mean(cw) + + sample_weights = np.ones(Y_.shape) + + for i, ue in enumerate(unique): + mask = Y_ == ue + sample_weights[mask] *= cw[i] + + if classifier in clf_: + fit_params['%s:sample_weight' % classifier] = sample_weights + if preprocessor in pre_: + fit_params['%s:sample_weight' % preprocessor] = sample_weights + + # Classifiers which can adjust sample weights themselves via the + # argument `class_weight` + clf_ = ['liblinear_svc', 'libsvm_svc', 'sgd'] + pre_ = ['liblinear_svc_preprocessor'] + if classifier in clf_: + init_params['%s:class_weight' % classifier] = 'auto' + if preprocessor in pre_: + init_params['%s:class_weight' % preprocessor] = 'auto' + + clf_ = ['ridge'] + if classifier in clf_: + class_weights = {} + + unique, counts = np.unique(Y, return_counts=True) + cw = 1. / counts + cw = cw / np.mean(cw) + + for i, ue in enumerate(unique): + class_weights[ue] = cw[i] + + if classifier in clf_: + init_params['%s:class_weight' % classifier] = class_weights + + return init_params, fit_params + + @staticmethod + def get_properties(): + return {'shortname': 'Balancing', + 'name': 'Balancing Imbalanced Class Distributions', + 'handles_missing_values': True, + 'handles_nominal_values': True, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (DENSE, SPARSE), + 'output': INPUT, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + # TODO add replace by zero! + strategy = CategoricalHyperparameter( + "strategy", ["none", "weighting"], default="none") + cs = ConfigurationSpace() + cs.add_hyperparameter(strategy) + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/extra_trees.py b/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py similarity index 96% rename from ParamSklearn/components/preprocessing/extra_trees.py rename to ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py index f0787dca7f..254eb2fb00 100644 --- a/ParamSklearn/components/preprocessing/extra_trees.py +++ b/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py @@ -7,7 +7,7 @@ from ParamSklearn.components.preprocessor_base import \ ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.util import DENSE, INPUT # get our own forests to replace the sklearn ones from ParamSklearn.implementations import forest @@ -60,7 +60,7 @@ def __init__(self, n_estimators, criterion, min_samples_leaf, self.verbose = int(verbose) self.preprocessor = None - def fit(self, X, Y): + def fit(self, X, Y, sample_weight=None): num_features = X.shape[1] max_features = int( float(self.max_features) * (np.log(num_features) + 1)) @@ -80,7 +80,7 @@ def fit(self, X, Y): while len(self.preprocessor.estimators_) < self.n_estimators: tmp = self.preprocessor # TODO copy ? tmp.n_estimators += self.estimator_increment - tmp.fit(X, Y) + tmp.fit(X, Y, sample_weight=sample_weight) self.preprocessor = tmp return self @@ -106,7 +106,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, ), - 'output': PREDICTIONS, + 'output': INPUT, # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/preprocessing/imputation.py b/ParamSklearn/components/preprocessing/imputation.py index df2005f021..81f3a8274c 100644 --- a/ParamSklearn/components/preprocessing/imputation.py +++ b/ParamSklearn/components/preprocessing/imputation.py @@ -1,4 +1,5 @@ -import ParamSklearn.implementations.Imputation +#import ParamSklearn.implementations.Imputation +import sklearn.preprocessing from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter @@ -13,9 +14,9 @@ def __init__(self, strategy, random_state=None): self.strategy = strategy def fit(self, X, y=None): - self.preprocessor = ParamSklearn.implementations.Imputation.Imputer( - strategy=self.strategy, copy=False, dtype=X.dtype) - self.preprocessor.fit(X) + self.preprocessor = sklearn.preprocessing.Imputer( + strategy=self.strategy, copy=False) #, dtype=X.dtype) + self.preprocessor = self.preprocessor.fit(X) return self def transform(self, X): diff --git a/ParamSklearn/components/preprocessing/liblinear.py b/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py similarity index 97% rename from ParamSklearn/components/preprocessing/liblinear.py rename to ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py index d6f03b131a..fc087fd619 100644 --- a/ParamSklearn/components/preprocessing/liblinear.py +++ b/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py @@ -8,8 +8,7 @@ from ParamSklearn.components.preprocessor_base import \ ParamSklearnPreprocessingAlgorithm -from ParamSklearn.implementations.util import softmax -from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS +from ParamSklearn.util import SPARSE, DENSE, INPUT class LibLinear_Preprocessor(ParamSklearnPreprocessingAlgorithm): @@ -73,7 +72,7 @@ def get_properties(): # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use 'handles_sparse': True, 'input': (SPARSE, DENSE), - 'output': PREDICTIONS, + 'output': INPUT, # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/random_trees_embedding.py b/ParamSklearn/components/preprocessing/random_trees_embedding.py index 6c6a0d9c76..4640680ec9 100644 --- a/ParamSklearn/components/preprocessing/random_trees_embedding.py +++ b/ParamSklearn/components/preprocessing/random_trees_embedding.py @@ -39,7 +39,7 @@ def fit(self, X, Y=None): n_jobs=self.n_jobs, random_state=self.random_state ) - self.preprocessor.fit(X) + self.preprocessor.fit(X, Y) return self def transform(self, X): diff --git a/ParamSklearn/components/preprocessing/select_rates.py b/ParamSklearn/components/preprocessing/select_rates.py index 284b5c222b..582a8d038f 100644 --- a/ParamSklearn/components/preprocessing/select_rates.py +++ b/ParamSklearn/components/preprocessing/select_rates.py @@ -35,7 +35,16 @@ def fit(self, X, y): def transform(self, X): if self.preprocessor is None: raise NotImplementedError() - Xt = self.preprocessor.transform(X) + try: + Xt = self.preprocessor.transform(X) + except ValueError as e: + if "zero-size array to reduction operation maximum which has no " \ + "identity" in e.message: + raise ValueError( + "%s removed all features." % self.__class__.__name__) + else: + raise e + if Xt.shape[1] == 0: raise ValueError( "%s removed all features." % self.__class__.__name__) diff --git a/ParamSklearn/implementations/Imputation.py b/ParamSklearn/implementations/Imputation.py index 1115f2c2a5..300f96c284 100644 --- a/ParamSklearn/implementations/Imputation.py +++ b/ParamSklearn/implementations/Imputation.py @@ -115,8 +115,9 @@ class Imputer(BaseEstimator, TransformerMixin): - If `axis=0`, then impute along columns. - If `axis=1`, then impute along rows. - dtype : np.dtype - Determines the dtype of the transformed array. + dtype : np.dtype (default=np.float64) + Determines the dtype of the transformed array if it is dense. Has no + effect otherwise. verbose : integer, optional (default=0) Controls the verbosity of the imputer. @@ -183,14 +184,18 @@ def fit(self, X, y=None): # transform(X), the imputation data will be computed in transform() # when the imputation is done per sample (i.e., when axis=1). if self.axis == 0: - X = atleast2d_or_csc(X, dtype=self.dtype, force_all_finite=False) + if sparse.issparse(X): + X = atleast2d_or_csc(X, dtype=np.float64, + force_all_finite=False) self.statistics_ = self._sparse_fit(X, self.strategy, self.missing_values, self.axis) else: + X = atleast2d_or_csc(X, dtype=self.dtype, + force_all_finite=False) self.statistics_ = self._dense_fit(X, self.strategy, self.missing_values, diff --git a/misc/support_for_imbalanced_classes.txt b/misc/support_for_imbalanced_classes.txt new file mode 100644 index 0000000000..e69229c5a0 --- /dev/null +++ b/misc/support_for_imbalanced_classes.txt @@ -0,0 +1,22 @@ +AdaBoost: Sample weights. If None, the sample weights are initialized to 1 / n_samples. +Bernoulli_NB: Weights applied to individual samples (1. for unweighted). +DecisionTree: Sample weights. If None, then samples are equally weighted. Splits that would create child nodes with net zero or negative weight are ignored while searching for a split in each node. In the case of classification, splits are also ignored if they would result in any single class carrying a negative weight in either child node. +ExtraTrees: Sample weights. If None, then samples are equally weighted. Splits that would create child nodes with net zero or negative weight are ignored while searching for a split in each node. In the case of classification, splits are also ignored if they would result in any single class carrying a negative weight in either child node. +GaussianNB: - +GB: - +kNN: - +LDA: priors : array, optional, shape = [n_classes] ? +LibLinear: class_weight : {dict, ‘auto’}, optional +SVC: class_weight : {dict, ‘auto’}, optional; Per-sample weights. Rescale C per sample. Higher weights force the classifier to put more emphasis on these points. +MultinomialNB: - +PA: sample_weight : array-like, shape = [n_samples], optional +QDA: - +RF: sample_weight : array-like, shape = [n_samples] or None +RidgeClassifier:class_weight : dict, optional +SGD :class_weight : dict, {class_label + + + + +Preprocessors: + diff --git a/source/first_steps.rst b/source/first_steps.rst index c1c06c9597..2285ddb2be 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -18,10 +18,10 @@ configuration on the iris dataset. >>> np.random.seed(1) >>> np.random.shuffle(indices) >>> configuration_space = ParamSklearnClassifier.get_hyperparameter_search_space() - >>> sampler = RandomSampler(configuration_space, 5) + >>> sampler = RandomSampler(configuration_space, 3) >>> configuration = sampler.sample_configuration() >>> cls = ParamSklearnClassifier(configuration, random_state=1) >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.95999999999999996 + 0.90000000000000002 diff --git a/tests/components/classification/test_liblinear.py b/tests/components/classification/test_liblinear.py index 7242f946ab..167397fd33 100644 --- a/tests/components/classification/test_liblinear.py +++ b/tests/components/classification/test_liblinear.py @@ -1,6 +1,6 @@ import unittest -from ParamSklearn.components.classification.liblinear import LibLinear_SVC +from ParamSklearn.components.classification.liblinear_svc import LibLinear_SVC from ParamSklearn.util import _test_classifier diff --git a/tests/components/preprocessing/test_balancing.py b/tests/components/preprocessing/test_balancing.py new file mode 100644 index 0000000000..d916778b9e --- /dev/null +++ b/tests/components/preprocessing/test_balancing.py @@ -0,0 +1,117 @@ +__author__ = 'feurerm' + +import unittest + +import numpy as np +import sklearn.metrics + +from HPOlibConfigSpace.hyperparameters import InactiveHyperparameter + +from ParamSklearn.components.preprocessing.balancing import Balancing +from ParamSklearn.classification import ParamSklearnClassifier +from ParamSklearn.components.classification.adaboost import AdaboostClassifier +from ParamSklearn.components.classification.decision_tree import DecisionTree +from ParamSklearn.components.classification.extra_trees import ExtraTreesClassifier +from ParamSklearn.components.classification.gradient_boosting import GradientBoostingClassifier +from ParamSklearn.components.classification.random_forest import RandomForest +from ParamSklearn.components.classification.liblinear_svc import LibLinear_SVC +from ParamSklearn.components.classification.libsvm_svc import LibSVM_SVC +from ParamSklearn.components.classification.sgd import SGD +from ParamSklearn.components.classification.ridge import Ridge +from ParamSklearn.components.preprocessing\ + .extra_trees_preproc_for_classification import ExtraTreesPreprocessor +from ParamSklearn.components.preprocessing.liblinear_svc_preprocessor import LibLinear_Preprocessor +from ParamSklearn.components.preprocessing.random_trees_embedding import RandomTreesEmbedding +from ParamSklearn.util import get_dataset + + +class BalancingComponentTest(unittest.TestCase): + def test_balancing_get_weights_treed_single_label(self): + Y = np.array([0] * 80 + [1] * 20) + balancing = Balancing(strategy='weighting') + init_params, fit_params = balancing.get_weights( + Y, 'random_forest', None, None, None) + self.assertTrue(np.allclose(fit_params['random_forest:sample_weight'], + np.array([0.4] * 80 + [1.6] * 20))) + init_params, fit_params = balancing.get_weights( + Y, None, 'extra_trees_preproc_for_classification', None, None) + self.assertTrue(np.allclose(fit_params['extra_trees_preproc_for_classification:sample_weight'], + np.array([0.4] * 80 + [1.6] * 20))) + + def test_balancing_get_weights_treed_multilabel(self): + Y = np.array([[0, 0, 0]] * 100 + [[1, 0, 0]] * 100 + [[0, 1, 0]] * 100 + + [[1, 1, 0]] * 100 + [[0, 0, 1]] * 100 + [[1, 0, 1]] * 10) + balancing = Balancing(strategy='weighting') + init_params, fit_params = balancing.get_weights( + Y, 'random_forest', None, None, None) + self.assertTrue(np.allclose(fit_params['random_forest:sample_weight'], + np.array([0.4] * 500 + [4.0] * 10))) + init_params, fit_params = balancing.get_weights( + Y, None, 'extra_trees_preproc_for_classification', None, None) + self.assertTrue(np.allclose(fit_params['extra_trees_preproc_for_classification:sample_weight'], + np.array([0.4] * 500 + [4.0] * 10))) + + def test_balancing_get_weights_svm_sgd(self): + Y = np.array([0] * 80 + [1] * 20) + balancing = Balancing(strategy='weighting') + init_params, fit_params = balancing.get_weights( + Y, 'libsvm_svc', None, None, None) + self.assertEqual(("libsvm_svc:class_weight", "auto"), + init_params.items()[0]) + init_params, fit_params = balancing.get_weights( + Y, None, 'liblinear_svc_preprocessor', None, None) + self.assertEqual(("liblinear_svc_preprocessor:class_weight", "auto"), + init_params.items()[0]) + + def test_balancing_get_weights_ridge(self): + Y = np.array([0] * 80 + [1] * 20) + balancing = Balancing(strategy='weighting') + init_params, fit_params = balancing.get_weights( + Y, 'ridge', None, None, None) + self.assertAlmostEqual(0.4, init_params['ridge:class_weight'][0]) + self.assertAlmostEqual(1.6, init_params['ridge:class_weight'][1]) + + def test_weighting_effect(self): + for name, clf, acc_no_weighting, acc_weighting in \ + [('adaboost', AdaboostClassifier, 0.692, 0.719), + ('decision_tree', DecisionTree, 0.712, 0.668), + ('extra_trees', ExtraTreesClassifier, 0.910, 0.913), + ('random_forest', RandomForest, 0.896, 0.895), + ('libsvm_svc', LibSVM_SVC, 0.915, 0.937), + ('liblinear_svc', LibLinear_SVC, 0.920, 0.923), + ('sgd', SGD, 0.879, 0.906), + ('ridge', Ridge, 0.868, 0.880)]: + for strategy, acc in [('none', acc_no_weighting), + ('weighting', acc_weighting)]: + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + cs = ParamSklearnClassifier.get_hyperparameter_search_space( + include_estimators=[name]) + default = cs.get_default_configuration() + default.values['balancing:strategy'].value = strategy + classifier = ParamSklearnClassifier(default, random_state=1) + predictor = classifier.fit(X_train, Y_train) + predictions = predictor.predict(X_test) + self.assertAlmostEqual(acc, + sklearn.metrics.accuracy_score(predictions, Y_test), + places=3) + + for name, pre, acc_no_weighting, acc_weighting in \ + [('extra_trees_preproc_for_classification', + ExtraTreesPreprocessor, 0.900, 0.908), + ('liblinear_svc_preprocessor', LibLinear_Preprocessor, + 0.907, 0.882)]: + for strategy, acc in [('none', acc_no_weighting), + ('weighting', acc_weighting)]: + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + + cs = ParamSklearnClassifier.get_hyperparameter_search_space( + include_estimators=['sgd'], include_preprocessors=[name]) + default = cs.get_default_configuration() + default.values['balancing:strategy'].value = strategy + classifier = ParamSklearnClassifier(default, random_state=1) + predictor = classifier.fit(X_train, Y_train) + predictions = predictor.predict(X_test) + self.assertAlmostEqual(acc, + sklearn.metrics.accuracy_score( + predictions, Y_test), + places=3) \ No newline at end of file diff --git a/tests/components/preprocessing/test_extra_trees.py b/tests/components/preprocessing/test_extra_trees.py index 3826f45b0e..2e912475f6 100644 --- a/tests/components/preprocessing/test_extra_trees.py +++ b/tests/components/preprocessing/test_extra_trees.py @@ -1,7 +1,7 @@ import unittest from sklearn.linear_model import RidgeClassifier -from ParamSklearn.components.preprocessing.extra_trees import \ +from ParamSklearn.components.preprocessing.extra_trees_preproc_for_classification import \ ExtraTreesPreprocessor from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset diff --git a/tests/components/preprocessing/test_liblinear.py b/tests/components/preprocessing/test_liblinear.py index e7532d8aab..a6c1b394ae 100644 --- a/tests/components/preprocessing/test_liblinear.py +++ b/tests/components/preprocessing/test_liblinear.py @@ -1,7 +1,7 @@ import unittest from sklearn.linear_model import RidgeClassifier -from ParamSklearn.components.preprocessing.liblinear import \ +from ParamSklearn.components.preprocessing.liblinear_svc_preprocessor import \ LibLinear_Preprocessor from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset diff --git a/tests/test_classification.py b/tests/test_classification.py index 6514242851..46e6dd67b5 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -126,8 +126,8 @@ def test_configurations(self): def test_configurations_sparse(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'sparse': True}) - sampler = RandomSampler(cs, 123456) - for i in range(1000): + sampler = RandomSampler(cs, 1) + for i in range(10): config = sampler.sample_configuration() X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', make_sparse=True) @@ -178,10 +178,10 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(130, len(hyperparameters)) + self.assertEqual(146, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy - self.assertEqual(len(hyperparameters) - 4, len(conditions)) + self.assertEqual(len(hyperparameters) - 5, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space( @@ -205,6 +205,7 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): def test_get_hyperparameter_search_space_only_forbidden_combinations(self): self.assertRaisesRegexp(ValueError, "Default Configuration:\n" + " balancing:strategy, Value: none\n" " classifier, Value: multinomial_nb\n" " imputation:strategy, Value: mean\n" " multinomial_nb:alpha, Value: 1.000000\n" @@ -222,23 +223,24 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): # It must also be catched that no classifiers which can handle sparse # data are located behind the densifier self.assertRaisesRegexp(ValueError, "Configuration:\n" - " classifier, Value: liblinear\n" + " balancing:strategy, Value: none\n" + " classifier, Value: liblinear_svc\n" " imputation:strategy, Value: mean\n" - " liblinear:C, Value: 1.000000\n" - " liblinear:class_weight, Value: None\n" - " liblinear:dual, Constant: False\n" - " liblinear:fit_intercept, Constant: True\n" - " liblinear:intercept_scaling, Constant: 1\n" - " liblinear:loss, Value: l2\n" - " liblinear:multi_class, Constant: ovr\n" - " liblinear:penalty, Value: l2\n" - " liblinear:tol, Value: 0.000100\n" + " liblinear_svc:C, Value: 1.000000\n" + " liblinear_svc:class_weight, Value: None\n" + " liblinear_svc:dual, Constant: False\n" + " liblinear_svc:fit_intercept, Constant: True\n" + " liblinear_svc:intercept_scaling, Constant: 1\n" + " liblinear_svc:loss, Value: l2\n" + " liblinear_svc:multi_class, Constant: ovr\n" + " liblinear_svc:penalty, Value: l2\n" + " liblinear_svc:tol, Value: 0.000100\n" " preprocessor, Value: densifier\n" " rescaling:strategy, Value: min/max\n" - "violates forbidden clause \(Forbidden: classifier == liblinear &&" + "violates forbidden clause \(Forbidden: classifier == liblinear_svc &&" " Forbidden: preprocessor == densifier\)", ParamSklearnClassifier.get_hyperparameter_search_space, - include_estimators=['liblinear'], + include_estimators=['liblinear_svc'], include_preprocessors=['densifier'], dataset_properties={'sparse': True}) @@ -309,7 +311,8 @@ def test_predict_batched_sparse(self): # Densifier + RF is the only combination that easily tests sparse # data with multilabel classification! config = Configuration(cs, - hyperparameters={"classifier": "random_forest", + hyperparameters={"balancing:strategy": "none", + "classifier": "random_forest", "imputation:strategy": "mean", "preprocessor": "densifier", 'random_forest:bootstrap': 'True', @@ -392,7 +395,8 @@ def test_predict_proba_batched_sparse(self): # Densifier + RF is the only combination that easily tests sparse # data with multilabel classification! config = Configuration(cs, - hyperparameters={"classifier": "random_forest", + hyperparameters={"balancing:strategy": "none", + "classifier": "random_forest", "imputation:strategy": "mean", "preprocessor": "densifier", 'random_forest:bootstrap': 'True', diff --git a/tests/test_create_searchspace_util_classification.py b/tests/test_create_searchspace_util_classification.py index 8bba36527f..16eaea42c4 100644 --- a/tests/test_create_searchspace_util_classification.py +++ b/tests/test_create_searchspace_util_classification.py @@ -6,7 +6,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from ParamSklearn.components.classification.liblinear import LibLinear_SVC +from ParamSklearn.components.classification.liblinear_svc import LibLinear_SVC from ParamSklearn.components.classification.random_forest import RandomForest from ParamSklearn.components.preprocessing.pca import PCA diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index fd9427537f..21d304f9bf 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(129, len(hyperparameters)) + self.assertEqual(145, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) From 9c3feb11980fda6f3b6f3fc4ac2ee7f7bf20799b Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Sat, 2 May 2015 13:29:48 +0200 Subject: [PATCH 236/352] Remove dictionary learning and sparse filtering --- ParamSklearn/classification.py | 8 +- .../preprocessing/dictionary_learning.py | 93 ------------------- .../preprocessing/sparse_filtering.py | 61 ------------ source/first_steps.rst | 4 +- .../preprocessing/test_dictionary_learning.py | 40 -------- .../preprocessing/test_sparse_filtering.py | 15 --- tests/test_classification.py | 2 +- tests/test_regression.py | 2 +- tests/test_textclassification.py | 2 +- 9 files changed, 8 insertions(+), 219 deletions(-) delete mode 100644 ParamSklearn/components/preprocessing/dictionary_learning.py delete mode 100644 ParamSklearn/components/preprocessing/sparse_filtering.py delete mode 100644 tests/components/preprocessing/test_dictionary_learning.py delete mode 100644 tests/components/preprocessing/test_sparse_filtering.py diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 6f21555878..5338bf9a0b 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -309,8 +309,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, classifiers_ = ["adaboost", "extra_trees", "gradient_boosting", "k_nearest_neighbors", "libsvm_svc", "random_forest", "gaussian_nb", "gaussian_process", "decision_tree"] - feature_learning = ["kitchen_sinks", "sparse_filtering", - "nystroem_sampler", "dictionary_learning"] + feature_learning = ["kitchen_sinks", "nystroem_sampler", "dictionary_learning"] for c, f in product(classifiers_, feature_learning): if c not in classifiers_list: @@ -357,9 +356,8 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # Multinomial NB does not work with negative values, don't use # it with standardization, features learning, pca classifiers_ = ["multinomial_nb", "bernoulli_nb"] - preproc_with_negative_X = ["kitchen_sinks", "sparse_filtering", - "pca", "truncatedSVD", "fast_ica", - "kernel_pca", "nystroem_sampler"] + preproc_with_negative_X = ["kitchen_sinks", "pca", "truncatedSVD", + "fast_ica", "kernel_pca", "nystroem_sampler"] scaling_strategies = ['standard', 'none'] for c in classifiers_: if c not in classifiers_list: diff --git a/ParamSklearn/components/preprocessing/dictionary_learning.py b/ParamSklearn/components/preprocessing/dictionary_learning.py deleted file mode 100644 index 3b8e75dd0a..0000000000 --- a/ParamSklearn/components/preprocessing/dictionary_learning.py +++ /dev/null @@ -1,93 +0,0 @@ -import numpy as np -import sklearn.decomposition - -from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ - UniformIntegerHyperparameter, UniformFloatHyperparameter, Constant - -from ParamSklearn.components.preprocessor_base import \ - ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import SPARSE, DENSE, INPUT - - -class DictionaryLearning(ParamSklearnPreprocessingAlgorithm): - def __init__(self, n_components, alpha, max_iter, tol, fit_algorithm, - transform_algorithm, transform_alpha, split_sign, - random_state=None): - self.n_components = int(n_components) - self.alpha = float(alpha) - self.max_iter = int(max_iter) - self.tol = float(tol) - self.fit_algorithm = fit_algorithm - self.transform_algorithm = transform_algorithm - self.transform_alpha = bool(transform_alpha) - self.split_sign = bool(split_sign) - self.random_state = random_state - - def fit(self, X, Y=None): - self.preprocessor = sklearn.decomposition.DictionaryLearning( - n_components=self.n_components, alpha=self.alpha, - max_iter=self.max_iter, tol=self.tol, - fit_algorithm=self.fit_algorithm, - transform_algorithm=self.transform_algorithm, - transform_alpha=self.transform_alpha, - split_sign=self.split_sign, random_state=self.random_state - ) - X = X.astype(np.float64) - self.preprocessor.fit(X) - return self - - def transform(self, X): - if self.preprocessor is None: - raise NotImplementedError() - return self.preprocessor.transform(X) - - @staticmethod - def get_properties(): - return {'shortname': 'Dictionary Learning', - 'name': 'Dictionary Learning', - 'handles_missing_values': False, - 'handles_nominal_values': False, - 'handles_numerical_features': True, - 'prefers_data_scaled': True, - 'prefers_data_normalized': True, - 'handles_regression': True, - 'handles_classification': True, - 'handles_multiclass': True, - 'handles_multilabel': True, - 'is_deterministic': False, - 'handles_sparse': True, - 'handles_dense': True, - 'input': (DENSE, ), - 'output': INPUT, - 'preferred_dtype': None} - - @staticmethod - def get_hyperparameter_search_space(dataset_properties=None): - n_components = UniformIntegerHyperparameter( - "n_components", 50, 2000, default=100) - alpha = UniformFloatHyperparameter( - "alpha", 1e-5, 10, 1, log=True) - max_iter = UniformIntegerHyperparameter( - "max_iter", 50, 500, default=100) - tol = UniformFloatHyperparameter('tol', 1e-9, 1e-3, 1e-8, log=True) - fit_algorithm = CategoricalHyperparameter('fit_algorithm', - ['lars', 'cd'], 'lars') - transform_algorithm = CategoricalHyperparameter('transform_algorithm', - ['lasso_lars', 'lasso_cd', 'lars', 'omp', 'threshold'], 'omp') - transform_alpha = UniformFloatHyperparameter('transform_alpha', - 0.1, 10., 1., log=True) - split_sign = CategoricalHyperparameter('split_sign', ['False', - 'True'], 'False') - cs = ConfigurationSpace() - cs.add_hyperparameter(n_components) - cs.add_hyperparameter(alpha) - cs.add_hyperparameter(max_iter) - cs.add_hyperparameter(tol) - cs.add_hyperparameter(fit_algorithm) - cs.add_hyperparameter(transform_algorithm) - cs.add_hyperparameter(transform_alpha) - cs.add_hyperparameter(split_sign) - return cs - - diff --git a/ParamSklearn/components/preprocessing/sparse_filtering.py b/ParamSklearn/components/preprocessing/sparse_filtering.py deleted file mode 100644 index 6bbec7a954..0000000000 --- a/ParamSklearn/components/preprocessing/sparse_filtering.py +++ /dev/null @@ -1,61 +0,0 @@ -from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter - -from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.implementations.SparseFiltering import SparseFiltering as SparseFilteringImpl -from ParamSklearn.util import DENSE - - -class SparseFiltering(ParamSklearnPreprocessingAlgorithm): - - def __init__(self, N, maxiter=100, random_state=None): - self.N = N - self.maxiter = maxiter - self.random_state = random_state - - def fit(self, X, Y=None): - self.preprocessor = SparseFilteringImpl(self.N, self.maxiter, random_state = self.random_state) - self.preprocessor.fit(X) - return self - - def transform(self, X): - if self.preprocessor is None: - raise NotImplementedError() - return self.preprocessor.transform(X) - - @staticmethod - def get_properties(): - return {'shortname': 'PCA', - 'name': 'Principle Component Analysis', - 'handles_missing_values': False, - 'handles_nominal_values': False, - 'handles_numerical_features': True, - 'prefers_data_scaled': True, - 'prefers_data_normalized': True, - 'handles_regression': True, - 'handles_classification': True, - 'handles_multiclass': True, - 'handles_multilabel': True, - 'is_deterministic': False, - 'handles_sparse': False, - 'handles_dense': True, - 'input': (DENSE, ), - 'output': DENSE, - 'preferred_dtype': None} - - - - @staticmethod - def get_hyperparameter_search_space(dataset_properties=None): - N = UniformIntegerHyperparameter( - "N", 50, 2000, default=100) - maxiter = UniformIntegerHyperparameter( - "maxiter", 50, 500, default=100) - cs = ConfigurationSpace() - cs.add_hyperparameter(N) - cs.add_hyperparameter(maxiter) - return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name diff --git a/source/first_steps.rst b/source/first_steps.rst index 2285ddb2be..6646ccf43a 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -18,10 +18,10 @@ configuration on the iris dataset. >>> np.random.seed(1) >>> np.random.shuffle(indices) >>> configuration_space = ParamSklearnClassifier.get_hyperparameter_search_space() - >>> sampler = RandomSampler(configuration_space, 3) + >>> sampler = RandomSampler(configuration_space, 1) >>> configuration = sampler.sample_configuration() >>> cls = ParamSklearnClassifier(configuration, random_state=1) >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.90000000000000002 + 0.93999999999999995 diff --git a/tests/components/preprocessing/test_dictionary_learning.py b/tests/components/preprocessing/test_dictionary_learning.py deleted file mode 100644 index e64fb8019d..0000000000 --- a/tests/components/preprocessing/test_dictionary_learning.py +++ /dev/null @@ -1,40 +0,0 @@ -import unittest - -from sklearn.linear_model import RidgeClassifier -from ParamSklearn.components.preprocessing.dictionary_learning import \ - DictionaryLearning -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ - get_dataset -import sklearn.metrics - - -class DictionaryLearningComponentTest(PreprocessingTestCase): - def test_default_configuration(self): - transformation, original = _test_preprocessing(DictionaryLearning) - self.assertEqual(transformation.shape[0], original.shape[0]) - self.assertFalse((transformation == 0).all()) - - def test_default_configuration_classify(self): - for i in range(2): - X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', - make_sparse=False) - configuration_space = DictionaryLearning.get_hyperparameter_search_space() - default = configuration_space.get_default_configuration() - preprocessor = DictionaryLearning(random_state=1, - **{hp.hyperparameter.name: hp.value - for hp in default.values.values()}) - preprocessor.fit(X_train, Y_train) - X_train_trans = preprocessor.transform(X_train) - X_test_trans = preprocessor.transform(X_test) - - # fit a classifier on top - classifier = RidgeClassifier() - predictor = classifier.fit(X_train_trans, Y_train) - predictions = predictor.predict(X_test_trans) - accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) - self.assertAlmostEqual(accuracy, 0.81, places=2) - - @unittest.skip("Always returns float64") - def test_preprocessing_dtype(self): - super(DictionaryLearningComponentTest, - self)._test_preprocessing_dtype(DictionaryLearning) diff --git a/tests/components/preprocessing/test_sparse_filtering.py b/tests/components/preprocessing/test_sparse_filtering.py deleted file mode 100644 index a7ddc18981..0000000000 --- a/tests/components/preprocessing/test_sparse_filtering.py +++ /dev/null @@ -1,15 +0,0 @@ -import unittest - -from ParamSklearn.components.preprocessing.sparse_filtering import SparseFiltering -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase - - -class SparseFilteringComponentTest(PreprocessingTestCase): - def test_default_configuration(self): - transformation, original = _test_preprocessing(SparseFiltering) - self.assertEqual(transformation.shape[0], original.shape[0]) - self.assertFalse((transformation == 0).all()) - - @unittest.skip("Right now, the SparseFiltering returns a float64 array!") - def test_preprocessing_dtype(self): - super(SparseFilteringComponentTest, self)._test_preprocessing_dtype(SparseFiltering) \ No newline at end of file diff --git a/tests/test_classification.py b/tests/test_classification.py index 46e6dd67b5..9b0956aab1 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -178,7 +178,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(146, len(hyperparameters)) + self.assertEqual(136, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 5, len(conditions)) diff --git a/tests/test_regression.py b/tests/test_regression.py index 9f5f85f584..d2047e2018 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -79,7 +79,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(61, len(hyperparameters)) + self.assertEqual(51, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index 21d304f9bf..8645da2d11 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(145, len(hyperparameters)) + self.assertEqual(135, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) From 90e04e5cf1095a857934648359a680a55a530eb1 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Sat, 2 May 2015 13:40:17 +0200 Subject: [PATCH 237/352] Remove Gaussian Process for classification --- ParamSklearn/classification.py | 52 ++++----- .../classification/gaussian_process.py | 103 ------------------ .../components/preprocessing/balancing.py | 3 +- .../components/preprocessing/fast_ica.py | 2 +- .../components/preprocessing/kernel_pca.py | 2 +- source/first_steps.rst | 2 +- .../classification/test_gaussian_process.py | 17 --- tests/test_classification.py | 6 +- tests/test_textclassification.py | 2 +- 9 files changed, 34 insertions(+), 155 deletions(-) delete mode 100644 ParamSklearn/components/classification/gaussian_process.py delete mode 100644 tests/components/classification/test_gaussian_process.py diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 5338bf9a0b..618580f87a 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -308,8 +308,8 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # Combinations of non-linear models with feature learning: classifiers_ = ["adaboost", "extra_trees", "gradient_boosting", "k_nearest_neighbors", "libsvm_svc", "random_forest", - "gaussian_nb", "gaussian_process", "decision_tree"] - feature_learning = ["kitchen_sinks", "nystroem_sampler", "dictionary_learning"] + "gaussian_nb", "decision_tree"] + feature_learning = ["kitchen_sinks", "nystroem_sampler"] for c, f in product(classifiers_, feature_learning): if c not in classifiers_list: @@ -327,30 +327,30 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # We have seen empirically that tree-based models together with PCA # don't work better than tree-based models without preprocessing - classifiers_ = ["random_forest", "extra_trees", "gradient_boosting", - "decision_tree"] - for c in classifiers_: - if c not in classifiers_list: - continue - try: - configuration_space.add_forbidden_clause( - ForbiddenAndConjunction( - ForbiddenEqualsClause( - configuration_space.get_hyperparameter( - "preprocessor"), "pca"), - ForbiddenEqualsClause( - configuration_space.get_hyperparameter( - "classifier"), c))) - except KeyError: - pass - except ValueError as e: - if e.message.startswith("Forbidden clause must be " - "instantiated with a legal " - "hyperparameter value for " - "'preprocessor"): - pass - else: - raise e + #classifiers_ = ["random_forest", "extra_trees", "gradient_boosting", + # "decision_tree"] + #for c in classifiers_: + # if c not in classifiers_list: + # continue + # try: + # configuration_space.add_forbidden_clause( + # ForbiddenAndConjunction( + # ForbiddenEqualsClause( + # configuration_space.get_hyperparameter( + # "preprocessor"), "pca"), + # ForbiddenEqualsClause( + # configuration_space.get_hyperparameter( + # "classifier"), c))) + # except KeyError: + # pass + # except ValueError as e: + # if e.message.startswith("Forbidden clause must be " + # "instantiated with a legal " + # "hyperparameter value for " + # "'preprocessor"): + # pass + # else: + # raise e # Won't work # Multinomial NB does not work with negative values, don't use diff --git a/ParamSklearn/components/classification/gaussian_process.py b/ParamSklearn/components/classification/gaussian_process.py deleted file mode 100644 index 7a00e6128f..0000000000 --- a/ParamSklearn/components/classification/gaussian_process.py +++ /dev/null @@ -1,103 +0,0 @@ -import numpy as np - -from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant - -from sklearn.preprocessing import OneHotEncoder - -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS - - - -class GPyClassifier(ParamSklearnClassificationAlgorithm): - def __init__(self, random_state=None, n_inducing=20, ard=False): - import GPy - global GPy - - self.estimators = None - self.n_inducing = int(n_inducing) - - if ard == "True": - self.ard = True - elif ard == "False": - self.ard = False - else: - selfard = ard - - self.enc = None - - def fit(self, X, Y): - # one hot encode targets for one against all classification - self.enc = OneHotEncoder(sparse=False) - targets = self.enc.fit_transform(Y[:,None]) - - # create a list of GP models, one for each class - self.estimators = [] - for i in range(self.enc.n_values_): - # train model - white = GPy.kern._src.static.White(X.shape[1], variance=1.0, active_dims=None, name='white') - rbf = GPy.kern._src.rbf.RBF(X.shape[1], variance=1.0, lengthscale=1.0, ARD=self.ard) - kern = rbf + white - # dense - # model = GPy.models.GPClassification(X, targets[:,i,None], kernel=kern) - # sparse - model = GPy.models.SparseGPClassification(X, - targets[:,i,None], - kernel=kern, - num_inducing=self.n_inducing) - # fit kernel hyperparameters - model.optimize('bfgs', max_iters=100) - # add to list of estimators - self.estimators.append(model) - return self - - def predict(self, X): - if self.estimators is None: - raise NotImplementedError - # get probabilities for each class - probs = np.zeros([len(X), len(self.estimators)]) - for i, model in enumerate(self.estimators): - probs[:,i] = model.predict(X)[0].flatten() - # return the most probable label - return self.enc.active_features_[np.argmax(probs, 1)] - - def predict_proba(self, X): - if self.estimators is None: - raise NotImplementedError() - probs = np.zeros([len(X), len(self.estimators)]) - for i, model in enumerate(self.estimators): - probs[:,i] = model.predict(X)[0].flatten() - # normalize to get probabilities - return probs / np.sum(probs,1)[:,None] - - @staticmethod - def get_properties(): - return {'shortname': 'GPy', - 'name': 'Gaussian Process Classifier', - 'handles_missing_values': False, - 'handles_nominal_values': False, - 'handles_numerical_features': True, - 'prefers_data_scaled': False, - # TODO find out if this is good because of sparcity... - 'prefers_data_normalized': False, - 'handles_regression': False, - 'handles_classification': True, - 'handles_multiclass': True, - 'handles_multilabel': False, - 'is_deterministic': True, - 'handles_sparse': False, - 'input': (DENSE, ), - 'output': PREDICTIONS, - # TODO find out what is best used here! - 'preferred_dtype': np.float32} - - @staticmethod - def get_hyperparameter_search_space(dataset_properties=None): - ard = CategoricalHyperparameter("ard", ["True", "False"], default="False") - cs = ConfigurationSpace() - cs.add_hyperparameter(ard) - return cs - diff --git a/ParamSklearn/components/preprocessing/balancing.py b/ParamSklearn/components/preprocessing/balancing.py index bde52ea0c5..ba25498996 100644 --- a/ParamSklearn/components/preprocessing/balancing.py +++ b/ParamSklearn/components/preprocessing/balancing.py @@ -29,8 +29,7 @@ def get_weights(self, Y, classifier, preprocessor, init_params, fit_params): # We can have adaboost in here, because in the fit method, # the sample weights are normalized: # https://github.com/scikit-learn/scikit-learn/blob/0.15.X/sklearn/ensemble/weight_boosting.py#L121 - clf_ = ['adaboost', 'decision_tree', 'extra_trees', 'random_forest', - 'gradient_boosting'] + clf_ = ['adaboost', 'decision_tree', 'extra_trees', 'random_forest'] pre_ = ['extra_trees_preproc_for_classification'] if classifier in clf_ or preprocessor in pre_: if len(Y.shape) > 1: diff --git a/ParamSklearn/components/preprocessing/fast_ica.py b/ParamSklearn/components/preprocessing/fast_ica.py index 072df494d5..b8cf56810d 100644 --- a/ParamSklearn/components/preprocessing/fast_ica.py +++ b/ParamSklearn/components/preprocessing/fast_ica.py @@ -63,7 +63,7 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): n_components = UniformIntegerHyperparameter( - "n_components", 50, 2000, default=100) + "n_components", 10, 2000, default=100) algorithm = CategoricalHyperparameter('algorithm', ['parallel', 'deflation'], 'parallel') whiten = CategoricalHyperparameter('whiten', diff --git a/ParamSklearn/components/preprocessing/kernel_pca.py b/ParamSklearn/components/preprocessing/kernel_pca.py index 16a47adc28..d46bf39508 100644 --- a/ParamSklearn/components/preprocessing/kernel_pca.py +++ b/ParamSklearn/components/preprocessing/kernel_pca.py @@ -63,7 +63,7 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): n_components = UniformIntegerHyperparameter( - "n_components", 50, 2000, default=100) + "n_components", 10, 2000, default=100) kernel = CategoricalHyperparameter('kernel', ['poly', 'rbf', 'sigmoid', 'cosine'], 'rbf') degree = UniformIntegerHyperparameter('degree', 2, 5, 3) diff --git a/source/first_steps.rst b/source/first_steps.rst index 6646ccf43a..51b9e0ff46 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -24,4 +24,4 @@ configuration on the iris dataset. >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.93999999999999995 + 0.73999999999999999 diff --git a/tests/components/classification/test_gaussian_process.py b/tests/components/classification/test_gaussian_process.py deleted file mode 100644 index 55ee01a818..0000000000 --- a/tests/components/classification/test_gaussian_process.py +++ /dev/null @@ -1,17 +0,0 @@ -import unittest - -from ParamSklearn.components.classification.gaussian_process import GPyClassifier -from ParamSklearn.util import _test_classifier - -import sklearn.metrics - - -class GPyClassifierComponentTest(unittest.TestCase): - def test_default_configuration(self): - for i in range(2): - predictions, targets = _test_classifier(GPyClassifier) - self.assertGreaterEqual( - sklearn.metrics.accuracy_score(predictions, targets), 0.958) - self.assertLessEqual( - sklearn.metrics.accuracy_score(predictions, targets), 0.98) - diff --git a/tests/test_classification.py b/tests/test_classification.py index 9b0956aab1..c7bf6254c1 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -76,10 +76,11 @@ def test_default_configuration(self): def test_configurations(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space() sampler = RandomSampler(cs, 1) - for i in range(10): + for i in range(1000): config = sampler.sample_configuration() X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cls = ParamSklearnClassifier(config, random_state=1) + print config try: cls.fit(X_train, Y_train) X_test_ = X_test.copy() @@ -174,11 +175,10 @@ def test_configurations_sparse(self): def test_get_hyperparameter_search_space(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space() - print cs self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(136, len(hyperparameters)) + self.assertEqual(135, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 5, len(conditions)) diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index 8645da2d11..eceae86b87 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(135, len(hyperparameters)) + self.assertEqual(134, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) From 894f1afb651d0e68aef057c92b4184834dd4d7b9 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Sat, 2 May 2015 13:45:42 +0200 Subject: [PATCH 238/352] Forbid decision tree with feature learning --- ParamSklearn/classification.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 618580f87a..032f1f6881 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -306,9 +306,10 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # which would take too long # Combinations of non-linear models with feature learning: - classifiers_ = ["adaboost", "extra_trees", "gradient_boosting", - "k_nearest_neighbors", "libsvm_svc", "random_forest", - "gaussian_nb", "decision_tree"] + classifiers_ = ["adaboost", "decision_tree", "extra_trees", + "gradient_boosting", "k_nearest_neighbors", + "libsvm_svc", "random_forest", "gaussian_nb", + "decision_tree"] feature_learning = ["kitchen_sinks", "nystroem_sampler"] for c, f in product(classifiers_, feature_learning): From eec7e69363eb6be3217a88b3eb65b310a3992604 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 5 May 2015 11:17:16 +0200 Subject: [PATCH 239/352] Reduce unnecessary memory consumption --- .../components/preprocessing/rescaling.py | 2 +- ParamSklearn/implementations/Imputation.py | 23 +++++-------------- 2 files changed, 7 insertions(+), 18 deletions(-) diff --git a/ParamSklearn/components/preprocessing/rescaling.py b/ParamSklearn/components/preprocessing/rescaling.py index 43bf8fc67d..b8210398be 100644 --- a/ParamSklearn/components/preprocessing/rescaling.py +++ b/ParamSklearn/components/preprocessing/rescaling.py @@ -30,7 +30,7 @@ def fit(self, X, Y=None): elif self.strategy == 'none': self.preprocessor = none() elif self.strategy == 'normalize': - self.preprocessor = Normalizer(norm='l2', copy=True) + self.preprocessor = Normalizer(norm='l2', copy=False) else: raise ValueError(self.strategy) self.preprocessor.fit(X) diff --git a/ParamSklearn/implementations/Imputation.py b/ParamSklearn/implementations/Imputation.py index 300f96c284..5ccf4ed2eb 100644 --- a/ParamSklearn/implementations/Imputation.py +++ b/ParamSklearn/implementations/Imputation.py @@ -184,23 +184,12 @@ def fit(self, X, y=None): # transform(X), the imputation data will be computed in transform() # when the imputation is done per sample (i.e., when axis=1). if self.axis == 0: - - - if sparse.issparse(X): - X = atleast2d_or_csc(X, dtype=np.float64, - force_all_finite=False) - self.statistics_ = self._sparse_fit(X, - self.strategy, - self.missing_values, - self.axis) - else: - X = atleast2d_or_csc(X, dtype=self.dtype, - force_all_finite=False) - self.statistics_ = self._dense_fit(X, - self.strategy, - self.missing_values, - self.axis) - + X = atleast2d_or_csc(X, dtype=self.dtype, + force_all_finite=False) + self.statistics_ = self._dense_fit(X, + self.strategy, + self.missing_values, + self.axis) return self def _sparse_fit(self, X, strategy, missing_values, axis): From e4dd5576e23795b2b5a93d965b2973d36a46ec00 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 5 May 2015 11:17:30 +0200 Subject: [PATCH 240/352] Fix test --- tests/test_classification.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_classification.py b/tests/test_classification.py index c7bf6254c1..568834b951 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -76,7 +76,7 @@ def test_default_configuration(self): def test_configurations(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space() sampler = RandomSampler(cs, 1) - for i in range(1000): + for i in range(10): config = sampler.sample_configuration() X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cls = ParamSklearnClassifier(config, random_state=1) From 27d586e19497386e83a79c7d3070d357cc193c31 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 6 May 2015 18:04:44 +0200 Subject: [PATCH 241/352] Forbid: chi^2 + normalization, fix bug with sparse matrices --- ParamSklearn/classification.py | 6 ++++++ ParamSklearn/implementations/Imputation.py | 20 ++++++++++++++------ 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 032f1f6881..db452a6d9a 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -391,14 +391,20 @@ def get_hyperparameter_search_space(cls, include_estimators=None, forbidden_hyperparameter_combinations = \ [("select_percentile_classification:score_func", "chi2", "rescaling:strategy", "standard"), + ("select_percentile_classification:score_func", "chi2", + "rescaling:strategy", "normalize"), ("select_percentile_classification:score_func", "chi2", "rescaling:strategy", "none"), ("select_rates:score_func", "chi2", "rescaling:strategy", "standard"), ("select_rates:score_func", "chi2", "rescaling:strategy", "none"), + ("select_rates:score_func", "chi2", + "rescaling:strategy", "normalize"), ("nystroem_sampler:kernel", 'chi2', "rescaling:strategy", "standard"), + ("nystroem_sampler:kernel", 'chi2', "rescaling:strategy", + "normalize"), ("nystroem_sampler:kernel", 'chi2', "rescaling:strategy", "none")] for hp_name_1, hp_value_1, hp_name_2, hp_value_2 in \ diff --git a/ParamSklearn/implementations/Imputation.py b/ParamSklearn/implementations/Imputation.py index 5ccf4ed2eb..8e6c942b05 100644 --- a/ParamSklearn/implementations/Imputation.py +++ b/ParamSklearn/implementations/Imputation.py @@ -184,12 +184,20 @@ def fit(self, X, y=None): # transform(X), the imputation data will be computed in transform() # when the imputation is done per sample (i.e., when axis=1). if self.axis == 0: - X = atleast2d_or_csc(X, dtype=self.dtype, - force_all_finite=False) - self.statistics_ = self._dense_fit(X, - self.strategy, - self.missing_values, - self.axis) + if sparse.issparse(X): + X = atleast2d_or_csc(X, dtype=np.float64, + force_all_finite=False) + self.statistics_ = self._sparse_fit(X, + self.strategy, + self.missing_values, + self.axis) + else: + X = atleast2d_or_csc(X, dtype=self.dtype, + force_all_finite=False) + self.statistics_ = self._dense_fit(X, + self.strategy, + self.missing_values, + self.axis) return self def _sparse_fit(self, X, strategy, missing_values, axis): From 4dd0a3a244e5168faf55ef0bc071e02c6dd69187 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 6 May 2015 18:05:55 +0200 Subject: [PATCH 242/352] Remove non-sklearn components in this branch --- .../components/classification/proj_logit.py | 62 ------------- ParamSklearn/components/preprocessing/gem.py | 57 ------------ ParamSklearn/implementations/ProjLogit.py | 90 ------------------- .../implementations/SparseFiltering.py | 73 --------------- ParamSklearn/implementations/gem.py | 46 ---------- tests/implementations/test_ProjLogit.py | 39 -------- .../implementations/test_sparse_filtering.py | 74 --------------- 7 files changed, 441 deletions(-) delete mode 100644 ParamSklearn/components/classification/proj_logit.py delete mode 100644 ParamSklearn/components/preprocessing/gem.py delete mode 100644 ParamSklearn/implementations/ProjLogit.py delete mode 100644 ParamSklearn/implementations/SparseFiltering.py delete mode 100644 ParamSklearn/implementations/gem.py delete mode 100644 tests/implementations/test_ProjLogit.py delete mode 100644 tests/implementations/test_sparse_filtering.py diff --git a/ParamSklearn/components/classification/proj_logit.py b/ParamSklearn/components/classification/proj_logit.py deleted file mode 100644 index e0b1cbeb4a..0000000000 --- a/ParamSklearn/components/classification/proj_logit.py +++ /dev/null @@ -1,62 +0,0 @@ -import numpy as np - -from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant - -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS -from ParamSklearn.implementations import ProjLogit - - - -class ProjLogitCLassifier(ParamSklearnClassificationAlgorithm): - - def __init__(self, max_epochs = 2, random_state=None, n_jobs=1): - self.max_epochs = max_epochs - self.estimator = None - - - def fit(self, X, Y): - self.estimator = ProjLogit.ProjLogit(max_epochs = int(self.max_epochs)) - self.estimator.fit(X, Y) - return self - - def predict(self, X): - if self.estimator is None: - raise NotImplementedError - return self.estimator.predict(X) - - def predict_proba(self, X): - if self.estimator is None: - raise NotImplementedError() - return self.estimator.predict_proba(X) - - @staticmethod - def get_properties(): - return {'shortname': 'PLogit', - 'name': 'Logistic Regresion using Least Squares', - 'handles_missing_values': False, - 'handles_nominal_values': False, - 'handles_numerical_features': True, - 'prefers_data_scaled': False, - 'prefers_data_normalized': True, - 'handles_regression': False, - 'handles_classification': True, - 'handles_multiclass': True, - 'handles_multilabel': False, - 'is_deterministic': True, - 'handles_sparse': False, - 'input': (DENSE, ), - 'output': PREDICTIONS, - 'preferred_dtype': np.float32} - - - - @staticmethod - def get_hyperparameter_search_space(dataset_properties=None): - max_epochs = UniformIntegerHyperparameter("max_epochs", 1, 20, default=2) - cs = ConfigurationSpace() - cs.add_hyperparameter(max_epochs) - return cs diff --git a/ParamSklearn/components/preprocessing/gem.py b/ParamSklearn/components/preprocessing/gem.py deleted file mode 100644 index 8c7deac191..0000000000 --- a/ParamSklearn/components/preprocessing/gem.py +++ /dev/null @@ -1,57 +0,0 @@ -from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter, UniformFloatHyperparameter - -from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.implementations.gem import GEM as GEMImpl -from ParamSklearn.util import DENSE - -class GEM(ParamSklearnPreprocessingAlgorithm): - - def __init__(self, N, precond, random_state=None): - self.N = N - self.precond = precond - - def fit(self, X, Y): - self.preprocessor = GEMImpl(self.N, self.precond) - self.preprocessor.fit(X, Y) - return self - - - def transform(self, X): - return self.preprocessor.transform(X) - - - @staticmethod - def get_properties(): - return {'shortname': 'GEM', - 'name': 'Generalized Eigenvector extraction', - 'handles_missing_values': False, - 'handles_nominal_values': False, - 'handles_numerical_features': True, - 'prefers_data_scaled': True, - 'prefers_data_normalized': True, - 'handles_regression': False, - 'handles_classification': True, - 'handles_multiclass': True, - 'handles_multilabel': False, - 'is_deterministic': True, - 'handles_sparse': False, - 'handles_dense': True, - 'input': (DENSE, ), - 'output': DENSE, - 'preferred_dtype': None} - - - @staticmethod - def get_hyperparameter_search_space(dataset_properties=None): - N = UniformIntegerHyperparameter("N", 5, 20, default=10) - precond = UniformFloatHyperparameter("precond", 0, 0.5, default=0.1) - cs = ConfigurationSpace() - cs.add_hyperparameter(N) - cs.add_hyperparameter(precond) - return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name - diff --git a/ParamSklearn/implementations/ProjLogit.py b/ParamSklearn/implementations/ProjLogit.py deleted file mode 100644 index cf12df75d9..0000000000 --- a/ParamSklearn/implementations/ProjLogit.py +++ /dev/null @@ -1,90 +0,0 @@ -import numpy as np -import numpy.random as npr - -# from http://arxiv.org/pdf/1309.1541v1.pdf -def proj_simplex(Y): - N,D = np.shape(Y) - # sort in descending order - X = -np.sort(-Y) - Xsum = np.cumsum(X, axis = 1) - 1 - Xsum = Xsum * (1./np.arange(1,D+1)) - biggest = np.sum(X > Xsum, axis = 1) - # TODO last step could be made faster - # via ravel / linear indexing - subtract = np.zeros((N, 1)) - for i in range(N): - subtract[i] = Xsum[i, biggest[i]-1] - return np.maximum(Y - subtract, 0) - - -class ProjLogit(object): - - def __init__(self, max_epochs = 10, verbose = False): - self.w0 = None - self.ws_all = [] - self.w_all = [] - self.max_epochs = max_epochs - self.verbose = verbose - - def fit(self, X, Y): - # get one hot encoding and add a bias - n = X.shape[0] - trainx = np.hstack([np.ones((n, 1)), X]) - k = np.max(Y) + 1 - if self.verbose: - print("Using {} samples of {} classes".format(n,k)) - yt = np.zeros((n, k)) - for i in range(n): - yt[i, Y[i]] = 1 - # initialize with linear regression - precond = np.eye(trainx.shape[1]) * np.sqrt(n) - C = np.linalg.cholesky(0.5 * np.dot(trainx.T,trainx) + precond) - wp = np.linalg.solve(C, np.dot(trainx.T, yt)) - w = np.linalg.solve(C.T, wp) - self.w0 = np.copy(w) - pred_train = np.dot(trainx, w) - for i in range(self.max_epochs): - # expand prediction - res = np.hstack([pred_train, np.power(pred_train, 2) / 2., np.power(pred_train, 3) / 6., np.power(pred_train, 4) / 24.]) - # solve with linear regression - precond = np.eye(res.shape[1]) * np.sqrt(n) - Cp = np.linalg.cholesky(np.dot(res.T,res) + precond) - ws = np.linalg.solve(Cp.T, np.linalg.solve(Cp, np.dot(res.T, yt))) - self.ws_all.append(np.copy(ws)) - # project to probability simplex - p_res = proj_simplex(np.dot(res, ws)) - # and solve again with updated residual - wp = np.linalg.solve(C, np.dot(trainx.T, (yt - p_res))) - w = np.linalg.solve(C.T, wp) - self.w_all.append(np.copy(w)) - pred_train = p_res + np.dot(trainx, w) - obj = np.linalg.norm(yt - pred_train) - - # compute train error - errort = np.sum(np.argmax(pred_train, axis = 1) != Y) - # print training error - if self.verbose: - print("Epoch {} obj: {} train error: {}".format(i,obj,1.*errort/n)) - return self - - - def predict(self, X): - res = self.predict_proba(X) - return np.argmax(res, axis = 1) - - def predict_proba(self, X): - if self.w0 == None: - raise NotImplementedError - testx = np.hstack([np.ones((X.shape[0], 1)), X]) - pred = np.dot(testx, self.w0) - for ws, w in zip(self.ws_all, self.w_all): - res = np.hstack([pred, np.power(pred, 2) / 2., np.power(pred, 3) / 6., np.power(pred, 4) / 24.]) - p_res = proj_simplex(np.dot(res, ws)) - pred = p_res + np.dot(testx, w) - return proj_simplex(pred) - - def predict_log_proba(self, X): - if self.w == None: - return np.zeros(X.shape[0]) - res = np.log(self.predict_proba(X)) - return res diff --git a/ParamSklearn/implementations/SparseFiltering.py b/ParamSklearn/implementations/SparseFiltering.py deleted file mode 100644 index d9a15e1079..0000000000 --- a/ParamSklearn/implementations/SparseFiltering.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -This quickly adapted version of sparse filtering requires scipy and numpy -""" -import numpy as np -from scipy.optimize import minimize - -def l2row(X): - """ - L2 normalize X by rows. We also use this to normalize by column with l2row(X.T) - """ - N = np.sqrt((X**2).sum(axis=1)+1e-8) - Y = (X.T/N).T - return Y,N - - -def l2rowg(X,Y,N,D): - """ - Compute L2 normalized gradient. - """ - return (D.T/N - Y.T * (D*X).sum(axis=1) / N**2).T - - -class SparseFiltering(object): - def __init__(self, N, maxiter=100, random_state=None): - self.N = N - self.W = None - self.maxiter = maxiter - if random_state is None: - self.rng = np.random - elif isinstance(random_state, int): - self.rng = np.random.RandomState(random_state) - else: - self.rng = random_state - - def step(self, X, W): - # returns current objective and gradient - W = W.reshape((X.shape[1], self.N)) - features = X.dot(W) #W.dot(X) - features_norm = np.sqrt(features**2 + 1e-8) - features_column, column_norm = l2row(features_norm.T) - features_row, row_norm = l2row(features_norm) - # compute objective function (l1 norm of features) - obj = features_row.sum() - # backprop through the whole process - deltaW = l2rowg(features_norm, features_row, row_norm, np.ones(features_row.shape)) - deltaW = l2rowg(features_norm.T, features_column, column_norm, deltaW.T).T - deltaW = X.T.dot(deltaW*(features/features_norm)) - return obj, deltaW.flatten() - - - def fit(self, X, y=None): - """ fit sparse filtering to data - this completely ignores y - """ - # init random weights - W = self.rng.randn(self.N,X.shape[1]) - # build a closure for the objective - obj_fun = lambda w: self.step(X, w) - # evaluate once for testing - obj, grad = obj_fun(W) - # and run optimization - opt = {'maxiter': self.maxiter} - res = minimize(obj_fun, W, method='L-BFGS-B', jac = True, options = opt) - self.W = res.x.reshape(X.shape[1], self.N) - - def transform(self, X): - # compute responses - features = X.dot(self.W) - # sparsify - features_norm = np.sqrt(features**2 + 1e-8) - features_column = l2row(features_norm.T)[0] - features_row = l2row(features_column)[0].T - return features_row diff --git a/ParamSklearn/implementations/gem.py b/ParamSklearn/implementations/gem.py deleted file mode 100644 index c220ffd70d..0000000000 --- a/ParamSklearn/implementations/gem.py +++ /dev/null @@ -1,46 +0,0 @@ -import numpy as np -from scipy.sparse.linalg import eigs - - -class GEM(object): - - - def __init__(self, N, precond): - self.N = N - self.precond = precond - self.W = None - self.verbose = False - - - def fit(self, X, Y): - self.N = min(self.N, X.shape[1]-2) - y_max = int(np.max(Y) + 1) - self.W = np.zeros((X.shape[1], self.N*y_max*(y_max-1)), dtype=X.dtype) - off = 0 - for i in range(y_max): - Xi = X[Y == i] - covi = np.dot(Xi.T, Xi) - covi /= np.float32(Xi.shape[0]) - for j in range(y_max): - if j == i: - continue - if self.verbose: - print("Finding eigenvectors for pair ({}/{})".format(i,j)) - Xj = X[Y == j] - covj = np.dot(Xj.T, Xj) / np.float32(Xj.shape[0]) - E = np.linalg.pinv(np.linalg.cholesky(covj + np.eye(covj.shape[0]) * self.precond).T) - C = np.dot(np.dot(E.T, covi), E) - C2 = 0.5 * (C + C.T) - S,U = eigs(C2, self.N) - gev = np.dot(E, U[:, :self.N]) - self.W[:, off:off+self.N] = gev - off += self.N - print("DONE") - return self - - - def transform(self, X, Y=None): - features = np.maximum(np.dot(X, self.W), 0) - return features - - diff --git a/tests/implementations/test_ProjLogit.py b/tests/implementations/test_ProjLogit.py deleted file mode 100644 index 5b9dc0442c..0000000000 --- a/tests/implementations/test_ProjLogit.py +++ /dev/null @@ -1,39 +0,0 @@ -import unittest -import os -import numpy as np -#import scipy.io - -from ParamSklearn.implementations.ProjLogit import ProjLogit - - -class TestProjLogit(unittest.TestCase): - def test_sparse_filtering(self): - """Test logistic regression implementation based on least squares""" - - # simple test that should work out - trainx = np.random.rand(100,3) - trainy = np.zeros(10000) - testx = np.random.rand(100,3) - testy = np.zeros(100) - for i in range(100): - if trainx[i, 2] > 0.5: - trainy[i] = 1 - for i in range(100): - if testx[i, 2] > 0.5: - testy[i] = 1 - - model = ProjLogit(max_epochs = 10, verbose = True) - model.fit(trainx, trainy) - print("weights 0:") - print(model.w0) - predicted_prob = model.predict_proba(testx) - predicted2 = np.argmax(predicted_prob, axis = 1) - predicted = model.predict(testx) - - #print(predicted) - #print(testy) - #print((predicted != testy).sum()) - #print((predicted2 != testy).sum()) - self.assertTrue((predicted == predicted2).all()) - self.assertTrue(((1 - predicted_prob.sum(axis=1)) < 1e-3).all()) - self.assertTrue((predicted != testy).sum() < 20) diff --git a/tests/implementations/test_sparse_filtering.py b/tests/implementations/test_sparse_filtering.py deleted file mode 100644 index 42d504b53f..0000000000 --- a/tests/implementations/test_sparse_filtering.py +++ /dev/null @@ -1,74 +0,0 @@ -import unittest -import os -import numpy as np - -from ParamSklearn.implementations.SparseFiltering import SparseFiltering - - -class TestSparseFiltering(unittest.TestCase): - def test_sparse_filtering(self): - """Test sparse filtering on a simple dataset""" - # load a few patches of image data from a file which is currently hard coded :) - # JTS TODO: remove this hard coding - dataset = "/home/springj/data/image_patches.npz" - # try not to break testing if data is not available - if (not os.path.isfile(dataset)): - return - patches = np.load(dataset) - data = patches['data'] - preprocess = SparseFiltering(256, random_state = 123456) - print("BEFORE") - preprocess.fit(data) - # JTS TODO: figure out a better test than this nonsense here ;) - self.assertFalse((preprocess.W == 0).all()) - """ - # JTS: the following is only useful for visualization purposes - # turn it on if you want to see sparse filtering in action on image data ;) - import pylab - # method for eyeballing the features - # assumes features in ROWS not columns! - def displayData(X, example_width = False, display_cols = False): - # compute rows, cols - m,n = X.shape - if not example_width: - example_width = int(np.round(np.sqrt(n))) - example_height = (n/example_width) - # Compute number of items to display - if not display_cols: - display_cols = int(np.sqrt(m)) - display_rows = int(np.ceil(m/display_cols)) - pad = 1 - # Setup blank display - display_array = -np.ones((pad+display_rows * (example_height+pad), - pad+display_cols * (example_width+pad))) - # Copy each example into a patch on the display array - curr_ex = 0 - for j in range(display_rows): - for i in range(display_cols): - if curr_ex>=m: - break - # Copy the patch - # Get the max value of the patch - max_val = abs(X[curr_ex,:]).max() - i_inds = example_width*[pad+j * (example_height+pad)+q for q in range(example_height)] - j_inds = [pad+i * (example_width+pad)+q - for q in range(example_width) - for nn in range(example_height)] - try: - newData = (X[curr_ex,:].reshape((example_height,example_width)))/max_val - except: - print X[curr_ex,:].shape - print (example_height,example_width) - raise - display_array[i_inds,j_inds] = newData.flatten() - curr_ex+=1 - if curr_ex>=m: - break - # Display the image - pylab.imshow(display_array,vmin=-1,vmax=1,interpolation='nearest',cmap=pylab.cm.gray) - pylab.xticks([]) - pylab.yticks([]) - displayData(preprocess.W.T) - pylab.show() - #""" - From 2d832c00064ff6166d368409ae6e65a1d9ad9fb9 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 6 May 2015 18:04:44 +0200 Subject: [PATCH 243/352] Forbid: chi^2 + normalization, fix bug with sparse matrices --- ParamSklearn/classification.py | 6 ++++++ ParamSklearn/implementations/Imputation.py | 20 ++++++++++++++------ 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 032f1f6881..db452a6d9a 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -391,14 +391,20 @@ def get_hyperparameter_search_space(cls, include_estimators=None, forbidden_hyperparameter_combinations = \ [("select_percentile_classification:score_func", "chi2", "rescaling:strategy", "standard"), + ("select_percentile_classification:score_func", "chi2", + "rescaling:strategy", "normalize"), ("select_percentile_classification:score_func", "chi2", "rescaling:strategy", "none"), ("select_rates:score_func", "chi2", "rescaling:strategy", "standard"), ("select_rates:score_func", "chi2", "rescaling:strategy", "none"), + ("select_rates:score_func", "chi2", + "rescaling:strategy", "normalize"), ("nystroem_sampler:kernel", 'chi2', "rescaling:strategy", "standard"), + ("nystroem_sampler:kernel", 'chi2', "rescaling:strategy", + "normalize"), ("nystroem_sampler:kernel", 'chi2', "rescaling:strategy", "none")] for hp_name_1, hp_value_1, hp_name_2, hp_value_2 in \ diff --git a/ParamSklearn/implementations/Imputation.py b/ParamSklearn/implementations/Imputation.py index 5ccf4ed2eb..8e6c942b05 100644 --- a/ParamSklearn/implementations/Imputation.py +++ b/ParamSklearn/implementations/Imputation.py @@ -184,12 +184,20 @@ def fit(self, X, y=None): # transform(X), the imputation data will be computed in transform() # when the imputation is done per sample (i.e., when axis=1). if self.axis == 0: - X = atleast2d_or_csc(X, dtype=self.dtype, - force_all_finite=False) - self.statistics_ = self._dense_fit(X, - self.strategy, - self.missing_values, - self.axis) + if sparse.issparse(X): + X = atleast2d_or_csc(X, dtype=np.float64, + force_all_finite=False) + self.statistics_ = self._sparse_fit(X, + self.strategy, + self.missing_values, + self.axis) + else: + X = atleast2d_or_csc(X, dtype=self.dtype, + force_all_finite=False) + self.statistics_ = self._dense_fit(X, + self.strategy, + self.missing_values, + self.axis) return self def _sparse_fit(self, X, strategy, missing_values, axis): From a1b5d900dcaed6fdc9aaf9e8032c94e180a6a1c6 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 8 May 2015 11:34:34 +0200 Subject: [PATCH 244/352] Classification; forbidden: normalize with nb --- ParamSklearn/classification.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index db452a6d9a..b44a8f9f29 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -359,7 +359,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, classifiers_ = ["multinomial_nb", "bernoulli_nb"] preproc_with_negative_X = ["kitchen_sinks", "pca", "truncatedSVD", "fast_ica", "kernel_pca", "nystroem_sampler"] - scaling_strategies = ['standard', 'none'] + scaling_strategies = ['standard', 'none', "normalize"] for c in classifiers_: if c not in classifiers_list: continue From 16ee7549d3a6c15460cfabab29d487f0d5a2a1d8 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 8 May 2015 11:34:34 +0200 Subject: [PATCH 245/352] Classification; forbidden: normalize with nb --- ParamSklearn/classification.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index db452a6d9a..b44a8f9f29 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -359,7 +359,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, classifiers_ = ["multinomial_nb", "bernoulli_nb"] preproc_with_negative_X = ["kitchen_sinks", "pca", "truncatedSVD", "fast_ica", "kernel_pca", "nystroem_sampler"] - scaling_strategies = ['standard', 'none'] + scaling_strategies = ['standard', 'none', "normalize"] for c in classifiers_: if c not in classifiers_list: continue From 4ab60a22347f46e1740cac3e3be678a966a29716 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 8 May 2015 16:00:53 +0200 Subject: [PATCH 246/352] fix typo --- ParamSklearn/components/classification/gaussian_process.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/components/classification/gaussian_process.py b/ParamSklearn/components/classification/gaussian_process.py index 7a00e6128f..4a003f162c 100644 --- a/ParamSklearn/components/classification/gaussian_process.py +++ b/ParamSklearn/components/classification/gaussian_process.py @@ -25,7 +25,7 @@ def __init__(self, random_state=None, n_inducing=20, ard=False): elif ard == "False": self.ard = False else: - selfard = ard + self.ard = ard self.enc = None From 9bcfd87fa03e114fba50deeac3bb782210107b74 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 12 May 2015 09:55:26 +0200 Subject: [PATCH 247/352] Remove GPy from dependencies --- setup.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/setup.py b/setup.py index e5ef5b3cf0..326c611233 100644 --- a/setup.py +++ b/setup.py @@ -9,8 +9,7 @@ "scipy==0.14.0", "scikit-learn==0.15.2", "nose", - "HPOlibConfigSpace", - "GPy==0.6.0"], + "HPOlibConfigSpace"], test_requires=["mock"], test_suite="nose.collector", package_data={'': ['*.txt', '*.md']}, From 113e984e70d693ae8686fd01438225cc193dae6d Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 22 May 2015 13:30:21 +0200 Subject: [PATCH 248/352] Use new version of HPOlibConfigSpace; tests run much faster now --- ParamSklearn/base.py | 36 ++++------ ParamSklearn/classification.py | 6 +- .../components/preprocessing/kitchen_sinks.py | 2 +- ParamSklearn/util.py | 33 ++++----- source/first_steps.rst | 6 +- .../preprocessing/test_balancing.py | 6 +- .../preprocessing/test_extra_trees.py | 6 +- .../components/preprocessing/test_fast_ica.py | 6 +- .../test_feature_agglomeration.py | 4 +- tests/components/preprocessing/test_gem.py | 4 +- .../preprocessing/test_kernel_pca.py | 6 +- .../preprocessing/test_liblinear.py | 8 +-- tests/components/preprocessing/test_pca.py | 10 ++- .../preprocessing/test_polynomial.py | 6 +- .../test_select_percentile_classification.py | 20 +++--- .../test_select_percentile_regression.py | 6 +- .../preprocessing/test_select_rates.py | 28 +++----- .../regression/test_ridge_regression.py | 8 ++- tests/test_classification.py | 67 +++++++++---------- tests/test_regression.py | 6 +- 20 files changed, 117 insertions(+), 157 deletions(-) diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index 91b9651596..b67a658346 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -13,11 +13,8 @@ from sklearn.utils import check_random_state from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ - InactiveHyperparameter +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition, AbstractConjunction -from HPOlibConfigSpace.forbidden import ForbiddenAndConjunction, \ - ForbiddenEqualsClause from . import components as components @@ -31,12 +28,7 @@ class ParamSklearnBaseEstimator(BaseEstimator): __metaclass__ = ABCMeta def __init__(self, configuration, random_state=None): - # TODO check sklearn version! self.configuration = configuration - - cs = self.get_hyperparameter_search_space() - cs.check_configuration(configuration) - self._pipeline = None if random_state is None: @@ -95,21 +87,18 @@ def fit(self, X, Y, fit_params=None, init_params=None): # List of preprocessing steps (and their order) preprocessors_names = ["imputation", "rescaling", - self.configuration['preprocessor'].value] + self.configuration['preprocessor']] for preproc_name in preprocessors_names: preproc_params = {} for instantiated_hyperparameter in self.configuration: - if not instantiated_hyperparameter.hyperparameter.name \ - .startswith(preproc_name): + if not instantiated_hyperparameter.startswith(preproc_name): continue - if isinstance(instantiated_hyperparameter, - InactiveHyperparameter): + if self.configuration[instantiated_hyperparameter] is None: continue - name_ = instantiated_hyperparameter.hyperparameter.name. \ - split(":")[1] - preproc_params[name_] = instantiated_hyperparameter.value + name_ = instantiated_hyperparameter.split(":")[1] + preproc_params[name_] = self.configuration[instantiated_hyperparameter] preproc_params.update(init_params_per_method[preproc_name]) preprocessor_object = components.preprocessing_components. \ @@ -119,18 +108,17 @@ def fit(self, X, Y, fit_params=None, init_params=None): # Extract Estimator Hyperparameters from the configuration object estimator_name = self.configuration[ - self._get_estimator_hyperparameter_name()].value + self._get_estimator_hyperparameter_name()] estimator_parameters = {} for instantiated_hyperparameter in self.configuration: - if not instantiated_hyperparameter.hyperparameter.name.startswith( - estimator_name): + if not instantiated_hyperparameter.startswith(estimator_name): continue - if isinstance(instantiated_hyperparameter, InactiveHyperparameter): + if self.configuration[instantiated_hyperparameter] is None: continue - name_ = instantiated_hyperparameter.hyperparameter.name. \ - split(":")[1] - estimator_parameters[name_] = instantiated_hyperparameter.value + name_ = instantiated_hyperparameter. split(":")[1] + estimator_parameters[name_] = self.configuration[ + instantiated_hyperparameter] estimator_parameters.update(init_params_per_method[estimator_name]) estimator_object = self._get_estimator_components()[ diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index b44a8f9f29..7f65436ca1 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -65,11 +65,11 @@ def fit(self, X, Y, fit_params=None, init_params=None): self.num_targets = 1 if len(Y.shape) == 1 else Y.shape[1] # Weighting samples has to be done here, not in the components - if self.configuration['balancing:strategy'].value == 'weighting': + if self.configuration['balancing:strategy'] == 'weighting': balancing = Balancing(strategy='weighting') init_params, fit_params = balancing.get_weights( - Y, self.configuration['classifier'].value, - self.configuration['preprocessor'].value, + Y, self.configuration['classifier'], + self.configuration['preprocessor'], init_params, fit_params) super(ParamSklearnClassifier, self).fit(X, Y, fit_params=fit_params, diff --git a/ParamSklearn/components/preprocessing/kitchen_sinks.py b/ParamSklearn/components/preprocessing/kitchen_sinks.py index 9bfad4d6f7..5f72d5cf0b 100644 --- a/ParamSklearn/components/preprocessing/kitchen_sinks.py +++ b/ParamSklearn/components/preprocessing/kitchen_sinks.py @@ -9,7 +9,7 @@ class RandomKitchenSinks(ParamSklearnPreprocessingAlgorithm): - def __init__(self, gamma, n_components, random_state = None): + def __init__(self, gamma, n_components, random_state=None): """ Parameters: gamma: float Parameter of the rbf kernel to be approximated exp(-gamma * x^2) diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index b2b2562a2d..ffdf81bf33 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -88,8 +88,8 @@ def _test_classifier(classifier, dataset='iris', sparse=False): dataset_properties={'sparse': sparse}) default = configuration_space.get_default_configuration() classifier = classifier(random_state=1, - **{hp.hyperparameter.name: hp.value for hp in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default if default[hp_name] is not None}) predictor = classifier.fit(X_train, Y_train) predictions = predictor.predict(X_test) return predictions, Y_test @@ -101,8 +101,8 @@ def _test_classifier_predict_proba(classifier, dataset='iris', sparse=False): configuration_space = classifier.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() classifier = classifier(random_state=1, - **{hp.hyperparameter.name: hp.value for hp in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default}) predictor = classifier.fit(X_train, Y_train) predictions = predictor.predict_proba(X_test) return predictions, Y_test @@ -114,9 +114,10 @@ def _test_preprocessing(Preprocessor, dataset='iris', make_sparse=False): original_X_train = X_train.copy() configuration_space = Preprocessor.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() + print default preprocessor = Preprocessor(random_state=1, - **{hp.hyperparameter.name: hp.value for hp in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default if default[hp_name] is not None}) transformer = preprocessor.fit(X_train, Y_train) return transformer.transform(X_train), original_X_train @@ -133,8 +134,8 @@ def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False, configuration_space = Preprocessor.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = Preprocessor(random_state=1, - **{hp.hyperparameter.name: hp.value for hp in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default}) preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float32) @@ -145,8 +146,8 @@ def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False, configuration_space = Preprocessor.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = Preprocessor(random_state=1, - **{hp.hyperparameter.name: hp.value for hp in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default}) preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float64) @@ -160,8 +161,8 @@ def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False, configuration_space = Preprocessor.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = Preprocessor(random_state=1, - **{hp.hyperparameter.name: hp.value for hp in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name + in default}) preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float32) @@ -173,8 +174,8 @@ def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False, configuration_space = Preprocessor.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = Preprocessor(random_state=1, - **{hp.hyperparameter.name: hp.value for hp in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name + in default}) preprocessor.fit(X_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float64) @@ -186,8 +187,8 @@ def _test_regressor(Regressor, dataset='diabetes'): configuration_space = Regressor.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() regressor = Regressor(random_state=1, - **{hp.hyperparameter.name: hp.value for hp in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default}) # Dumb incomplete hacky test to check that we do not alter the data X_train_hash = hash(str(X_train)) X_test_hash = hash(str(X_test)) diff --git a/source/first_steps.rst b/source/first_steps.rst index 51b9e0ff46..cc7d3971a5 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -7,7 +7,6 @@ HPOlibConfigSpace package and then train a classifier with a random configuration on the iris dataset. >>> from ParamSklearn.classification import ParamSklearnClassifier - >>> from HPOlibConfigSpace.random_sampler import RandomSampler >>> import sklearn.datasets >>> import sklearn.metrics >>> import numpy as np @@ -18,10 +17,9 @@ configuration on the iris dataset. >>> np.random.seed(1) >>> np.random.shuffle(indices) >>> configuration_space = ParamSklearnClassifier.get_hyperparameter_search_space() - >>> sampler = RandomSampler(configuration_space, 1) - >>> configuration = sampler.sample_configuration() + >>> configuration = configuration_space.sample_configuration() >>> cls = ParamSklearnClassifier(configuration, random_state=1) >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.73999999999999999 + 0.80000000000000004 diff --git a/tests/components/preprocessing/test_balancing.py b/tests/components/preprocessing/test_balancing.py index d916778b9e..ff35d09ef8 100644 --- a/tests/components/preprocessing/test_balancing.py +++ b/tests/components/preprocessing/test_balancing.py @@ -5,8 +5,6 @@ import numpy as np import sklearn.metrics -from HPOlibConfigSpace.hyperparameters import InactiveHyperparameter - from ParamSklearn.components.preprocessing.balancing import Balancing from ParamSklearn.classification import ParamSklearnClassifier from ParamSklearn.components.classification.adaboost import AdaboostClassifier @@ -87,7 +85,7 @@ def test_weighting_effect(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space( include_estimators=[name]) default = cs.get_default_configuration() - default.values['balancing:strategy'].value = strategy + default._values['balancing:strategy'] = strategy classifier = ParamSklearnClassifier(default, random_state=1) predictor = classifier.fit(X_train, Y_train) predictions = predictor.predict(X_test) @@ -107,7 +105,7 @@ def test_weighting_effect(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space( include_estimators=['sgd'], include_preprocessors=[name]) default = cs.get_default_configuration() - default.values['balancing:strategy'].value = strategy + default._values['balancing:strategy'] = strategy classifier = ParamSklearnClassifier(default, random_state=1) predictor = classifier.fit(X_train, Y_train) predictions = predictor.predict(X_test) diff --git a/tests/components/preprocessing/test_extra_trees.py b/tests/components/preprocessing/test_extra_trees.py index 2e912475f6..457c8be9e8 100644 --- a/tests/components/preprocessing/test_extra_trees.py +++ b/tests/components/preprocessing/test_extra_trees.py @@ -21,10 +21,8 @@ def test_default_configuration_classify(self): configuration_space = ExtraTreesPreprocessor.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = ExtraTreesPreprocessor(random_state=1, - **{ - hp.hyperparameter.name: hp.value - for hp in - default.values.values()}) + **{hp_name: default[hp_name] + for hp_name in default}) preprocessor.fit(X_train, Y_train) X_train_trans = preprocessor.transform(X_train) X_test_trans = preprocessor.transform(X_test) diff --git a/tests/components/preprocessing/test_fast_ica.py b/tests/components/preprocessing/test_fast_ica.py index f0d521073a..bf15bb0d31 100644 --- a/tests/components/preprocessing/test_fast_ica.py +++ b/tests/components/preprocessing/test_fast_ica.py @@ -21,10 +21,8 @@ def test_default_configuration_classify(self): configuration_space = FastICA.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = FastICA(random_state=1, - **{ - hp.hyperparameter.name: hp.value - for hp in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default}) preprocessor.fit(X_train, Y_train) X_train_trans = preprocessor.transform(X_train) X_test_trans = preprocessor.transform(X_test) diff --git a/tests/components/preprocessing/test_feature_agglomeration.py b/tests/components/preprocessing/test_feature_agglomeration.py index 2e6a9b1ca0..0c69179763 100644 --- a/tests/components/preprocessing/test_feature_agglomeration.py +++ b/tests/components/preprocessing/test_feature_agglomeration.py @@ -20,8 +20,8 @@ def test_default_configuration_classify(self): configuration_space = FeatureAgglomeration.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = FeatureAgglomeration(random_state=1, - **{hp.hyperparameter.name: hp.value for hp in - default.values.values()}) + **{hp_name: default[hp_name] for + hp_name in default}) preprocessor.fit(X_train, Y_train) X_train_trans = preprocessor.transform(X_train) X_test_trans = preprocessor.transform(X_test) diff --git a/tests/components/preprocessing/test_gem.py b/tests/components/preprocessing/test_gem.py index 6263e3b0e2..a3fcae6ed6 100644 --- a/tests/components/preprocessing/test_gem.py +++ b/tests/components/preprocessing/test_gem.py @@ -19,8 +19,8 @@ def test_default_configuration_classify(self): configuration_space = GEM.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = GEM(random_state=1, - **{hp.hyperparameter.name: hp.value for hp in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default}) preprocessor.fit(X_train, Y_train) X_train_trans = preprocessor.transform(X_train) X_test_trans = preprocessor.transform(X_test) diff --git a/tests/components/preprocessing/test_kernel_pca.py b/tests/components/preprocessing/test_kernel_pca.py index 9b00a92e61..5d4c7825c5 100644 --- a/tests/components/preprocessing/test_kernel_pca.py +++ b/tests/components/preprocessing/test_kernel_pca.py @@ -22,10 +22,8 @@ def test_default_configuration_classify(self): configuration_space = KernelPCA.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = KernelPCA(random_state=1, - **{ - hp.hyperparameter.name: hp.value - for hp in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default if default[hp_name] is not None}) preprocessor.fit(X_train, Y_train) X_train_trans = preprocessor.transform(X_train) X_test_trans = preprocessor.transform(X_test) diff --git a/tests/components/preprocessing/test_liblinear.py b/tests/components/preprocessing/test_liblinear.py index a6c1b394ae..5bf40a1107 100644 --- a/tests/components/preprocessing/test_liblinear.py +++ b/tests/components/preprocessing/test_liblinear.py @@ -21,10 +21,10 @@ def test_default_configuration_classify(self): configuration_space = LibLinear_Preprocessor.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = LibLinear_Preprocessor(random_state=1, - **{ - hp.hyperparameter.name: hp.value - for hp in - default.values.values()}) + **{hp_name: default[hp_name] + for hp_name in + default if default[ + hp_name] is not None}) preprocessor.fit(X_train, Y_train) X_train_trans = preprocessor.transform(X_train) X_test_trans = preprocessor.transform(X_test) diff --git a/tests/components/preprocessing/test_pca.py b/tests/components/preprocessing/test_pca.py index bc7f3f7918..88d91bf861 100644 --- a/tests/components/preprocessing/test_pca.py +++ b/tests/components/preprocessing/test_pca.py @@ -26,9 +26,8 @@ def test_preprocessing_dtype(self): configuration_space = PCA.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = PCA(random_state=1, - **{hp.hyperparameter.name: hp.value for hp - in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default}) preprocessor.fit(X_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float32) @@ -39,9 +38,8 @@ def test_preprocessing_dtype(self): configuration_space = PCA.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = PCA(random_state=1, - **{hp.hyperparameter.name: hp.value for hp - in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default}) preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float64) \ No newline at end of file diff --git a/tests/components/preprocessing/test_polynomial.py b/tests/components/preprocessing/test_polynomial.py index c82e2a574b..ba210e6325 100644 --- a/tests/components/preprocessing/test_polynomial.py +++ b/tests/components/preprocessing/test_polynomial.py @@ -21,10 +21,8 @@ def test_default_configuration_classify(self): configuration_space = PolynomialFeatures.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = PolynomialFeatures(random_state=1, - **{ - hp.hyperparameter.name: hp.value - for hp in - default.values.values()}) + **{hp_name: default[hp_name] for + hp_name in default}) preprocessor.fit(X_train, Y_train) X_train_trans = preprocessor.transform(X_train) X_test_trans = preprocessor.transform(X_test) diff --git a/tests/components/preprocessing/test_select_percentile_classification.py b/tests/components/preprocessing/test_select_percentile_classification.py index 29b8d02fe0..fb856f3fa5 100644 --- a/tests/components/preprocessing/test_select_percentile_classification.py +++ b/tests/components/preprocessing/test_select_percentile_classification.py @@ -28,9 +28,8 @@ def test_preprocessing_dtype(self): configuration_space = SelectPercentileClassification.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = SelectPercentileClassification(random_state=1, - **{hp.hyperparameter.name: hp.value for hp - in - default.values.values()}) + **{hp_name: default[hp_name] + for hp_name in default}) preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float32) @@ -41,9 +40,8 @@ def test_preprocessing_dtype(self): configuration_space = SelectPercentileClassification.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = SelectPercentileClassification(random_state=1, - **{hp.hyperparameter.name: hp.value for hp - in - default.values.values()}) + **{hp_name: default[hp_name] + for hp_name in default}) preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float64) @@ -55,9 +53,8 @@ def test_preprocessing_dtype(self): configuration_space = SelectPercentileClassification.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = SelectPercentileClassification(random_state=1, - **{hp.hyperparameter.name: hp.value for hp - in - default.values.values()}) + **{hp_name: default[hp_name] + for hp_name in default}) preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float32) @@ -68,9 +65,8 @@ def test_preprocessing_dtype(self): configuration_space = SelectPercentileClassification.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = SelectPercentileClassification(random_state=1, - **{hp.hyperparameter.name: hp.value for hp - in - default.values.values()}) + **{hp_name: default[hp_name] + for hp_name in default}) preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float64) diff --git a/tests/components/preprocessing/test_select_percentile_regression.py b/tests/components/preprocessing/test_select_percentile_regression.py index 337837d2f5..6326023918 100644 --- a/tests/components/preprocessing/test_select_percentile_regression.py +++ b/tests/components/preprocessing/test_select_percentile_regression.py @@ -22,7 +22,8 @@ def test_preprocessing_dtype(self): configuration_space = SelectPercentileRegression.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = SelectPercentileRegression(random_state=1, - **{hp.hyperparameter.name: hp.value for hp in default.values.values()}) + **{hp_name: default[hp_name] + for hp_name in default}) preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float32) @@ -33,7 +34,8 @@ def test_preprocessing_dtype(self): configuration_space = SelectPercentileRegression.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = SelectPercentileRegression(random_state=1, - **{hp.hyperparameter.name: hp.value for hp in default.values.values()}) + **{hp_name: default[hp_name] + for hp_name in default}) preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float64) diff --git a/tests/components/preprocessing/test_select_rates.py b/tests/components/preprocessing/test_select_rates.py index 51684f4d7e..5d89c99cd3 100644 --- a/tests/components/preprocessing/test_select_rates.py +++ b/tests/components/preprocessing/test_select_rates.py @@ -30,11 +30,8 @@ def test_preprocessing_dtype(self): configuration_space = SelectRates.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = SelectRates(random_state=1, - **{ - hp.hyperparameter.name: hp.value - for hp - in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default}) preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float32) @@ -45,11 +42,8 @@ def test_preprocessing_dtype(self): configuration_space = SelectRates.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = SelectRates(random_state=1, - **{ - hp.hyperparameter.name: hp.value - for hp - in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default}) preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float64) @@ -61,11 +55,8 @@ def test_preprocessing_dtype(self): configuration_space = SelectRates.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = SelectRates(random_state=1, - **{ - hp.hyperparameter.name: hp.value - for hp - in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default}) preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float32) @@ -76,11 +67,8 @@ def test_preprocessing_dtype(self): configuration_space = SelectRates.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = SelectRates(random_state=1, - **{ - hp.hyperparameter.name: hp.value - for hp - in - default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default}) preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float64) diff --git a/tests/components/regression/test_ridge_regression.py b/tests/components/regression/test_ridge_regression.py index 0517dc8aab..cdbceb7c34 100644 --- a/tests/components/regression/test_ridge_regression.py +++ b/tests/components/regression/test_ridge_regression.py @@ -7,7 +7,7 @@ import sklearn.metrics -class RandomForestComponentTest(unittest.TestCase): +class RidgeComponentTest(unittest.TestCase): def test_default_configuration(self): configuration_space = RidgeRegression.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() @@ -26,7 +26,8 @@ def test_default_configuration(self): make_sparse=False) preprocessor = RandomKitchenSinks( random_state=1, - **{hp.hyperparameter.name: hp.value for hp in default_preproc.values.values()}) + **{hp_name: default_preproc[hp_name] for hp_name in + default_preproc if default_preproc[hp_name] is not None}) transformer = preprocessor.fit(X_train, Y_train) X_train_transformed = transformer.transform(X_train) @@ -34,7 +35,8 @@ def test_default_configuration(self): regressor = RidgeRegression( random_state=1, - **{hp.hyperparameter.name: hp.value for hp in default.values.values()}) + **{hp_name: default[hp_name] for hp_name in + default if default[hp_name] is not None}) predictor = regressor.fit(X_train_transformed, Y_train) predictions = predictor.predict(X_test_transformed) diff --git a/tests/test_classification.py b/tests/test_classification.py index 568834b951..43a9d74020 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -16,7 +16,6 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace, \ Configuration from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from HPOlibConfigSpace.random_sampler import RandomSampler from ParamSklearn.classification import ParamSklearnClassifier from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm @@ -75,9 +74,8 @@ def test_default_configuration(self): def test_configurations(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space() - sampler = RandomSampler(cs, 1) for i in range(10): - config = sampler.sample_configuration() + config = cs.sample_configuration() X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cls = ParamSklearnClassifier(config, random_state=1) print config @@ -127,9 +125,8 @@ def test_configurations(self): def test_configurations_sparse(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'sparse': True}) - sampler = RandomSampler(cs, 1) for i in range(10): - config = sampler.sample_configuration() + config = cs.sample_configuration() X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', make_sparse=True) cls = ParamSklearnClassifier(config, random_state=1) @@ -204,11 +201,11 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): self.assertNotIn('select_percentile_classification', str(cs)) def test_get_hyperparameter_search_space_only_forbidden_combinations(self): - self.assertRaisesRegexp(ValueError, "Default Configuration:\n" + self.assertRaisesRegexp(ValueError, "Configuration:\n" " balancing:strategy, Value: none\n" " classifier, Value: multinomial_nb\n" " imputation:strategy, Value: mean\n" - " multinomial_nb:alpha, Value: 1.000000\n" + " multinomial_nb:alpha, Value: 1.0\n" " multinomial_nb:fit_prior, Value: True\n" " preprocessor, Value: truncatedSVD\n" " rescaling:strategy, Value: min/max\n" @@ -226,7 +223,7 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): " balancing:strategy, Value: none\n" " classifier, Value: liblinear_svc\n" " imputation:strategy, Value: mean\n" - " liblinear_svc:C, Value: 1.000000\n" + " liblinear_svc:C, Value: 1.0\n" " liblinear_svc:class_weight, Value: None\n" " liblinear_svc:dual, Constant: False\n" " liblinear_svc:fit_intercept, Constant: True\n" @@ -234,7 +231,7 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): " liblinear_svc:loss, Value: l2\n" " liblinear_svc:multi_class, Constant: ovr\n" " liblinear_svc:penalty, Value: l2\n" - " liblinear_svc:tol, Value: 0.000100\n" + " liblinear_svc:tol, Value: 0.0001\n" " preprocessor, Value: densifier\n" " rescaling:strategy, Value: min/max\n" "violates forbidden clause \(Forbidden: classifier == liblinear_svc &&" @@ -311,19 +308,19 @@ def test_predict_batched_sparse(self): # Densifier + RF is the only combination that easily tests sparse # data with multilabel classification! config = Configuration(cs, - hyperparameters={"balancing:strategy": "none", - "classifier": "random_forest", - "imputation:strategy": "mean", - "preprocessor": "densifier", - 'random_forest:bootstrap': 'True', - 'random_forest:criterion': 'gini', - 'random_forest:max_depth': 'None', - 'random_forest:min_samples_split': 2, - 'random_forest:min_samples_leaf': 2, - 'random_forest:max_features': 0.5, - 'random_forest:max_leaf_nodes': 'None', - 'random_forest:n_estimators': 100, - "rescaling:strategy": "min/max"}) + values={"balancing:strategy": "none", + "classifier": "random_forest", + "imputation:strategy": "mean", + "preprocessor": "densifier", + 'random_forest:bootstrap': 'True', + 'random_forest:criterion': 'gini', + 'random_forest:max_depth': 'None', + 'random_forest:min_samples_split': 2, + 'random_forest:min_samples_leaf': 2, + 'random_forest:max_features': 0.5, + 'random_forest:max_leaf_nodes': 'None', + 'random_forest:n_estimators': 100, + "rescaling:strategy": "min/max"}) cls = ParamSklearnClassifier(config) # Multiclass @@ -395,19 +392,19 @@ def test_predict_proba_batched_sparse(self): # Densifier + RF is the only combination that easily tests sparse # data with multilabel classification! config = Configuration(cs, - hyperparameters={"balancing:strategy": "none", - "classifier": "random_forest", - "imputation:strategy": "mean", - "preprocessor": "densifier", - 'random_forest:bootstrap': 'True', - 'random_forest:criterion': 'gini', - 'random_forest:max_depth': 'None', - 'random_forest:min_samples_split': 2, - 'random_forest:min_samples_leaf': 2, - 'random_forest:max_features': 0.5, - 'random_forest:max_leaf_nodes': 'None', - 'random_forest:n_estimators': 100, - "rescaling:strategy": "min/max"}) + values={"balancing:strategy": "none", + "classifier": "random_forest", + "imputation:strategy": "mean", + "preprocessor": "densifier", + 'random_forest:bootstrap': 'True', + 'random_forest:criterion': 'gini', + 'random_forest:max_depth': 'None', + 'random_forest:min_samples_split': 2, + 'random_forest:min_samples_leaf': 2, + 'random_forest:max_features': 0.5, + 'random_forest:max_leaf_nodes': 'None', + 'random_forest:n_estimators': 100, + "rescaling:strategy": "min/max"}) # Multiclass cls = ParamSklearnClassifier(config) diff --git a/tests/test_regression.py b/tests/test_regression.py index d2047e2018..4f88f7046c 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -105,13 +105,13 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): def test_get_hyperparameter_search_space_only_forbidden_combinations(self): self.assertRaisesRegexp(ValueError, "Configuration:\n" " imputation:strategy, Value: mean\n" - " kitchen_sinks:gamma, Value: 1.000000\n" + " kitchen_sinks:gamma, Value: 1.0\n" " kitchen_sinks:n_components, Value: 100\n" " preprocessor, Value: kitchen_sinks\n" " random_forest:bootstrap, Value: True\n" " random_forest:criterion, Constant: mse\n" " random_forest:max_depth, Constant: None\n" - " random_forest:max_features, Value: 1.000000\n" + " random_forest:max_features, Value: 1.0\n" " random_forest:min_samples_leaf, Value: 1\n" " random_forest:min_samples_split, Value: 2\n" " random_forest:n_estimators, Constant: 100\n" @@ -130,7 +130,7 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): " preprocessor, Value: densifier\n" " regressor, Value: ridge_regression\n" " rescaling:strategy, Value: min/max\n" - " ridge_regression:alpha, Value: 1.000000\n" + " ridge_regression:alpha, Value: 1.0\n" "violates forbidden clause \(Forbidden: regressor == " "ridge_regression && Forbidden: preprocessor == densifier\)", ParamSklearnRegressor.get_hyperparameter_search_space, From 1d39db980c5d92c8b5dc057befb420e404d951e0 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 6 May 2015 18:05:55 +0200 Subject: [PATCH 249/352] Remove non-sklearn components in this branch --- .../components/classification/proj_logit.py | 62 ------------- ParamSklearn/components/preprocessing/gem.py | 57 ------------ ParamSklearn/implementations/ProjLogit.py | 90 ------------------- .../implementations/SparseFiltering.py | 73 --------------- ParamSklearn/implementations/gem.py | 46 ---------- tests/implementations/test_ProjLogit.py | 39 -------- .../implementations/test_sparse_filtering.py | 74 --------------- 7 files changed, 441 deletions(-) delete mode 100644 ParamSklearn/components/classification/proj_logit.py delete mode 100644 ParamSklearn/components/preprocessing/gem.py delete mode 100644 ParamSklearn/implementations/ProjLogit.py delete mode 100644 ParamSklearn/implementations/SparseFiltering.py delete mode 100644 ParamSklearn/implementations/gem.py delete mode 100644 tests/implementations/test_ProjLogit.py delete mode 100644 tests/implementations/test_sparse_filtering.py diff --git a/ParamSklearn/components/classification/proj_logit.py b/ParamSklearn/components/classification/proj_logit.py deleted file mode 100644 index e0b1cbeb4a..0000000000 --- a/ParamSklearn/components/classification/proj_logit.py +++ /dev/null @@ -1,62 +0,0 @@ -import numpy as np - -from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, \ - UnParametrizedHyperparameter, Constant - -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS -from ParamSklearn.implementations import ProjLogit - - - -class ProjLogitCLassifier(ParamSklearnClassificationAlgorithm): - - def __init__(self, max_epochs = 2, random_state=None, n_jobs=1): - self.max_epochs = max_epochs - self.estimator = None - - - def fit(self, X, Y): - self.estimator = ProjLogit.ProjLogit(max_epochs = int(self.max_epochs)) - self.estimator.fit(X, Y) - return self - - def predict(self, X): - if self.estimator is None: - raise NotImplementedError - return self.estimator.predict(X) - - def predict_proba(self, X): - if self.estimator is None: - raise NotImplementedError() - return self.estimator.predict_proba(X) - - @staticmethod - def get_properties(): - return {'shortname': 'PLogit', - 'name': 'Logistic Regresion using Least Squares', - 'handles_missing_values': False, - 'handles_nominal_values': False, - 'handles_numerical_features': True, - 'prefers_data_scaled': False, - 'prefers_data_normalized': True, - 'handles_regression': False, - 'handles_classification': True, - 'handles_multiclass': True, - 'handles_multilabel': False, - 'is_deterministic': True, - 'handles_sparse': False, - 'input': (DENSE, ), - 'output': PREDICTIONS, - 'preferred_dtype': np.float32} - - - - @staticmethod - def get_hyperparameter_search_space(dataset_properties=None): - max_epochs = UniformIntegerHyperparameter("max_epochs", 1, 20, default=2) - cs = ConfigurationSpace() - cs.add_hyperparameter(max_epochs) - return cs diff --git a/ParamSklearn/components/preprocessing/gem.py b/ParamSklearn/components/preprocessing/gem.py deleted file mode 100644 index 8c7deac191..0000000000 --- a/ParamSklearn/components/preprocessing/gem.py +++ /dev/null @@ -1,57 +0,0 @@ -from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter, UniformFloatHyperparameter - -from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.implementations.gem import GEM as GEMImpl -from ParamSklearn.util import DENSE - -class GEM(ParamSklearnPreprocessingAlgorithm): - - def __init__(self, N, precond, random_state=None): - self.N = N - self.precond = precond - - def fit(self, X, Y): - self.preprocessor = GEMImpl(self.N, self.precond) - self.preprocessor.fit(X, Y) - return self - - - def transform(self, X): - return self.preprocessor.transform(X) - - - @staticmethod - def get_properties(): - return {'shortname': 'GEM', - 'name': 'Generalized Eigenvector extraction', - 'handles_missing_values': False, - 'handles_nominal_values': False, - 'handles_numerical_features': True, - 'prefers_data_scaled': True, - 'prefers_data_normalized': True, - 'handles_regression': False, - 'handles_classification': True, - 'handles_multiclass': True, - 'handles_multilabel': False, - 'is_deterministic': True, - 'handles_sparse': False, - 'handles_dense': True, - 'input': (DENSE, ), - 'output': DENSE, - 'preferred_dtype': None} - - - @staticmethod - def get_hyperparameter_search_space(dataset_properties=None): - N = UniformIntegerHyperparameter("N", 5, 20, default=10) - precond = UniformFloatHyperparameter("precond", 0, 0.5, default=0.1) - cs = ConfigurationSpace() - cs.add_hyperparameter(N) - cs.add_hyperparameter(precond) - return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name - diff --git a/ParamSklearn/implementations/ProjLogit.py b/ParamSklearn/implementations/ProjLogit.py deleted file mode 100644 index cf12df75d9..0000000000 --- a/ParamSklearn/implementations/ProjLogit.py +++ /dev/null @@ -1,90 +0,0 @@ -import numpy as np -import numpy.random as npr - -# from http://arxiv.org/pdf/1309.1541v1.pdf -def proj_simplex(Y): - N,D = np.shape(Y) - # sort in descending order - X = -np.sort(-Y) - Xsum = np.cumsum(X, axis = 1) - 1 - Xsum = Xsum * (1./np.arange(1,D+1)) - biggest = np.sum(X > Xsum, axis = 1) - # TODO last step could be made faster - # via ravel / linear indexing - subtract = np.zeros((N, 1)) - for i in range(N): - subtract[i] = Xsum[i, biggest[i]-1] - return np.maximum(Y - subtract, 0) - - -class ProjLogit(object): - - def __init__(self, max_epochs = 10, verbose = False): - self.w0 = None - self.ws_all = [] - self.w_all = [] - self.max_epochs = max_epochs - self.verbose = verbose - - def fit(self, X, Y): - # get one hot encoding and add a bias - n = X.shape[0] - trainx = np.hstack([np.ones((n, 1)), X]) - k = np.max(Y) + 1 - if self.verbose: - print("Using {} samples of {} classes".format(n,k)) - yt = np.zeros((n, k)) - for i in range(n): - yt[i, Y[i]] = 1 - # initialize with linear regression - precond = np.eye(trainx.shape[1]) * np.sqrt(n) - C = np.linalg.cholesky(0.5 * np.dot(trainx.T,trainx) + precond) - wp = np.linalg.solve(C, np.dot(trainx.T, yt)) - w = np.linalg.solve(C.T, wp) - self.w0 = np.copy(w) - pred_train = np.dot(trainx, w) - for i in range(self.max_epochs): - # expand prediction - res = np.hstack([pred_train, np.power(pred_train, 2) / 2., np.power(pred_train, 3) / 6., np.power(pred_train, 4) / 24.]) - # solve with linear regression - precond = np.eye(res.shape[1]) * np.sqrt(n) - Cp = np.linalg.cholesky(np.dot(res.T,res) + precond) - ws = np.linalg.solve(Cp.T, np.linalg.solve(Cp, np.dot(res.T, yt))) - self.ws_all.append(np.copy(ws)) - # project to probability simplex - p_res = proj_simplex(np.dot(res, ws)) - # and solve again with updated residual - wp = np.linalg.solve(C, np.dot(trainx.T, (yt - p_res))) - w = np.linalg.solve(C.T, wp) - self.w_all.append(np.copy(w)) - pred_train = p_res + np.dot(trainx, w) - obj = np.linalg.norm(yt - pred_train) - - # compute train error - errort = np.sum(np.argmax(pred_train, axis = 1) != Y) - # print training error - if self.verbose: - print("Epoch {} obj: {} train error: {}".format(i,obj,1.*errort/n)) - return self - - - def predict(self, X): - res = self.predict_proba(X) - return np.argmax(res, axis = 1) - - def predict_proba(self, X): - if self.w0 == None: - raise NotImplementedError - testx = np.hstack([np.ones((X.shape[0], 1)), X]) - pred = np.dot(testx, self.w0) - for ws, w in zip(self.ws_all, self.w_all): - res = np.hstack([pred, np.power(pred, 2) / 2., np.power(pred, 3) / 6., np.power(pred, 4) / 24.]) - p_res = proj_simplex(np.dot(res, ws)) - pred = p_res + np.dot(testx, w) - return proj_simplex(pred) - - def predict_log_proba(self, X): - if self.w == None: - return np.zeros(X.shape[0]) - res = np.log(self.predict_proba(X)) - return res diff --git a/ParamSklearn/implementations/SparseFiltering.py b/ParamSklearn/implementations/SparseFiltering.py deleted file mode 100644 index d9a15e1079..0000000000 --- a/ParamSklearn/implementations/SparseFiltering.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -This quickly adapted version of sparse filtering requires scipy and numpy -""" -import numpy as np -from scipy.optimize import minimize - -def l2row(X): - """ - L2 normalize X by rows. We also use this to normalize by column with l2row(X.T) - """ - N = np.sqrt((X**2).sum(axis=1)+1e-8) - Y = (X.T/N).T - return Y,N - - -def l2rowg(X,Y,N,D): - """ - Compute L2 normalized gradient. - """ - return (D.T/N - Y.T * (D*X).sum(axis=1) / N**2).T - - -class SparseFiltering(object): - def __init__(self, N, maxiter=100, random_state=None): - self.N = N - self.W = None - self.maxiter = maxiter - if random_state is None: - self.rng = np.random - elif isinstance(random_state, int): - self.rng = np.random.RandomState(random_state) - else: - self.rng = random_state - - def step(self, X, W): - # returns current objective and gradient - W = W.reshape((X.shape[1], self.N)) - features = X.dot(W) #W.dot(X) - features_norm = np.sqrt(features**2 + 1e-8) - features_column, column_norm = l2row(features_norm.T) - features_row, row_norm = l2row(features_norm) - # compute objective function (l1 norm of features) - obj = features_row.sum() - # backprop through the whole process - deltaW = l2rowg(features_norm, features_row, row_norm, np.ones(features_row.shape)) - deltaW = l2rowg(features_norm.T, features_column, column_norm, deltaW.T).T - deltaW = X.T.dot(deltaW*(features/features_norm)) - return obj, deltaW.flatten() - - - def fit(self, X, y=None): - """ fit sparse filtering to data - this completely ignores y - """ - # init random weights - W = self.rng.randn(self.N,X.shape[1]) - # build a closure for the objective - obj_fun = lambda w: self.step(X, w) - # evaluate once for testing - obj, grad = obj_fun(W) - # and run optimization - opt = {'maxiter': self.maxiter} - res = minimize(obj_fun, W, method='L-BFGS-B', jac = True, options = opt) - self.W = res.x.reshape(X.shape[1], self.N) - - def transform(self, X): - # compute responses - features = X.dot(self.W) - # sparsify - features_norm = np.sqrt(features**2 + 1e-8) - features_column = l2row(features_norm.T)[0] - features_row = l2row(features_column)[0].T - return features_row diff --git a/ParamSklearn/implementations/gem.py b/ParamSklearn/implementations/gem.py deleted file mode 100644 index c220ffd70d..0000000000 --- a/ParamSklearn/implementations/gem.py +++ /dev/null @@ -1,46 +0,0 @@ -import numpy as np -from scipy.sparse.linalg import eigs - - -class GEM(object): - - - def __init__(self, N, precond): - self.N = N - self.precond = precond - self.W = None - self.verbose = False - - - def fit(self, X, Y): - self.N = min(self.N, X.shape[1]-2) - y_max = int(np.max(Y) + 1) - self.W = np.zeros((X.shape[1], self.N*y_max*(y_max-1)), dtype=X.dtype) - off = 0 - for i in range(y_max): - Xi = X[Y == i] - covi = np.dot(Xi.T, Xi) - covi /= np.float32(Xi.shape[0]) - for j in range(y_max): - if j == i: - continue - if self.verbose: - print("Finding eigenvectors for pair ({}/{})".format(i,j)) - Xj = X[Y == j] - covj = np.dot(Xj.T, Xj) / np.float32(Xj.shape[0]) - E = np.linalg.pinv(np.linalg.cholesky(covj + np.eye(covj.shape[0]) * self.precond).T) - C = np.dot(np.dot(E.T, covi), E) - C2 = 0.5 * (C + C.T) - S,U = eigs(C2, self.N) - gev = np.dot(E, U[:, :self.N]) - self.W[:, off:off+self.N] = gev - off += self.N - print("DONE") - return self - - - def transform(self, X, Y=None): - features = np.maximum(np.dot(X, self.W), 0) - return features - - diff --git a/tests/implementations/test_ProjLogit.py b/tests/implementations/test_ProjLogit.py deleted file mode 100644 index 5b9dc0442c..0000000000 --- a/tests/implementations/test_ProjLogit.py +++ /dev/null @@ -1,39 +0,0 @@ -import unittest -import os -import numpy as np -#import scipy.io - -from ParamSklearn.implementations.ProjLogit import ProjLogit - - -class TestProjLogit(unittest.TestCase): - def test_sparse_filtering(self): - """Test logistic regression implementation based on least squares""" - - # simple test that should work out - trainx = np.random.rand(100,3) - trainy = np.zeros(10000) - testx = np.random.rand(100,3) - testy = np.zeros(100) - for i in range(100): - if trainx[i, 2] > 0.5: - trainy[i] = 1 - for i in range(100): - if testx[i, 2] > 0.5: - testy[i] = 1 - - model = ProjLogit(max_epochs = 10, verbose = True) - model.fit(trainx, trainy) - print("weights 0:") - print(model.w0) - predicted_prob = model.predict_proba(testx) - predicted2 = np.argmax(predicted_prob, axis = 1) - predicted = model.predict(testx) - - #print(predicted) - #print(testy) - #print((predicted != testy).sum()) - #print((predicted2 != testy).sum()) - self.assertTrue((predicted == predicted2).all()) - self.assertTrue(((1 - predicted_prob.sum(axis=1)) < 1e-3).all()) - self.assertTrue((predicted != testy).sum() < 20) diff --git a/tests/implementations/test_sparse_filtering.py b/tests/implementations/test_sparse_filtering.py deleted file mode 100644 index 42d504b53f..0000000000 --- a/tests/implementations/test_sparse_filtering.py +++ /dev/null @@ -1,74 +0,0 @@ -import unittest -import os -import numpy as np - -from ParamSklearn.implementations.SparseFiltering import SparseFiltering - - -class TestSparseFiltering(unittest.TestCase): - def test_sparse_filtering(self): - """Test sparse filtering on a simple dataset""" - # load a few patches of image data from a file which is currently hard coded :) - # JTS TODO: remove this hard coding - dataset = "/home/springj/data/image_patches.npz" - # try not to break testing if data is not available - if (not os.path.isfile(dataset)): - return - patches = np.load(dataset) - data = patches['data'] - preprocess = SparseFiltering(256, random_state = 123456) - print("BEFORE") - preprocess.fit(data) - # JTS TODO: figure out a better test than this nonsense here ;) - self.assertFalse((preprocess.W == 0).all()) - """ - # JTS: the following is only useful for visualization purposes - # turn it on if you want to see sparse filtering in action on image data ;) - import pylab - # method for eyeballing the features - # assumes features in ROWS not columns! - def displayData(X, example_width = False, display_cols = False): - # compute rows, cols - m,n = X.shape - if not example_width: - example_width = int(np.round(np.sqrt(n))) - example_height = (n/example_width) - # Compute number of items to display - if not display_cols: - display_cols = int(np.sqrt(m)) - display_rows = int(np.ceil(m/display_cols)) - pad = 1 - # Setup blank display - display_array = -np.ones((pad+display_rows * (example_height+pad), - pad+display_cols * (example_width+pad))) - # Copy each example into a patch on the display array - curr_ex = 0 - for j in range(display_rows): - for i in range(display_cols): - if curr_ex>=m: - break - # Copy the patch - # Get the max value of the patch - max_val = abs(X[curr_ex,:]).max() - i_inds = example_width*[pad+j * (example_height+pad)+q for q in range(example_height)] - j_inds = [pad+i * (example_width+pad)+q - for q in range(example_width) - for nn in range(example_height)] - try: - newData = (X[curr_ex,:].reshape((example_height,example_width)))/max_val - except: - print X[curr_ex,:].shape - print (example_height,example_width) - raise - display_array[i_inds,j_inds] = newData.flatten() - curr_ex+=1 - if curr_ex>=m: - break - # Display the image - pylab.imshow(display_array,vmin=-1,vmax=1,interpolation='nearest',cmap=pylab.cm.gray) - pylab.xticks([]) - pylab.yticks([]) - displayData(preprocess.W.T) - pylab.show() - #""" - From 73d8643b2849db753ddc7b8909d01e6cee9bafc6 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Sun, 31 May 2015 17:28:42 +0200 Subject: [PATCH 250/352] Allow a kernel approximation to be the sole preprocessor --- ParamSklearn/classification.py | 12 ++++++++++-- tests/test_classification.py | 6 ++++++ 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 7f65436ca1..9f76bba374 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -250,7 +250,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, preprocessors[name] = available_preprocessors[name] # Hardcode the defaults based on some educated guesses - classifier_defaults = ['random_forest', 'liblinear', 'sgd', + classifier_defaults = ['random_forest', 'liblinear_svc', 'sgd', 'libsvm_svc'] classifier_default = None for cd_ in classifier_defaults: @@ -325,6 +325,14 @@ def get_hyperparameter_search_space(cls, include_estimators=None, "preprocessor"), f))) except KeyError: pass + except ValueError as e: + if "violates forbidden clause (Forbidden: classifier == %s " \ + "&& Forbidden: preprocessor == %s)" % (classifiers_, + feature_learning): + # TODO: super-hacky, build a method for that in the + # configuration space module + configuration_space._hyperparameters[ + 'classifier'].default = classifier_defaults[1] # We have seen empirically that tree-based models together with PCA # don't work better than tree-based models without preprocessing @@ -354,7 +362,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # raise e # Won't work - # Multinomial NB does not work with negative values, don't use + # Multinomial NB etc does not work with negative values, don't use # it with standardization, features learning, pca classifiers_ = ["multinomial_nb", "bernoulli_nb"] preproc_with_negative_X = ["kitchen_sinks", "pca", "truncatedSVD", diff --git a/tests/test_classification.py b/tests/test_classification.py index 43a9d74020..6adefa4bd9 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -200,6 +200,12 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): exclude_preprocessors=['select_percentile_classification']) self.assertNotIn('select_percentile_classification', str(cs)) + def test_get_hyperparameter_search_space_preprocessor_contradicts_default_classifier(self): + cs = ParamSklearnClassifier.get_hyperparameter_search_space( + include_preprocessors=['nystroem_sampler']) + self.assertEqual(cs.get_hyperparameter('preprocessor').choices, + ['nystroem_sampler']) + def test_get_hyperparameter_search_space_only_forbidden_combinations(self): self.assertRaisesRegexp(ValueError, "Configuration:\n" " balancing:strategy, Value: none\n" From e3043851c801f00e956d395dc1d18cbd6b37a714 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 1 Oct 2015 14:51:32 +0200 Subject: [PATCH 251/352] Revert "Remove non-sklearn components in this branch" This reverts commit 1d39db980c5d92c8b5dc057befb420e404d951e0. --- .../components/classification/proj_logit.py | 62 +++++++++++++ ParamSklearn/components/preprocessing/gem.py | 57 ++++++++++++ ParamSklearn/implementations/ProjLogit.py | 90 +++++++++++++++++++ .../implementations/SparseFiltering.py | 73 +++++++++++++++ ParamSklearn/implementations/gem.py | 46 ++++++++++ tests/implementations/test_ProjLogit.py | 39 ++++++++ .../implementations/test_sparse_filtering.py | 74 +++++++++++++++ 7 files changed, 441 insertions(+) create mode 100644 ParamSklearn/components/classification/proj_logit.py create mode 100644 ParamSklearn/components/preprocessing/gem.py create mode 100644 ParamSklearn/implementations/ProjLogit.py create mode 100644 ParamSklearn/implementations/SparseFiltering.py create mode 100644 ParamSklearn/implementations/gem.py create mode 100644 tests/implementations/test_ProjLogit.py create mode 100644 tests/implementations/test_sparse_filtering.py diff --git a/ParamSklearn/components/classification/proj_logit.py b/ParamSklearn/components/classification/proj_logit.py new file mode 100644 index 0000000000..e0b1cbeb4a --- /dev/null +++ b/ParamSklearn/components/classification/proj_logit.py @@ -0,0 +1,62 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.implementations import ProjLogit + + + +class ProjLogitCLassifier(ParamSklearnClassificationAlgorithm): + + def __init__(self, max_epochs = 2, random_state=None, n_jobs=1): + self.max_epochs = max_epochs + self.estimator = None + + + def fit(self, X, Y): + self.estimator = ProjLogit.ProjLogit(max_epochs = int(self.max_epochs)) + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(): + return {'shortname': 'PLogit', + 'name': 'Logistic Regresion using Least Squares', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': True, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'input': (DENSE, ), + 'output': PREDICTIONS, + 'preferred_dtype': np.float32} + + + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + max_epochs = UniformIntegerHyperparameter("max_epochs", 1, 20, default=2) + cs = ConfigurationSpace() + cs.add_hyperparameter(max_epochs) + return cs diff --git a/ParamSklearn/components/preprocessing/gem.py b/ParamSklearn/components/preprocessing/gem.py new file mode 100644 index 0000000000..8c7deac191 --- /dev/null +++ b/ParamSklearn/components/preprocessing/gem.py @@ -0,0 +1,57 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter, UniformFloatHyperparameter + +from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.implementations.gem import GEM as GEMImpl +from ParamSklearn.util import DENSE + +class GEM(ParamSklearnPreprocessingAlgorithm): + + def __init__(self, N, precond, random_state=None): + self.N = N + self.precond = precond + + def fit(self, X, Y): + self.preprocessor = GEMImpl(self.N, self.precond) + self.preprocessor.fit(X, Y) + return self + + + def transform(self, X): + return self.preprocessor.transform(X) + + + @staticmethod + def get_properties(): + return {'shortname': 'GEM', + 'name': 'Generalized Eigenvector extraction', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'handles_dense': True, + 'input': (DENSE, ), + 'output': DENSE, + 'preferred_dtype': None} + + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + N = UniformIntegerHyperparameter("N", 5, 20, default=10) + precond = UniformFloatHyperparameter("precond", 0, 0.5, default=0.1) + cs = ConfigurationSpace() + cs.add_hyperparameter(N) + cs.add_hyperparameter(precond) + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "ParamSklearn %s" % name + diff --git a/ParamSklearn/implementations/ProjLogit.py b/ParamSklearn/implementations/ProjLogit.py new file mode 100644 index 0000000000..cf12df75d9 --- /dev/null +++ b/ParamSklearn/implementations/ProjLogit.py @@ -0,0 +1,90 @@ +import numpy as np +import numpy.random as npr + +# from http://arxiv.org/pdf/1309.1541v1.pdf +def proj_simplex(Y): + N,D = np.shape(Y) + # sort in descending order + X = -np.sort(-Y) + Xsum = np.cumsum(X, axis = 1) - 1 + Xsum = Xsum * (1./np.arange(1,D+1)) + biggest = np.sum(X > Xsum, axis = 1) + # TODO last step could be made faster + # via ravel / linear indexing + subtract = np.zeros((N, 1)) + for i in range(N): + subtract[i] = Xsum[i, biggest[i]-1] + return np.maximum(Y - subtract, 0) + + +class ProjLogit(object): + + def __init__(self, max_epochs = 10, verbose = False): + self.w0 = None + self.ws_all = [] + self.w_all = [] + self.max_epochs = max_epochs + self.verbose = verbose + + def fit(self, X, Y): + # get one hot encoding and add a bias + n = X.shape[0] + trainx = np.hstack([np.ones((n, 1)), X]) + k = np.max(Y) + 1 + if self.verbose: + print("Using {} samples of {} classes".format(n,k)) + yt = np.zeros((n, k)) + for i in range(n): + yt[i, Y[i]] = 1 + # initialize with linear regression + precond = np.eye(trainx.shape[1]) * np.sqrt(n) + C = np.linalg.cholesky(0.5 * np.dot(trainx.T,trainx) + precond) + wp = np.linalg.solve(C, np.dot(trainx.T, yt)) + w = np.linalg.solve(C.T, wp) + self.w0 = np.copy(w) + pred_train = np.dot(trainx, w) + for i in range(self.max_epochs): + # expand prediction + res = np.hstack([pred_train, np.power(pred_train, 2) / 2., np.power(pred_train, 3) / 6., np.power(pred_train, 4) / 24.]) + # solve with linear regression + precond = np.eye(res.shape[1]) * np.sqrt(n) + Cp = np.linalg.cholesky(np.dot(res.T,res) + precond) + ws = np.linalg.solve(Cp.T, np.linalg.solve(Cp, np.dot(res.T, yt))) + self.ws_all.append(np.copy(ws)) + # project to probability simplex + p_res = proj_simplex(np.dot(res, ws)) + # and solve again with updated residual + wp = np.linalg.solve(C, np.dot(trainx.T, (yt - p_res))) + w = np.linalg.solve(C.T, wp) + self.w_all.append(np.copy(w)) + pred_train = p_res + np.dot(trainx, w) + obj = np.linalg.norm(yt - pred_train) + + # compute train error + errort = np.sum(np.argmax(pred_train, axis = 1) != Y) + # print training error + if self.verbose: + print("Epoch {} obj: {} train error: {}".format(i,obj,1.*errort/n)) + return self + + + def predict(self, X): + res = self.predict_proba(X) + return np.argmax(res, axis = 1) + + def predict_proba(self, X): + if self.w0 == None: + raise NotImplementedError + testx = np.hstack([np.ones((X.shape[0], 1)), X]) + pred = np.dot(testx, self.w0) + for ws, w in zip(self.ws_all, self.w_all): + res = np.hstack([pred, np.power(pred, 2) / 2., np.power(pred, 3) / 6., np.power(pred, 4) / 24.]) + p_res = proj_simplex(np.dot(res, ws)) + pred = p_res + np.dot(testx, w) + return proj_simplex(pred) + + def predict_log_proba(self, X): + if self.w == None: + return np.zeros(X.shape[0]) + res = np.log(self.predict_proba(X)) + return res diff --git a/ParamSklearn/implementations/SparseFiltering.py b/ParamSklearn/implementations/SparseFiltering.py new file mode 100644 index 0000000000..d9a15e1079 --- /dev/null +++ b/ParamSklearn/implementations/SparseFiltering.py @@ -0,0 +1,73 @@ +""" +This quickly adapted version of sparse filtering requires scipy and numpy +""" +import numpy as np +from scipy.optimize import minimize + +def l2row(X): + """ + L2 normalize X by rows. We also use this to normalize by column with l2row(X.T) + """ + N = np.sqrt((X**2).sum(axis=1)+1e-8) + Y = (X.T/N).T + return Y,N + + +def l2rowg(X,Y,N,D): + """ + Compute L2 normalized gradient. + """ + return (D.T/N - Y.T * (D*X).sum(axis=1) / N**2).T + + +class SparseFiltering(object): + def __init__(self, N, maxiter=100, random_state=None): + self.N = N + self.W = None + self.maxiter = maxiter + if random_state is None: + self.rng = np.random + elif isinstance(random_state, int): + self.rng = np.random.RandomState(random_state) + else: + self.rng = random_state + + def step(self, X, W): + # returns current objective and gradient + W = W.reshape((X.shape[1], self.N)) + features = X.dot(W) #W.dot(X) + features_norm = np.sqrt(features**2 + 1e-8) + features_column, column_norm = l2row(features_norm.T) + features_row, row_norm = l2row(features_norm) + # compute objective function (l1 norm of features) + obj = features_row.sum() + # backprop through the whole process + deltaW = l2rowg(features_norm, features_row, row_norm, np.ones(features_row.shape)) + deltaW = l2rowg(features_norm.T, features_column, column_norm, deltaW.T).T + deltaW = X.T.dot(deltaW*(features/features_norm)) + return obj, deltaW.flatten() + + + def fit(self, X, y=None): + """ fit sparse filtering to data + this completely ignores y + """ + # init random weights + W = self.rng.randn(self.N,X.shape[1]) + # build a closure for the objective + obj_fun = lambda w: self.step(X, w) + # evaluate once for testing + obj, grad = obj_fun(W) + # and run optimization + opt = {'maxiter': self.maxiter} + res = minimize(obj_fun, W, method='L-BFGS-B', jac = True, options = opt) + self.W = res.x.reshape(X.shape[1], self.N) + + def transform(self, X): + # compute responses + features = X.dot(self.W) + # sparsify + features_norm = np.sqrt(features**2 + 1e-8) + features_column = l2row(features_norm.T)[0] + features_row = l2row(features_column)[0].T + return features_row diff --git a/ParamSklearn/implementations/gem.py b/ParamSklearn/implementations/gem.py new file mode 100644 index 0000000000..c220ffd70d --- /dev/null +++ b/ParamSklearn/implementations/gem.py @@ -0,0 +1,46 @@ +import numpy as np +from scipy.sparse.linalg import eigs + + +class GEM(object): + + + def __init__(self, N, precond): + self.N = N + self.precond = precond + self.W = None + self.verbose = False + + + def fit(self, X, Y): + self.N = min(self.N, X.shape[1]-2) + y_max = int(np.max(Y) + 1) + self.W = np.zeros((X.shape[1], self.N*y_max*(y_max-1)), dtype=X.dtype) + off = 0 + for i in range(y_max): + Xi = X[Y == i] + covi = np.dot(Xi.T, Xi) + covi /= np.float32(Xi.shape[0]) + for j in range(y_max): + if j == i: + continue + if self.verbose: + print("Finding eigenvectors for pair ({}/{})".format(i,j)) + Xj = X[Y == j] + covj = np.dot(Xj.T, Xj) / np.float32(Xj.shape[0]) + E = np.linalg.pinv(np.linalg.cholesky(covj + np.eye(covj.shape[0]) * self.precond).T) + C = np.dot(np.dot(E.T, covi), E) + C2 = 0.5 * (C + C.T) + S,U = eigs(C2, self.N) + gev = np.dot(E, U[:, :self.N]) + self.W[:, off:off+self.N] = gev + off += self.N + print("DONE") + return self + + + def transform(self, X, Y=None): + features = np.maximum(np.dot(X, self.W), 0) + return features + + diff --git a/tests/implementations/test_ProjLogit.py b/tests/implementations/test_ProjLogit.py new file mode 100644 index 0000000000..5b9dc0442c --- /dev/null +++ b/tests/implementations/test_ProjLogit.py @@ -0,0 +1,39 @@ +import unittest +import os +import numpy as np +#import scipy.io + +from ParamSklearn.implementations.ProjLogit import ProjLogit + + +class TestProjLogit(unittest.TestCase): + def test_sparse_filtering(self): + """Test logistic regression implementation based on least squares""" + + # simple test that should work out + trainx = np.random.rand(100,3) + trainy = np.zeros(10000) + testx = np.random.rand(100,3) + testy = np.zeros(100) + for i in range(100): + if trainx[i, 2] > 0.5: + trainy[i] = 1 + for i in range(100): + if testx[i, 2] > 0.5: + testy[i] = 1 + + model = ProjLogit(max_epochs = 10, verbose = True) + model.fit(trainx, trainy) + print("weights 0:") + print(model.w0) + predicted_prob = model.predict_proba(testx) + predicted2 = np.argmax(predicted_prob, axis = 1) + predicted = model.predict(testx) + + #print(predicted) + #print(testy) + #print((predicted != testy).sum()) + #print((predicted2 != testy).sum()) + self.assertTrue((predicted == predicted2).all()) + self.assertTrue(((1 - predicted_prob.sum(axis=1)) < 1e-3).all()) + self.assertTrue((predicted != testy).sum() < 20) diff --git a/tests/implementations/test_sparse_filtering.py b/tests/implementations/test_sparse_filtering.py new file mode 100644 index 0000000000..42d504b53f --- /dev/null +++ b/tests/implementations/test_sparse_filtering.py @@ -0,0 +1,74 @@ +import unittest +import os +import numpy as np + +from ParamSklearn.implementations.SparseFiltering import SparseFiltering + + +class TestSparseFiltering(unittest.TestCase): + def test_sparse_filtering(self): + """Test sparse filtering on a simple dataset""" + # load a few patches of image data from a file which is currently hard coded :) + # JTS TODO: remove this hard coding + dataset = "/home/springj/data/image_patches.npz" + # try not to break testing if data is not available + if (not os.path.isfile(dataset)): + return + patches = np.load(dataset) + data = patches['data'] + preprocess = SparseFiltering(256, random_state = 123456) + print("BEFORE") + preprocess.fit(data) + # JTS TODO: figure out a better test than this nonsense here ;) + self.assertFalse((preprocess.W == 0).all()) + """ + # JTS: the following is only useful for visualization purposes + # turn it on if you want to see sparse filtering in action on image data ;) + import pylab + # method for eyeballing the features + # assumes features in ROWS not columns! + def displayData(X, example_width = False, display_cols = False): + # compute rows, cols + m,n = X.shape + if not example_width: + example_width = int(np.round(np.sqrt(n))) + example_height = (n/example_width) + # Compute number of items to display + if not display_cols: + display_cols = int(np.sqrt(m)) + display_rows = int(np.ceil(m/display_cols)) + pad = 1 + # Setup blank display + display_array = -np.ones((pad+display_rows * (example_height+pad), + pad+display_cols * (example_width+pad))) + # Copy each example into a patch on the display array + curr_ex = 0 + for j in range(display_rows): + for i in range(display_cols): + if curr_ex>=m: + break + # Copy the patch + # Get the max value of the patch + max_val = abs(X[curr_ex,:]).max() + i_inds = example_width*[pad+j * (example_height+pad)+q for q in range(example_height)] + j_inds = [pad+i * (example_width+pad)+q + for q in range(example_width) + for nn in range(example_height)] + try: + newData = (X[curr_ex,:].reshape((example_height,example_width)))/max_val + except: + print X[curr_ex,:].shape + print (example_height,example_width) + raise + display_array[i_inds,j_inds] = newData.flatten() + curr_ex+=1 + if curr_ex>=m: + break + # Display the image + pylab.imshow(display_array,vmin=-1,vmax=1,interpolation='nearest',cmap=pylab.cm.gray) + pylab.xticks([]) + pylab.yticks([]) + displayData(preprocess.W.T) + pylab.show() + #""" + From 50309488cc2bd812e253792d2723cb3df88f633e Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 1 Oct 2015 14:52:59 +0200 Subject: [PATCH 252/352] Remove sparse filtering again --- .../implementations/SparseFiltering.py | 73 ------------------ .../implementations/test_sparse_filtering.py | 74 ------------------- 2 files changed, 147 deletions(-) delete mode 100644 ParamSklearn/implementations/SparseFiltering.py delete mode 100644 tests/implementations/test_sparse_filtering.py diff --git a/ParamSklearn/implementations/SparseFiltering.py b/ParamSklearn/implementations/SparseFiltering.py deleted file mode 100644 index d9a15e1079..0000000000 --- a/ParamSklearn/implementations/SparseFiltering.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -This quickly adapted version of sparse filtering requires scipy and numpy -""" -import numpy as np -from scipy.optimize import minimize - -def l2row(X): - """ - L2 normalize X by rows. We also use this to normalize by column with l2row(X.T) - """ - N = np.sqrt((X**2).sum(axis=1)+1e-8) - Y = (X.T/N).T - return Y,N - - -def l2rowg(X,Y,N,D): - """ - Compute L2 normalized gradient. - """ - return (D.T/N - Y.T * (D*X).sum(axis=1) / N**2).T - - -class SparseFiltering(object): - def __init__(self, N, maxiter=100, random_state=None): - self.N = N - self.W = None - self.maxiter = maxiter - if random_state is None: - self.rng = np.random - elif isinstance(random_state, int): - self.rng = np.random.RandomState(random_state) - else: - self.rng = random_state - - def step(self, X, W): - # returns current objective and gradient - W = W.reshape((X.shape[1], self.N)) - features = X.dot(W) #W.dot(X) - features_norm = np.sqrt(features**2 + 1e-8) - features_column, column_norm = l2row(features_norm.T) - features_row, row_norm = l2row(features_norm) - # compute objective function (l1 norm of features) - obj = features_row.sum() - # backprop through the whole process - deltaW = l2rowg(features_norm, features_row, row_norm, np.ones(features_row.shape)) - deltaW = l2rowg(features_norm.T, features_column, column_norm, deltaW.T).T - deltaW = X.T.dot(deltaW*(features/features_norm)) - return obj, deltaW.flatten() - - - def fit(self, X, y=None): - """ fit sparse filtering to data - this completely ignores y - """ - # init random weights - W = self.rng.randn(self.N,X.shape[1]) - # build a closure for the objective - obj_fun = lambda w: self.step(X, w) - # evaluate once for testing - obj, grad = obj_fun(W) - # and run optimization - opt = {'maxiter': self.maxiter} - res = minimize(obj_fun, W, method='L-BFGS-B', jac = True, options = opt) - self.W = res.x.reshape(X.shape[1], self.N) - - def transform(self, X): - # compute responses - features = X.dot(self.W) - # sparsify - features_norm = np.sqrt(features**2 + 1e-8) - features_column = l2row(features_norm.T)[0] - features_row = l2row(features_column)[0].T - return features_row diff --git a/tests/implementations/test_sparse_filtering.py b/tests/implementations/test_sparse_filtering.py deleted file mode 100644 index 42d504b53f..0000000000 --- a/tests/implementations/test_sparse_filtering.py +++ /dev/null @@ -1,74 +0,0 @@ -import unittest -import os -import numpy as np - -from ParamSklearn.implementations.SparseFiltering import SparseFiltering - - -class TestSparseFiltering(unittest.TestCase): - def test_sparse_filtering(self): - """Test sparse filtering on a simple dataset""" - # load a few patches of image data from a file which is currently hard coded :) - # JTS TODO: remove this hard coding - dataset = "/home/springj/data/image_patches.npz" - # try not to break testing if data is not available - if (not os.path.isfile(dataset)): - return - patches = np.load(dataset) - data = patches['data'] - preprocess = SparseFiltering(256, random_state = 123456) - print("BEFORE") - preprocess.fit(data) - # JTS TODO: figure out a better test than this nonsense here ;) - self.assertFalse((preprocess.W == 0).all()) - """ - # JTS: the following is only useful for visualization purposes - # turn it on if you want to see sparse filtering in action on image data ;) - import pylab - # method for eyeballing the features - # assumes features in ROWS not columns! - def displayData(X, example_width = False, display_cols = False): - # compute rows, cols - m,n = X.shape - if not example_width: - example_width = int(np.round(np.sqrt(n))) - example_height = (n/example_width) - # Compute number of items to display - if not display_cols: - display_cols = int(np.sqrt(m)) - display_rows = int(np.ceil(m/display_cols)) - pad = 1 - # Setup blank display - display_array = -np.ones((pad+display_rows * (example_height+pad), - pad+display_cols * (example_width+pad))) - # Copy each example into a patch on the display array - curr_ex = 0 - for j in range(display_rows): - for i in range(display_cols): - if curr_ex>=m: - break - # Copy the patch - # Get the max value of the patch - max_val = abs(X[curr_ex,:]).max() - i_inds = example_width*[pad+j * (example_height+pad)+q for q in range(example_height)] - j_inds = [pad+i * (example_width+pad)+q - for q in range(example_width) - for nn in range(example_height)] - try: - newData = (X[curr_ex,:].reshape((example_height,example_width)))/max_val - except: - print X[curr_ex,:].shape - print (example_height,example_width) - raise - display_array[i_inds,j_inds] = newData.flatten() - curr_ex+=1 - if curr_ex>=m: - break - # Display the image - pylab.imshow(display_array,vmin=-1,vmax=1,interpolation='nearest',cmap=pylab.cm.gray) - pylab.xticks([]) - pylab.yticks([]) - displayData(preprocess.W.T) - pylab.show() - #""" - From 50f6974ca6acc260b1c4ae397752d363d2cd2f30 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 1 Oct 2015 14:59:20 +0200 Subject: [PATCH 253/352] Update result in first steps --- source/first_steps.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/source/first_steps.rst b/source/first_steps.rst index cc7d3971a5..bae648b722 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -22,4 +22,4 @@ configuration on the iris dataset. >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.80000000000000004 + 0.93999999999999995 From 2e20d83d07e6d6827bc8e960978b9435b20f07be Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 26 May 2015 18:19:41 +0200 Subject: [PATCH 254/352] create latex table for #hyp.params --- misc/create_hyperparameter_table.py | 211 ++++++++++++++++++++++++++++ 1 file changed, 211 insertions(+) create mode 100644 misc/create_hyperparameter_table.py diff --git a/misc/create_hyperparameter_table.py b/misc/create_hyperparameter_table.py new file mode 100644 index 0000000000..0eb8adb0ac --- /dev/null +++ b/misc/create_hyperparameter_table.py @@ -0,0 +1,211 @@ +from argparse import ArgumentParser +import shlex +import subprocess +import HPOlibConfigSpace.hyperparameters +import ParamSklearn.classification +import ParamSklearn.regression +from collections import OrderedDict + +# Some macros +COND = "conditional" +CAT = "categorical" +CONT = "continuous" +CONST = "constant" +UN = "unparameterized" + +template_string = \ +""" +\documentclass{article} %% For LaTeX2 +\usepackage[a4paper, left=5mm, right=5mm, top=5mm, bottom=5mm]{geometry} + +%%\\usepackage[landscape]{geometry} +\\usepackage{multirow} %% import command \multicolmun +\\usepackage{tabularx} %% Convenient table formatting +\\usepackage{booktabs} %% provides \\toprule, \midrule and \\bottomrule + +\\begin{document} + +%s + +\\end{document} +""" + +caption_str = "Number of Hyperparameters for each possible %s " \ + "for a dataset with these properties: %s" + +table_str = \ +""" +\\begin{table}[t!] +\\centering +\\scriptsize +\\caption{ %s } +\\begin{tabularx}{\\textwidth}{ X X X X X X } +\\toprule +name & \#$\lambda$ & cat (cond) & cont (cond) & const & un \\\\ +\\toprule +\\\\ +%s +\\\\ +\\toprule +\\bottomrule +\\end{tabularx} +\\end{table} +""" + + +def get_dict(task_type="classifier", **kwargs): + assert task_type in ("classifier", "regressor") + + if task_type == "classifier": + cs = ParamSklearn.classification.ParamSklearnClassifier.get_hyperparameter_search_space(dataset_properties=kwargs) + elif task_type == "regressor": + cs = ParamSklearn.regression.ParamSklearnRegressor.get_hyperparameter_search_space(dataset_properties=kwargs) + else: + raise ValueError("'task_type' is not in ('classifier', 'regressor')") + + preprocessor = None + estimator = None + + for h in cs.get_hyperparameters(): + if h.name == "preprocessor": + preprocessor = h + elif h.name == task_type: + estimator = h + + if estimator is None: + raise ValueError("No classifier found") + elif preprocessor is None: + raise ValueError("No preprocessor found") + + estimator_dict = OrderedDict() + for i in estimator.choices: + estimator_dict[i] = OrderedDict() + estimator_dict[i][COND] = OrderedDict() + for t in (CAT, CONT, CONST): + estimator_dict[i][t] = 0 + estimator_dict[i][COND][t] = 0 + estimator_dict[i][UN] = 0 + + preprocessor_dict = OrderedDict() + for i in preprocessor.choices: + preprocessor_dict[i] = OrderedDict() + preprocessor_dict[i][COND] = OrderedDict() + for t in (CAT, CONT, CONST): + preprocessor_dict[i][t] = 0 + preprocessor_dict[i][COND][t] = 0 + preprocessor_dict[i][UN] = 0 + + for h in cs.get_hyperparameters(): + if h.name == "preprocessor" or h.name == task_type: + continue + # walk over both dicts + for d in (estimator_dict, preprocessor_dict): + est = h.name.split(":")[0] + if est not in d: + continue + if isinstance(h, HPOlibConfigSpace.hyperparameters.UniformIntegerHyperparameter): + d[est][CONT] += 1 + elif isinstance(h, HPOlibConfigSpace.hyperparameters.UniformFloatHyperparameter): + d[est][CONT] += 1 + elif isinstance(h, HPOlibConfigSpace.hyperparameters.CategoricalHyperparameter): + d[est][CAT] += 1 + elif isinstance(h, HPOlibConfigSpace.hyperparameters.Constant): + d[est][CONST] += 1 + elif isinstance(h, HPOlibConfigSpace.hyperparameters.UnParametrizedHyperparameter): + d[est][UN] += 1 + else: + raise ValueError("Don't know that type: %s" % type(h)) + + for h in cs.get_conditions(): + if h.parent.name == task_type: + # ignore this condition + print "IGNORE", h + continue + + # walk over both dicts and collect hyperparams + for d in (estimator_dict, preprocessor_dict): + est = h.child.name.split(":")[0] + if est not in d: + print "Could not find %s" % est + continue + #print "####" + #print vars(h) + #print h.parent + #print type(h) + if isinstance(h.child, HPOlibConfigSpace.hyperparameters.UniformIntegerHyperparameter): + d[est][COND][CONT] += 1 + elif isinstance(h.child, HPOlibConfigSpace.hyperparameters.UniformFloatHyperparameter): + d[est][COND][CONT] += 1 + elif isinstance(h.child, HPOlibConfigSpace.hyperparameters.CategoricalHyperparameter): + d[est][COND][CAT] += 1 + elif isinstance(h.child, HPOlibConfigSpace.hyperparameters.Constant): + d[est][COND][CONST] += 1 + elif isinstance(h.child, HPOlibConfigSpace.hyperparameters.UnParametrizedHyperparameter): + d[est][COND][UN] += 1 + else: + raise ValueError("Don't know that type: %s" % type(h)) + + return (estimator_dict, preprocessor_dict) + + +def build_table(d): + lines = list() + for est in d.keys(): + sum_ = 0 + t_list = list([est.replace("_", " "), ]) + for t in (CAT, CONT): + sum_ += d[est][t] + t_list.append("%d (%d)" % (d[est][t], d[est][COND][t])) + t_list.append("%d" % d[est][CONST]) + t_list.append("%d" % d[est][UN]) + sum_ += d[est][CONST] + d[est][UN] + t_list.insert(1, "%d" % sum_) + lines.append(" & ".join(t_list)) + return "\\\\ \n".join(lines) + + +def main(): + parser = ArgumentParser() + + # General Options + parser.add_argument("-s", "--save", dest="save", default=None, + help="Where to save plot instead of showing it?") + parser.add_argument("-t", "--type", dest="task_type", default="classifier", + choices=("classifier", ), help="Type of dataset") + parser.add_argument("--sparse", dest="sparse", default=False, + action="store_true", help="dataset property") + prop = parser.add_mutually_exclusive_group() + prop.add_argument("--multilabel", dest="multilabel", default=False, + action="store_true", help="dataset property") + prop.add_argument("--multiclass", dest="multiclass", default=False, + action="store_true", help="dataset property") + prop.add_argument("--binary", dest="binary", default=False, + action="store_true", help="dataset property") + + args, unknown = parser.parse_known_args() + + props = {"sparse": False, + "multilabel.classification": args.multilabel, + "multiclass.classification": args.multiclass, + "binary.classification": args.binary} + est_dict, preproc_dict = get_dict(task_type=args.task_type, **props) + + est_table = build_table(est_dict) + preproc_table = build_table(preproc_dict) + + est_table = table_str % (caption_str % (args.task_type, str(props)), est_table) + preproc_table = table_str % (caption_str % ("preprocessor", str(props)), preproc_table) + + tex_doc = template_string % "\n".join([est_table, preproc_table]) + if args.save is None: + print tex_doc + else: + fh = open(args.save, "w") + fh.write(tex_doc) + fh.close() + proc = subprocess.Popen(shlex.split('pdflatex %s' % args.save)) + proc.communicate() + + +if __name__ == "__main__": + main() \ No newline at end of file From 50a2d33cf7f953d30b24c4ae5076e59b4c0d2d73 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 26 May 2015 18:25:29 +0200 Subject: [PATCH 255/352] FIX: --sparse was ignored --- misc/create_hyperparameter_table.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/misc/create_hyperparameter_table.py b/misc/create_hyperparameter_table.py index 0eb8adb0ac..35d1b82e87 100644 --- a/misc/create_hyperparameter_table.py +++ b/misc/create_hyperparameter_table.py @@ -184,7 +184,7 @@ def main(): args, unknown = parser.parse_known_args() - props = {"sparse": False, + props = {"sparse": args.sparse, "multilabel.classification": args.multilabel, "multiclass.classification": args.multiclass, "binary.classification": args.binary} From 802537f9c01e2bea077e9455e8d15e204446302d Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 26 May 2015 18:29:32 +0200 Subject: [PATCH 256/352] remove *.aux and *.log files after using pdflatex --- misc/create_hyperparameter_table.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/misc/create_hyperparameter_table.py b/misc/create_hyperparameter_table.py index 35d1b82e87..b328a98903 100644 --- a/misc/create_hyperparameter_table.py +++ b/misc/create_hyperparameter_table.py @@ -1,10 +1,12 @@ from argparse import ArgumentParser +from collections import OrderedDict +import os import shlex import subprocess + import HPOlibConfigSpace.hyperparameters import ParamSklearn.classification import ParamSklearn.regression -from collections import OrderedDict # Some macros COND = "conditional" @@ -119,14 +121,14 @@ def get_dict(task_type="classifier", **kwargs): for h in cs.get_conditions(): if h.parent.name == task_type: # ignore this condition - print "IGNORE", h + # print "IGNORE", h continue # walk over both dicts and collect hyperparams for d in (estimator_dict, preprocessor_dict): est = h.child.name.split(":")[0] if est not in d: - print "Could not find %s" % est + #print "Could not find %s" % est continue #print "####" #print vars(h) @@ -205,6 +207,12 @@ def main(): fh.close() proc = subprocess.Popen(shlex.split('pdflatex %s' % args.save)) proc.communicate() + try: + os.remove(args.save.replace(".tex", ".aux")) + os.remove(args.save.replace(".tex", ".log")) + except OSError: + # This is fine + pass if __name__ == "__main__": From afb0ddac1ebea9fea602b1587a019ad2ffa62b9d Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Wed, 27 May 2015 10:45:04 +0200 Subject: [PATCH 257/352] fix preprocessing hyperparams --- misc/create_hyperparameter_table.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/misc/create_hyperparameter_table.py b/misc/create_hyperparameter_table.py index b328a98903..c7d6fd16a4 100644 --- a/misc/create_hyperparameter_table.py +++ b/misc/create_hyperparameter_table.py @@ -119,7 +119,7 @@ def get_dict(task_type="classifier", **kwargs): raise ValueError("Don't know that type: %s" % type(h)) for h in cs.get_conditions(): - if h.parent.name == task_type: + if h.parent.name == task_type or h.parent.name == "preprocessor": # ignore this condition # print "IGNORE", h continue @@ -130,6 +130,7 @@ def get_dict(task_type="classifier", **kwargs): if est not in d: #print "Could not find %s" % est continue + #print "####" #print vars(h) #print h.parent @@ -146,7 +147,7 @@ def get_dict(task_type="classifier", **kwargs): d[est][COND][UN] += 1 else: raise ValueError("Don't know that type: %s" % type(h)) - + print preprocessor_dict return (estimator_dict, preprocessor_dict) @@ -176,20 +177,17 @@ def main(): choices=("classifier", ), help="Type of dataset") parser.add_argument("--sparse", dest="sparse", default=False, action="store_true", help="dataset property") - prop = parser.add_mutually_exclusive_group() + prop = parser.add_mutually_exclusive_group(required=True) prop.add_argument("--multilabel", dest="multilabel", default=False, action="store_true", help="dataset property") prop.add_argument("--multiclass", dest="multiclass", default=False, action="store_true", help="dataset property") - prop.add_argument("--binary", dest="binary", default=False, - action="store_true", help="dataset property") args, unknown = parser.parse_known_args() props = {"sparse": args.sparse, - "multilabel.classification": args.multilabel, - "multiclass.classification": args.multiclass, - "binary.classification": args.binary} + "multilabel": args.multilabel, + "multiclass": args.multiclass} est_dict, preproc_dict = get_dict(task_type=args.task_type, **props) est_table = build_table(est_dict) From bce6ae07f1e2f0c55b97dcf23f0e668d63d80492 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 27 May 2015 21:52:10 +0200 Subject: [PATCH 258/352] create hyperparameter table: sort alphabetically --- misc/create_hyperparameter_table.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/misc/create_hyperparameter_table.py b/misc/create_hyperparameter_table.py index c7d6fd16a4..57081b85bd 100644 --- a/misc/create_hyperparameter_table.py +++ b/misc/create_hyperparameter_table.py @@ -80,7 +80,7 @@ def get_dict(task_type="classifier", **kwargs): raise ValueError("No preprocessor found") estimator_dict = OrderedDict() - for i in estimator.choices: + for i in sorted(estimator.choices): estimator_dict[i] = OrderedDict() estimator_dict[i][COND] = OrderedDict() for t in (CAT, CONT, CONST): @@ -89,7 +89,7 @@ def get_dict(task_type="classifier", **kwargs): estimator_dict[i][UN] = 0 preprocessor_dict = OrderedDict() - for i in preprocessor.choices: + for i in sorted(preprocessor.choices): preprocessor_dict[i] = OrderedDict() preprocessor_dict[i][COND] = OrderedDict() for t in (CAT, CONT, CONST): From e4719b4ec095d517f182ffefe6c5e8c6dc04d4e9 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Sun, 31 May 2015 13:51:57 +0200 Subject: [PATCH 259/352] add binary --- misc/create_hyperparameter_table.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/misc/create_hyperparameter_table.py b/misc/create_hyperparameter_table.py index 57081b85bd..eb809d4a3c 100644 --- a/misc/create_hyperparameter_table.py +++ b/misc/create_hyperparameter_table.py @@ -182,6 +182,8 @@ def main(): action="store_true", help="dataset property") prop.add_argument("--multiclass", dest="multiclass", default=False, action="store_true", help="dataset property") + prop.add_argument("--binary", dest="binary", default=False, + action="store_true", help="dataset property") args, unknown = parser.parse_known_args() From 35d61fd07b39e8740cc6a3a3fc6cd1d087942621 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Sat, 13 Jun 2015 15:51:21 +0200 Subject: [PATCH 260/352] Add new test; check if OHE can deal with new values at transform time --- tests/implementations/test_OneHotEncoder.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/implementations/test_OneHotEncoder.py b/tests/implementations/test_OneHotEncoder.py index 23db1230d8..d8207da713 100644 --- a/tests/implementations/test_OneHotEncoder.py +++ b/tests/implementations/test_OneHotEncoder.py @@ -121,3 +121,12 @@ def fit_then_transform_dense(self, expected, input, categorical_features='all'): self.assertIsInstance(transformation, np.ndarray) assert_array_almost_equal(expected, transformation) + def test_transform_with_unknown_value(self): + input = np.array(((0, 1, 2, 3, 4, 5), (0, 1, 2, 3, 4, 5))).transpose() + ohe = OneHotEncoder() + ohe.fit(input) + test_data = np.array(((0, 1, 2, 6), (0, 1, 6, 7))).transpose() + output = ohe.transform(test_data).todense() + self.assertEqual(5, np.sum(output)) + + From ec73e1c57807ccc0ca111953f02b9aa0a14fe862 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 23 Jun 2015 17:28:12 +0200 Subject: [PATCH 261/352] Change hyperparameters for DT and RidgeClassifier --- ParamSklearn/components/classification/decision_tree.py | 9 ++++++--- ParamSklearn/components/classification/ridge.py | 4 ++-- tests/components/classification/test_decision_tree.py | 2 +- tests/components/classification/test_ridge.py | 4 ++-- 4 files changed, 11 insertions(+), 8 deletions(-) diff --git a/ParamSklearn/components/classification/decision_tree.py b/ParamSklearn/components/classification/decision_tree.py index d0f8a98357..33d2ba46bb 100644 --- a/ParamSklearn/components/classification/decision_tree.py +++ b/ParamSklearn/components/classification/decision_tree.py @@ -22,7 +22,7 @@ def __init__(self, criterion, max_features, max_depth, if max_depth == "None": self.max_depth = None else: - self.max_depth = int(self.max_depth) + self.max_depth = max_depth self.min_samples_split = int(min_samples_split) self.min_samples_leaf = int(min_samples_leaf) @@ -36,9 +36,12 @@ def __init__(self, criterion, max_features, max_depth, self.estimator = None def fit(self, X, y, sample_weight=None): + num_features = X.shape[1] + max_depth = max(1, self.max_depth * np.ceil(np.log2(num_features))) + self.estimator = DecisionTreeClassifier( criterion=self.criterion, - max_depth=self.max_depth, + max_depth=max_depth, min_samples_split=self.min_samples_split, min_samples_leaf=self.min_samples_leaf, max_leaf_nodes=self.max_leaf_nodes, @@ -83,7 +86,7 @@ def get_hyperparameter_search_space(dataset_properties=None): criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") max_features = Constant('max_features', 1.0) - max_depth = UnParametrizedHyperparameter("max_depth", "None") + max_depth = UniformFloatHyperparameter('max_depth', 0., 1., default=1.) min_samples_split = UniformIntegerHyperparameter( "min_samples_split", 2, 20, default=2) min_samples_leaf = UniformIntegerHyperparameter( diff --git a/ParamSklearn/components/classification/ridge.py b/ParamSklearn/components/classification/ridge.py index 6cfbc01be9..a9270285d1 100644 --- a/ParamSklearn/components/classification/ridge.py +++ b/ParamSklearn/components/classification/ridge.py @@ -64,8 +64,8 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - alpha = UniformFloatHyperparameter("alpha", 10 ** -7, 10 ** -1, - log=True, default=0.0001) + alpha = UniformFloatHyperparameter("alpha", 10 ** -5, 10., + log=True, default=1.) fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True") tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default=1e-4, log=True) diff --git a/tests/components/classification/test_decision_tree.py b/tests/components/classification/test_decision_tree.py index a6daff0cea..005b340906 100644 --- a/tests/components/classification/test_decision_tree.py +++ b/tests/components/classification/test_decision_tree.py @@ -19,5 +19,5 @@ def test_default_configuration_predict_proba(self): for i in range(10): predictions, targets = _test_classifier_predict_proba( DecisionTree, dataset='iris') - self.assertAlmostEqual(2.7631021115928571, + self.assertAlmostEqual(0.28069887755912964, sklearn.metrics.log_loss(targets, predictions)) \ No newline at end of file diff --git a/tests/components/classification/test_ridge.py b/tests/components/classification/test_ridge.py index 99986a5da8..0b5892c98b 100644 --- a/tests/components/classification/test_ridge.py +++ b/tests/components/classification/test_ridge.py @@ -10,7 +10,7 @@ class SGDComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): predictions, targets = _test_classifier(Ridge, dataset='iris') - self.assertAlmostEqual(0.9, + self.assertAlmostEqual(0.88, sklearn.metrics.accuracy_score(predictions, targets)) @@ -18,6 +18,6 @@ def test_default_configuration_digits(self): for i in range(10): predictions, targets = \ _test_classifier(classifier=Ridge, dataset='digits') - self.assertAlmostEqual(0.8682452944748027, + self.assertAlmostEqual(0.87553126897389189, sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file From ce6e0657e9355cc577d8e6d58f411418568b4d21 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 23 Jun 2015 17:50:52 +0200 Subject: [PATCH 262/352] FIX: use polynomial features --- ParamSklearn/components/preprocessing/polynomial.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/components/preprocessing/polynomial.py b/ParamSklearn/components/preprocessing/polynomial.py index 5559ebbc7c..77cb819e2d 100644 --- a/ParamSklearn/components/preprocessing/polynomial.py +++ b/ParamSklearn/components/preprocessing/polynomial.py @@ -52,7 +52,7 @@ def get_properties(): # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use 'handles_sparse': True, 'input': (DENSE,), - 'output': PREDICTIONS, + 'output': DENSE, # TODO find out what is best used here! 'preferred_dtype': None} From ef390d13cbadfd1d54717b0d59162d2f27e2119c Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 23 Jun 2015 17:51:12 +0200 Subject: [PATCH 263/352] Change parametrization of DT and fix tests --- ParamSklearn/components/classification/decision_tree.py | 4 ++-- tests/components/preprocessing/test_balancing.py | 3 ++- tests/test_classification.py | 4 +++- tests/test_textclassification.py | 2 +- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/ParamSklearn/components/classification/decision_tree.py b/ParamSklearn/components/classification/decision_tree.py index 33d2ba46bb..4a4d918cfc 100644 --- a/ParamSklearn/components/classification/decision_tree.py +++ b/ParamSklearn/components/classification/decision_tree.py @@ -37,7 +37,7 @@ def __init__(self, criterion, max_features, max_depth, def fit(self, X, y, sample_weight=None): num_features = X.shape[1] - max_depth = max(1, self.max_depth * np.ceil(np.log2(num_features))) + max_depth = max(1, int(np.round(self.max_depth * num_features, 0))) self.estimator = DecisionTreeClassifier( criterion=self.criterion, @@ -86,7 +86,7 @@ def get_hyperparameter_search_space(dataset_properties=None): criterion = CategoricalHyperparameter( "criterion", ["gini", "entropy"], default="gini") max_features = Constant('max_features', 1.0) - max_depth = UniformFloatHyperparameter('max_depth', 0., 1., default=1.) + max_depth = UniformFloatHyperparameter('max_depth', 0., 2., default=0.5) min_samples_split = UniformIntegerHyperparameter( "min_samples_split", 2, 20, default=2) min_samples_leaf = UniformIntegerHyperparameter( diff --git a/tests/components/preprocessing/test_balancing.py b/tests/components/preprocessing/test_balancing.py index ff35d09ef8..0bef60688c 100644 --- a/tests/components/preprocessing/test_balancing.py +++ b/tests/components/preprocessing/test_balancing.py @@ -78,7 +78,8 @@ def test_weighting_effect(self): ('libsvm_svc', LibSVM_SVC, 0.915, 0.937), ('liblinear_svc', LibLinear_SVC, 0.920, 0.923), ('sgd', SGD, 0.879, 0.906), - ('ridge', Ridge, 0.868, 0.880)]: + ('ridge', Ridge, 0.89071038251366119, + 0.91013964784456591)]: for strategy, acc in [('none', acc_no_weighting), ('weighting', acc_weighting)]: X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') diff --git a/tests/test_classification.py b/tests/test_classification.py index 6adefa4bd9..53a554a509 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -121,6 +121,8 @@ def test_configurations(self): else: print config raise e + except MemoryError as e: + continue def test_configurations_sparse(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space( @@ -175,7 +177,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(135, len(hyperparameters)) + self.assertEqual(138, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 5, len(conditions)) diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index eceae86b87..fdb3fc6a96 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(134, len(hyperparameters)) + self.assertEqual(137, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) From 6a93e91cd17905962687017232ab3e65ea7735c8 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 23 Jun 2015 18:01:59 +0200 Subject: [PATCH 264/352] Remove unnecessary hyperparameter class_weight in SVM models --- ParamSklearn/components/classification/liblinear_svc.py | 8 ++------ ParamSklearn/components/classification/libsvm_svc.py | 8 ++------ source/first_steps.rst | 1 + tests/test_classification.py | 3 +-- tests/test_textclassification.py | 2 +- 5 files changed, 7 insertions(+), 15 deletions(-) diff --git a/ParamSklearn/components/classification/liblinear_svc.py b/ParamSklearn/components/classification/liblinear_svc.py index c53ac60c67..ef91476214 100644 --- a/ParamSklearn/components/classification/liblinear_svc.py +++ b/ParamSklearn/components/classification/liblinear_svc.py @@ -15,7 +15,7 @@ class LibLinear_SVC(ParamSklearnClassificationAlgorithm): # Liblinear is not deterministic as it uses a RNG inside # TODO: maybe add dual and crammer-singer? def __init__(self, penalty, loss, dual, tol, C, multi_class, - fit_intercept, intercept_scaling, class_weight, + fit_intercept, intercept_scaling, class_weight=None, random_state=None): self.penalty = penalty self.loss = loss @@ -100,10 +100,7 @@ def get_hyperparameter_search_space(dataset_properties=None): # These are set ad-hoc fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True") intercept_scaling = UnParametrizedHyperparameter("intercept_scaling", 1) - # This does not allow for other resampling methods! - class_weight = CategoricalHyperparameter("class_weight", - ["None", "auto"], - default="None") + cs = ConfigurationSpace() cs.add_hyperparameter(penalty) cs.add_hyperparameter(loss) @@ -113,7 +110,6 @@ def get_hyperparameter_search_space(dataset_properties=None): cs.add_hyperparameter(multi_class) cs.add_hyperparameter(fit_intercept) cs.add_hyperparameter(intercept_scaling) - cs.add_hyperparameter(class_weight) penalty_and_loss = ForbiddenAndConjunction( ForbiddenEqualsClause(penalty, "l1"), ForbiddenEqualsClause(loss, "l1") diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index bcdc2382ec..3f88cb288e 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -11,8 +11,8 @@ class LibSVM_SVC(ParamSklearnClassificationAlgorithm): - def __init__(self, C, kernel, gamma, shrinking, tol, class_weight, max_iter, - degree=3, coef0=0, random_state=None): + def __init__(self, C, kernel, gamma, shrinking, tol, max_iter, + class_weight=None, degree=3, coef0=0, random_state=None): self.C = C self.kernel = kernel self.degree = degree @@ -110,9 +110,6 @@ def get_hyperparameter_search_space(dataset_properties=None): tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default=1e-4, log=True) # cache size is not a hyperparameter, but an argument to the program! - class_weight = CategoricalHyperparameter("class_weight", - ["None", "auto"], - default="None") max_iter = UnParametrizedHyperparameter("max_iter", -1) cs = ConfigurationSpace() @@ -123,7 +120,6 @@ def get_hyperparameter_search_space(dataset_properties=None): cs.add_hyperparameter(coef0) cs.add_hyperparameter(shrinking) cs.add_hyperparameter(tol) - cs.add_hyperparameter(class_weight) cs.add_hyperparameter(max_iter) degree_depends_on_poly = EqualsCondition(degree, kernel, "poly") diff --git a/source/first_steps.rst b/source/first_steps.rst index bae648b722..6d82ad86a5 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -17,6 +17,7 @@ configuration on the iris dataset. >>> np.random.seed(1) >>> np.random.shuffle(indices) >>> configuration_space = ParamSklearnClassifier.get_hyperparameter_search_space() + >>> configuration_space.seed(2) >>> configuration = configuration_space.sample_configuration() >>> cls = ParamSklearnClassifier(configuration, random_state=1) >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) diff --git a/tests/test_classification.py b/tests/test_classification.py index 53a554a509..1382da9f78 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -177,7 +177,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(138, len(hyperparameters)) + self.assertEqual(136, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 5, len(conditions)) @@ -232,7 +232,6 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): " classifier, Value: liblinear_svc\n" " imputation:strategy, Value: mean\n" " liblinear_svc:C, Value: 1.0\n" - " liblinear_svc:class_weight, Value: None\n" " liblinear_svc:dual, Constant: False\n" " liblinear_svc:fit_intercept, Constant: True\n" " liblinear_svc:intercept_scaling, Constant: 1\n" diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index fdb3fc6a96..8645da2d11 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(137, len(hyperparameters)) + self.assertEqual(135, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor and imputation strategy self.assertEqual(len(hyperparameters) - 3, len(conditions)) From 21c045090b4d6d5bbf8a8bbdb88dedb024a49b01 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 15 Jul 2015 16:45:46 +0200 Subject: [PATCH 265/352] Upgrade to sklearn 0.16.1; hyperparameters etc not yet adapted --- ParamSklearn/base.py | 4 +- .../components/classification/extra_trees.py | 5 +- .../classification/random_forest.py | 5 +- .../extra_trees_preproc_for_classification.py | 5 +- .../components/regression/random_forest.py | 5 +- ParamSklearn/implementations/Imputation.py | 55 +- ParamSklearn/implementations/MinMaxScaler.py | 40 +- ParamSklearn/implementations/Normalizer.py | 19 +- ParamSklearn/implementations/OneHotEncoder.py | 19 +- .../implementations/StandardScaler.py | 29 +- ParamSklearn/implementations/forest.py | 1121 ----------------- ParamSklearn/regression.py | 4 +- setup.py | 6 +- source/conf.py | 2 + tests/components/classification/test_qda.py | 4 +- tests/components/preprocessing/test_gem.py | 4 +- .../preprocessing/test_polynomial.py | 1 - .../test_random_trees_embedding.py | 12 +- .../components/preprocessing/test_scaling.py | 3 +- .../regression/test_ridge_regression.py | 2 +- tests/implementations/test_OneHotEncoder.py | 48 +- tests/implementations/test_minmaxscaler.py | 36 + 22 files changed, 171 insertions(+), 1258 deletions(-) delete mode 100644 ParamSklearn/implementations/forest.py diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index b67a658346..fbe8344001 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -4,8 +4,8 @@ import numpy as np import sklearn -if sklearn.__version__ != "0.15.2": - raise ValueError("ParamSklearn supports only sklearn version 0.15.2, " +if sklearn.__version__ != "0.16.1": + raise ValueError("ParamSklearn supports only sklearn version 0.16.1, " "you installed %s." % sklearn.__version__) from sklearn.base import BaseEstimator diff --git a/ParamSklearn/components/classification/extra_trees.py b/ParamSklearn/components/classification/extra_trees.py index 1d923bc7d1..9bc85b0082 100644 --- a/ParamSklearn/components/classification/extra_trees.py +++ b/ParamSklearn/components/classification/extra_trees.py @@ -8,8 +8,9 @@ from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS +from sklearn.ensemble import ExtraTreesClassifier as ETC # get our own forests to replace the sklearn ones -from ParamSklearn.implementations import forest +#from ParamSklearn.implementations import forest class ExtraTreesClassifier(ParamSklearnClassificationAlgorithm): @@ -64,7 +65,7 @@ def fit(self, X, Y, sample_weight=None): max_features = int(float(self.max_features) * (np.log(num_features) + 1)) # Use at most half of the features max_features = max(1, min(int(X.shape[1] / 2), max_features)) - self.estimator = forest.ExtraTreesClassifier( + self.estimator = ETC( n_estimators=0, criterion=self.criterion, max_depth=self.max_depth, min_samples_split=self.min_samples_split, min_samples_leaf=self.min_samples_leaf, bootstrap=self.bootstrap, diff --git a/ParamSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py index e3f13e71a7..646a841452 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -1,4 +1,5 @@ import numpy as np +from sklearn.ensemble import RandomForestClassifier from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -8,7 +9,7 @@ from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS # get our own forests to replace the sklearn ones -from ParamSklearn.implementations import forest +#from ParamSklearn.implementations import forest class RandomForest(ParamSklearnClassificationAlgorithm): @@ -50,7 +51,7 @@ def fit(self, X, Y, sample_weight=None): self.max_leaf_nodes = None # initial fit of only increment trees - self.estimator = forest.RandomForestClassifier( + self.estimator = RandomForestClassifier( n_estimators=0, criterion=self.criterion, max_features=max_features, diff --git a/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py b/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py index 254eb2fb00..8a8ed0f61c 100644 --- a/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py +++ b/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py @@ -1,4 +1,5 @@ import numpy as np +from sklearn.ensemble import ExtraTreesClassifier from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -10,7 +11,7 @@ from ParamSklearn.util import DENSE, INPUT # get our own forests to replace the sklearn ones -from ParamSklearn.implementations import forest +#from ParamSklearn.implementations import forest class ExtraTreesPreprocessor(ParamSklearnPreprocessingAlgorithm): @@ -66,7 +67,7 @@ def fit(self, X, Y, sample_weight=None): float(self.max_features) * (np.log(num_features) + 1)) # Use at most half of the features max_features = max(1, min(int(X.shape[1] / 2), max_features)) - self.preprocessor = forest.ExtraTreesClassifier( + self.preprocessor = ExtraTreesClassifier( n_estimators=0, criterion=self.criterion, max_depth=self.max_depth, min_samples_split=self.min_samples_split, min_samples_leaf=self.min_samples_leaf, bootstrap=self.bootstrap, diff --git a/ParamSklearn/components/regression/random_forest.py b/ParamSklearn/components/regression/random_forest.py index 4e2c01b9ad..40a47a960b 100644 --- a/ParamSklearn/components/regression/random_forest.py +++ b/ParamSklearn/components/regression/random_forest.py @@ -8,7 +8,8 @@ from ParamSklearn.components.regression_base import ParamSklearnRegressionAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS # get our own forests to replace the sklearn ones -from ParamSklearn.implementations import forest +#from ParamSklearn.implementations import forest +from sklearn.ensemble import RandomForestRegressor class RandomForest(ParamSklearnRegressionAlgorithm): @@ -65,7 +66,7 @@ def fit(self, X, Y): max_features = int(float(self.max_features) * (np.log(num_features) + 1)) # Use at most half of the features max_features = max(1, min(int(X.shape[1] / 2), max_features)) - self.estimator = forest.RandomForestRegressor( + self.estimator = RandomForestRegressor( n_estimators=0, criterion=self.criterion, max_features=max_features, diff --git a/ParamSklearn/implementations/Imputation.py b/ParamSklearn/implementations/Imputation.py index 8e6c942b05..bfafb05988 100644 --- a/ParamSklearn/implementations/Imputation.py +++ b/ParamSklearn/implementations/Imputation.py @@ -9,11 +9,11 @@ from scipy import stats from sklearn.base import BaseEstimator, TransformerMixin -from sklearn.utils import array2d -from sklearn.utils import atleast2d_or_csr -from sklearn.utils import atleast2d_or_csc +from sklearn.utils import check_array from sklearn.utils import as_float_array from sklearn.utils.fixes import astype +from sklearn.utils.sparsefuncs import _get_median +from sklearn.utils.validation import check_is_fitted from sklearn.externals import six @@ -33,34 +33,6 @@ def _get_mask(X, value_to_mask): return X == value_to_mask -def _get_median(data, n_zeros): - """Compute the median of data with n_zeros additional zeros. - - This function is used to support sparse matrices; it modifies data in-place - """ - n_elems = len(data) + n_zeros - if not n_elems: - return np.nan - n_negative = np.count_nonzero(data < 0) - middle, is_odd = divmod(n_elems, 2) - data.sort() - - if is_odd: - return _get_elem_at_rank(middle, data, n_negative, n_zeros) - - return (_get_elem_at_rank(middle - 1, data, n_negative, n_zeros) + - _get_elem_at_rank(middle, data, n_negative, n_zeros)) / 2. - - -def _get_elem_at_rank(rank, data, n_negative, n_zeros): - """Find the value in data augmented with n_zeros for the given rank""" - if rank < n_negative: - return data[rank] - if rank - n_negative < n_zeros: - return 0 - return data[rank - n_zeros] - - def _most_frequent(array, extra_value, n_repeat): """Compute the most frequent value in a 1d array extended with [extra_value] * n_repeat, where extra_value is assumed to be not part @@ -134,7 +106,7 @@ class Imputer(BaseEstimator, TransformerMixin): Attributes ---------- - `statistics_` : array of shape (n_features,) + statistics_ : array of shape (n_features,) The imputation fill value for each feature if axis == 0. Notes @@ -185,15 +157,15 @@ def fit(self, X, y=None): # when the imputation is done per sample (i.e., when axis=1). if self.axis == 0: if sparse.issparse(X): - X = atleast2d_or_csc(X, dtype=np.float64, - force_all_finite=False) + X = check_array(X, accept_sparse='csc', dtype=np.float64, + force_all_finite=False) self.statistics_ = self._sparse_fit(X, self.strategy, self.missing_values, self.axis) else: - X = atleast2d_or_csc(X, dtype=self.dtype, - force_all_finite=False) + print type(X), X.dtype, self.dtype + X = check_array(X, dtype=self.dtype, force_all_finite=False) self.statistics_ = self._dense_fit(X, self.strategy, self.missing_values, @@ -282,7 +254,7 @@ def _sparse_fit(self, X, strategy, missing_values, axis): def _dense_fit(self, X, strategy, missing_values, axis): """Fit the transformer on dense data.""" - X = array2d(X, force_all_finite=False) + X = check_array(X, force_all_finite=False) mask = _get_mask(X, missing_values) masked_X = ma.masked_array(X, mask=mask) @@ -339,6 +311,9 @@ def transform(self, X): X : {array-like, sparse matrix}, shape = [n_samples, n_features] The input data to complete. """ + if self.axis == 0: + check_is_fitted(self, 'statistics_') + # Copy just once X = as_float_array(X, copy=self.copy, force_all_finite=False) @@ -346,7 +321,8 @@ def transform(self, X): # transform(X), the imputation data need to be recomputed # when the imputation is done per sample if self.axis == 1: - X = atleast2d_or_csr(X, force_all_finite=False, copy=False) + X = check_array(X, accept_sparse='csr', force_all_finite=False, + copy=False) if sparse.issparse(X): statistics = self._sparse_fit(X, @@ -360,7 +336,8 @@ def transform(self, X): self.missing_values, self.axis) else: - X = atleast2d_or_csc(X, force_all_finite=False, copy=False) + X = check_array(X, accept_sparse='csc', force_all_finite=False, + copy=False) statistics = self.statistics_ # Delete the invalid rows/columns diff --git a/ParamSklearn/implementations/MinMaxScaler.py b/ParamSklearn/implementations/MinMaxScaler.py index e491266867..ff6215fcaf 100644 --- a/ParamSklearn/implementations/MinMaxScaler.py +++ b/ParamSklearn/implementations/MinMaxScaler.py @@ -2,40 +2,43 @@ from scipy import sparse from sklearn.base import BaseEstimator, TransformerMixin -from sklearn.utils import check_arrays, warn_if_not_float +from sklearn.utils.validation import check_is_fitted, check_array, warn_if_not_float class MinMaxScaler(BaseEstimator, TransformerMixin): - """Standardizes features by scaling each feature to a given range. + """Transforms features by scaling each feature to a given range. This estimator scales and translates each feature individually such that it is in the given range on the training set, i.e. between zero and one. - The standardization is given by:: + The transformation is given by:: + X_std = (X - X.min(axis=0)) / (X.max(axis=0) - X.min(axis=0)) X_scaled = X_std * (max - min) + min where min, max = feature_range. - This standardization is often used as an alternative to zero mean, + This transformation is often used as an alternative to zero mean, unit variance scaling. + Read more in the :ref:`User Guide `. + Parameters ---------- feature_range: tuple (min, max), default=(0, 1) Desired range of transformed data. - copy : boolean, optional, default is True + copy : boolean, optional, default True Set to False to perform inplace row normalization and avoid a copy (if the input is already a numpy array). Attributes ---------- - `min_` : ndarray, shape (n_features,) + min_ : ndarray, shape (n_features,) Per feature adjustment for minimum. - `scale_` : ndarray, shape (n_features,) + scale_ : ndarray, shape (n_features,) Per feature relative scaling of the data. """ @@ -51,11 +54,15 @@ def fit(self, X, y=None): The data used to compute the per-feature minimum and maximum used for later scaling along the features axis. """ - X = check_arrays(X, sparse_format="csc", copy=self.copy)[0] + X = check_array(X, copy=self.copy, ensure_2d=True, + accept_sparse="csc", dtype=np.float32, + ensure_min_samples=2) + if warn_if_not_float(X, estimator=self): # Costly conversion, but otherwise the pipeline will break: # https://github.com/scikit-learn/scikit-learn/issues/1709 X = X.astype(np.float) + feature_range = self.feature_range if feature_range[0] >= feature_range[1]: raise ValueError("Minimum of desired feature range must be smaller" @@ -97,15 +104,18 @@ def fit(self, X, y=None): self.data_min = data_min return self - def transform(self, X): """Scaling features of X according to feature_range. + Parameters ---------- X : array-like with shape [n_samples, n_features] - Input data that will be transformed. + Input data that will be transformed. """ - X = check_arrays(X, sparse_format="csc", copy=self.copy)[0] + check_is_fitted(self, 'scale_') + + X = check_array(X, accept_sparse="csc", copy=self.copy) + if sparse.issparse(X): for i in range(X.shape[1]): X.data[X.indptr[i]:X.indptr[i + 1]] *= self.scale_[i] @@ -115,15 +125,17 @@ def transform(self, X): X += self.min_ return X - def inverse_transform(self, X): """Undo the scaling of X according to feature_range. + Parameters ---------- X : array-like with shape [n_samples, n_features] - Input data that will be transformed. + Input data that will be transformed. """ - X = check_arrays(X, sparse_format="dense", copy=self.copy)[0] + check_is_fitted(self, 'scale_') + + X = check_array(X, copy=self.copy, accept_sparse="csc", ensure_2d=False) X -= self.min_ X /= self.scale_ return X \ No newline at end of file diff --git a/ParamSklearn/implementations/Normalizer.py b/ParamSklearn/implementations/Normalizer.py index 97bec74e72..0d2be626f1 100644 --- a/ParamSklearn/implementations/Normalizer.py +++ b/ParamSklearn/implementations/Normalizer.py @@ -1,6 +1,5 @@ from sklearn.base import BaseEstimator, TransformerMixin -from sklearn.utils import check_arrays -from sklearn.utils import atleast2d_or_csr +from sklearn.utils import check_array from sklearn.utils import warn_if_not_float from sklearn.utils.extmath import row_norms from sklearn.utils.sparsefuncs_fast import inplace_csr_row_normalize_l1 @@ -10,7 +9,7 @@ from scipy import sparse def normalize(X, norm='l2', axis=1, copy=True): - """Normalize a dataset along any axis + """Scale input vectors individually to unit norm (vector length). Parameters ---------- @@ -27,7 +26,7 @@ def normalize(X, norm='l2', axis=1, copy=True): axis used to normalize the data along. If 1, independently normalize each sample, otherwise (if 0) normalize each feature. - copy : boolean, optional, default is True + copy : boolean, optional, default True set to False to perform inplace row normalization and avoid a copy (if the input is already a numpy array or a scipy.sparse CSR matrix and if axis is 1). @@ -48,13 +47,13 @@ def normalize(X, norm='l2', axis=1, copy=True): else: raise ValueError("'%d' is not a supported axis" % axis) - X = check_arrays(X, sparse_format=sparse_format, copy=copy, - dtype=np.float64)[0] + X = check_array(X, sparse_format, copy=copy) warn_if_not_float(X, 'The normalize function') if axis == 0: X = X.T if sparse.issparse(X): + X = check_array(X, accept_sparse=sparse_format, dtype=np.float64) if norm == 'l1': inplace_csr_row_normalize_l1(X) elif norm == 'l2': @@ -75,7 +74,7 @@ def normalize(X, norm='l2', axis=1, copy=True): class Normalizer(BaseEstimator, TransformerMixin): - """Normalize samples individually to unit norm + """Normalize samples individually to unit norm. Each sample (i.e. each row of the data matrix) with at least one non zero component is rescaled independently of other samples so @@ -96,7 +95,7 @@ class Normalizer(BaseEstimator, TransformerMixin): norm : 'l1' or 'l2', optional ('l2' by default) The norm to use to normalize each non zero sample. - copy : boolean, optional, default is True + copy : boolean, optional, default True set to False to perform inplace row normalization and avoid a copy (if the input is already a numpy array or a scipy.sparse CSR matrix). @@ -122,7 +121,7 @@ def fit(self, X, y=None): This method is just there to implement the usual API and hence work in pipelines. """ - atleast2d_or_csr(X) + X = check_array(X, accept_sparse='csr') return self def transform(self, X, y=None, copy=None): @@ -135,5 +134,5 @@ def transform(self, X, y=None, copy=None): in CSR format to avoid an un-necessary copy. """ copy = copy if copy is not None else self.copy - atleast2d_or_csr(X) + X = check_array(X, accept_sparse='csr') return normalize(X, norm=self.norm, axis=1, copy=copy) diff --git a/ParamSklearn/implementations/OneHotEncoder.py b/ParamSklearn/implementations/OneHotEncoder.py index 66055d5c8b..1715834824 100644 --- a/ParamSklearn/implementations/OneHotEncoder.py +++ b/ParamSklearn/implementations/OneHotEncoder.py @@ -2,13 +2,8 @@ from scipy import sparse from sklearn.base import BaseEstimator, TransformerMixin -from sklearn.externals import six -from sklearn.utils import check_arrays -from sklearn.utils import atleast2d_or_csc, safe_asarray -#zip = six.moves.zip -#map = six.moves.map -#range = six.moves.range +from sklearn.utils import check_array def _transform_selected(X, transform, selected="all", copy=True): @@ -32,11 +27,11 @@ def _transform_selected(X, transform, selected="all", copy=True): ------- X : array or sparse matrix, shape=(n_samples, n_features_new) """ + if selected == "all": - X = safe_asarray(X, copy=copy, force_all_finite=False) return transform(X) - X = check_arrays(X, allow_nans=True)[0] + X = check_array(X, accept_sparse='csc', force_all_finite=False) n_features = X.shape[1] ind = np.arange(n_features) sel = np.zeros(n_features, dtype=bool) @@ -49,7 +44,7 @@ def _transform_selected(X, transform, selected="all", copy=True): if sparse.isspmatrix_csr(X): X.data += 1 subtract = True - X = atleast2d_or_csc(X, copy=copy, force_all_finite=False) + X = check_array(X, copy=copy, force_all_finite=False, accept_sparse="csc") if subtract: X.data -= 1 @@ -186,7 +181,7 @@ def _fit_transform(self, X): if sparse.isspmatrix_csr(X): X.data += 1 subtract = True - X = check_arrays(X, sparse_format="csc", allow_nans=True)[0] + X = check_array(X, accept_sparse="csc", force_all_finite=False) if subtract: X.data -= 1 @@ -255,12 +250,14 @@ def fit_transform(self, X, y=None): def _transform(self, X): """Assumes X contains only categorical features.""" + X = check_array(X, dtype=np.int, accept_sparse='csc') + # Add 1 to all categorical colums to avoid loosing them due to slicing subtract = False if sparse.isspmatrix_csr(X): X.data += 1 subtract = True - X = check_arrays(X, sparse_format="csc", allow_nans=True)[0] + X = check_array(X, accept_sparse="csc", force_all_finite=False) if subtract: X.data -= 1 n_samples, n_features = X.shape diff --git a/ParamSklearn/implementations/StandardScaler.py b/ParamSklearn/implementations/StandardScaler.py index 60d0ed19e9..511f1f983c 100644 --- a/ParamSklearn/implementations/StandardScaler.py +++ b/ParamSklearn/implementations/StandardScaler.py @@ -2,9 +2,9 @@ from scipy import sparse from sklearn.base import BaseEstimator, TransformerMixin -from sklearn.utils import check_arrays, warn_if_not_float +from sklearn.utils.validation import check_array, warn_if_not_float, check_is_fitted from sklearn.utils.sparsefuncs import inplace_column_scale, \ - mean_variance_axis0 + mean_variance_axis def _mean_and_std(X, axis=0, with_mean=True, with_std=True): @@ -15,14 +15,14 @@ def _mean_and_std(X, axis=0, with_mean=True, with_std=True): Xr = np.rollaxis(X, axis) if with_mean: - mean_ = Xr.mean(axis=0) + mean_ = Xr.mean(axis=0) else: mean_ = None if with_std: std_ = Xr.std(axis=0) if isinstance(std_, np.ndarray): - std_[std_ == 0.0] = 1.0 + std_[std_ == 0.] = 1.0 elif std_ == 0.: std_ = 1. else: @@ -61,7 +61,7 @@ class StandardScaler(BaseEstimator, TransformerMixin): If True, scale the data to unit variance (or equivalently, unit standard deviation). - copy : boolean, optional, default is True + copy : boolean, optional, default True If False, try to avoid a copy and do inplace scaling instead. This is not guaranteed to always work inplace; e.g. if the data is not a NumPy array or scipy.sparse CSR matrix, a copy may still be @@ -69,10 +69,10 @@ class StandardScaler(BaseEstimator, TransformerMixin): Attributes ---------- - `mean_` : array of floats with shape [n_features] + mean_ : array of floats with shape [n_features] The mean value for each feature in the training set. - `std_` : array of floats with shape [n_features] + std_ : array of floats with shape [n_features] The standard deviation for each feature in the training set. See also @@ -103,7 +103,8 @@ def fit(self, X, y=None): The data used to compute the mean and standard deviation used for later scaling along the features axis. """ - X = check_arrays(X, copy=self.copy, sparse_format="csc")[0] + X = check_array(X, copy=self.copy, accept_sparse="csc", + ensure_2d=False) if warn_if_not_float(X, estimator=self): # Costly conversion, but otherwise the pipeline will break: # https://github.com/scikit-learn/scikit-learn/issues/1709 @@ -141,7 +142,7 @@ def fit(self, X, y=None): self.mean_ = None if self.with_std: - var = mean_variance_axis0(X)[1] + var = mean_variance_axis(X, axis=0)[1] self.std_ = np.sqrt(var) self.std_[var == 0.0] = 1.0 else: @@ -150,7 +151,7 @@ def fit(self, X, y=None): else: self.mean_, self.std_ = _mean_and_std( X, axis=0, with_mean=self.with_mean, with_std=self.with_std) - return self + return self def transform(self, X, y=None, copy=None): """Perform standardization by centering and scaling @@ -160,18 +161,22 @@ def transform(self, X, y=None, copy=None): X : array-like with shape [n_samples, n_features] The data used to scale along the features axis. """ + check_is_fitted(self, 'std_') + copy = copy if copy is not None else self.copy - X = check_arrays(X, copy=copy, sparse_format="csc")[0] + X = check_array(X, copy=copy, accept_sparse="csc", ensure_2d=False) if warn_if_not_float(X, estimator=self): X = X.astype(np.float) if sparse.issparse(X): if self.center_sparse: for i in range(X.shape[1]): X.data[X.indptr[i]:X.indptr[i + 1]] -= self.mean_[i] + elif self.with_mean: raise ValueError( "Cannot center sparse matrices: pass `with_mean=False` " "instead. See docstring for motivation and alternatives.") + else: pass @@ -193,6 +198,8 @@ def inverse_transform(self, X, copy=None): X : array-like with shape [n_samples, n_features] The data used to scale along the features axis. """ + check_is_fitted(self, 'std_') + copy = copy if copy is not None else self.copy if sparse.issparse(X): if self.with_mean: diff --git a/ParamSklearn/implementations/forest.py b/ParamSklearn/implementations/forest.py deleted file mode 100644 index dc27d8dbe9..0000000000 --- a/ParamSklearn/implementations/forest.py +++ /dev/null @@ -1,1121 +0,0 @@ -"""Forest of trees-based ensemble methods - Jost Tobias Springenberg (JTS) - This is a quick and dirty hack to get some of the functionality (like warm-starting) - from sklearn 0.16 into 0.15.2, we should aim to remove this once we can safely use - the newer version! - Most of this is shamelessly copied from sklearn directly :) -""" - - -from __future__ import division - -import numpy as np - -from warnings import warn -from abc import ABCMeta, abstractmethod - -import numpy as np -from scipy.sparse import issparse - -from sklearn.base import ClassifierMixin, RegressorMixin -from sklearn.externals.joblib import Parallel, delayed -from sklearn.externals import six -from sklearn.feature_selection.from_model import _LearntSelectorMixin -from sklearn.metrics import r2_score -from sklearn.tree import (DecisionTreeClassifier, DecisionTreeRegressor, - ExtraTreeClassifier, ExtraTreeRegressor) -from sklearn.tree._tree import DTYPE, DOUBLE -from sklearn.utils import array2d, check_random_state, check_arrays, safe_asarray -from sklearn.utils.validation import DataConversionWarning -from sklearn.ensemble.base import BaseEnsemble, _partition_estimators - -# bring in all the stuff from forests that we can reuse -from sklearn.ensemble.forest import _parallel_predict_proba -from sklearn.ensemble.forest import _parallel_predict_regression -from sklearn.ensemble.forest import _parallel_apply - -# bring in the base forest class from sklearn -from sklearn.ensemble.forest import BaseForest - -MAX_INT = np.iinfo(np.int32).max -def _parallel_build_trees(tree, forest, X, y, sample_weight, tree_idx, n_trees, - verbose=0): - """Private function used to fit a single tree in parallel.""" - if verbose > 1: - print("building tree %d of %d" % (tree_idx + 1, n_trees)) - - if forest.bootstrap: - n_samples = X.shape[0] - if sample_weight is None: - curr_sample_weight = np.ones((n_samples,), dtype=np.float64) - else: - curr_sample_weight = sample_weight.copy() - - random_state = check_random_state(tree.random_state) - indices = random_state.randint(0, n_samples, n_samples) - sample_counts = np.bincount(indices, minlength=n_samples) - curr_sample_weight *= sample_counts - - tree.fit(X, y, sample_weight=curr_sample_weight, check_input=False) - - tree.indices_ = sample_counts > 0. - - else: - tree.fit(X, y, sample_weight=sample_weight, check_input=False) - - return tree - - -class MyBaseForest(six.with_metaclass(ABCMeta, BaseForest, - _LearntSelectorMixin)): - """Base class for forests of trees. - WARNING JTS: this is a mix between the 0.15.2 sklearn and 0.16 forest - """ - - @abstractmethod - def __init__(self, - base_estimator, - n_estimators=10, - estimator_params=tuple(), - bootstrap=False, - oob_score=False, - n_jobs=1, - random_state=None, - verbose=0, - warm_start=False): - super(BaseForest, self).__init__( - base_estimator=base_estimator, - n_estimators=n_estimators, - estimator_params=estimator_params) - - self.bootstrap = bootstrap - self.oob_score = oob_score - self.n_jobs = n_jobs - self.random_state = random_state - self.verbose = verbose - self.warm_start = warm_start - - - def fit(self, X, y, sample_weight=None): - """Build a forest of trees from the training set (X, y). - JTS: this now supports the warm_start procedure - - Parameters - ---------- - X : array-like of shape = [n_samples, n_features] - - y : array-like, shape = [n_samples] or [n_samples, n_outputs] - The target values (class labels in classification, real numbers in - regression). - - sample_weight : array-like, shape = [n_samples] or None - Sample weights. If None, then samples are equally weighted. Splits - that would create child nodes with net zero or negative weight are - ignored while searching for a split in each node. In the case of - classification, splits are also ignored if they would result in any - single class carrying a negative weight in either child node. - - Returns - ------- - self : object - Returns self. - """ - # Convert data - X, = check_arrays(X, dtype=DTYPE, sparse_format="dense") - - # Remap output - n_samples, self.n_features_ = X.shape - - y = np.atleast_1d(y) - if y.ndim == 2 and y.shape[1] == 1: - warn("A column-vector y was passed when a 1d array was" - " expected. Please change the shape of y to " - "(n_samples, ), for example using ravel().", - DataConversionWarning, stacklevel=2) - - if y.ndim == 1: - # reshape is necessary to preserve the data contiguity against vs - # [:, np.newaxis] that does not. - y = np.reshape(y, (-1, 1)) - - self.n_outputs_ = y.shape[1] - - y = self._validate_y(y) - - if getattr(y, "dtype", None) != DOUBLE or not y.flags.contiguous: - y = np.ascontiguousarray(y, dtype=DOUBLE) - - # Check parameters - self._validate_estimator() - - if not self.bootstrap and self.oob_score: - raise ValueError("Out of bag estimation only available" - " if bootstrap=True") - - random_state = check_random_state(self.random_state) - - if not self.warm_start: - # Free allocated memory, if any - self.estimators_ = [] - - n_more_estimators = self.n_estimators - len(self.estimators_) - - if n_more_estimators < 0: - raise ValueError('n_estimators=%d must be larger or equal to ' - 'len(estimators_)=%d when warm_start==True' - % (self.n_estimators, len(self.estimators_))) - - elif n_more_estimators == 0: - warn("Warm-start fitting without increasing n_estimators does not " - "fit new trees.") - else: - if self.warm_start and len(self.estimators_) > 0: - # We draw from the random state to get the random state we - # would have got if we hadn't used a warm_start. - random_state.randint(MAX_INT, size=len(self.estimators_)) - - trees = [] - for i in range(n_more_estimators): - tree = self._make_estimator(append=False) - tree.set_params(random_state=random_state.randint(MAX_INT)) - trees.append(tree) - - # Parallel loop: we use the threading backend as the Cython code - # for fitting the trees is internally releasing the Python GIL - # making threading always more efficient than multiprocessing in - # that case. - trees = Parallel(n_jobs=self.n_jobs, verbose=self.verbose, - backend="threading")( - delayed(_parallel_build_trees)( - t, self, X, y, sample_weight, i, len(trees), - verbose=self.verbose) - for i, t in enumerate(trees)) - - # Collect newly grown trees - self.estimators_.extend(trees) - - if self.oob_score: - self._set_oob_score(X, y) - - # Decapsulate classes_ attributes - if hasattr(self, "classes_") and self.n_outputs_ == 1: - self.n_classes_ = self.n_classes_[0] - self.classes_ = self.classes_[0] - - return self - -# Now that we have the base forest defined we can define all the other variants - -class ForestClassifier(six.with_metaclass(ABCMeta, MyBaseForest, - ClassifierMixin)): - """Base class for forest of trees-based classifiers. - - Warning: This class should not be used directly. Use derived classes - instead. - """ - - @abstractmethod - def __init__(self, - base_estimator, - n_estimators=10, - estimator_params=tuple(), - bootstrap=False, - oob_score=False, - n_jobs=1, - random_state=None, - verbose=0, - warm_start=False): - - super(ForestClassifier, self).__init__( - base_estimator, - n_estimators=n_estimators, - estimator_params=estimator_params, - bootstrap=bootstrap, - oob_score=oob_score, - n_jobs=n_jobs, - random_state=random_state, - verbose=verbose, - warm_start=warm_start) - - def _set_oob_score(self, X, y): - n_classes_ = self.n_classes_ - n_samples = y.shape[0] - - oob_decision_function = [] - oob_score = 0.0 - predictions = [] - - for k in xrange(self.n_outputs_): - predictions.append(np.zeros((n_samples, - n_classes_[k]))) - - for estimator in self.estimators_: - mask = np.ones(n_samples, dtype=np.bool) - mask[estimator.indices_] = False - p_estimator = estimator.predict_proba(X[mask, :]) - - if self.n_outputs_ == 1: - p_estimator = [p_estimator] - - for k in xrange(self.n_outputs_): - predictions[k][mask, :] += p_estimator[k] - - for k in xrange(self.n_outputs_): - if (predictions[k].sum(axis=1) == 0).any(): - warn("Some inputs do not have OOB scores. " - "This probably means too few trees were used " - "to compute any reliable oob estimates.") - - decision = (predictions[k] / - predictions[k].sum(axis=1)[:, np.newaxis]) - oob_decision_function.append(decision) - oob_score += np.mean(y[:, k] == - np.argmax(predictions[k], axis=1), axis=0) - - if self.n_outputs_ == 1: - self.oob_decision_function_ = oob_decision_function[0] - else: - self.oob_decision_function_ = oob_decision_function - - self.oob_score_ = oob_score / self.n_outputs_ - - def _validate_y(self, y): - y = np.copy(y) - - self.classes_ = [] - self.n_classes_ = [] - - for k in xrange(self.n_outputs_): - classes_k, y[:, k] = np.unique(y[:, k], return_inverse=True) - self.classes_.append(classes_k) - self.n_classes_.append(classes_k.shape[0]) - - return y - - def predict(self, X): - """Predict class for X. - - The predicted class of an input sample is computed as the majority - prediction of the trees in the forest. - - Parameters - ---------- - X : array-like of shape = [n_samples, n_features] - The input samples. - - Returns - ------- - y : array of shape = [n_samples] or [n_samples, n_outputs] - The predicted classes. - """ - n_samples = len(X) - proba = self.predict_proba(X) - - if self.n_outputs_ == 1: - return self.classes_.take(np.argmax(proba, axis=1), axis=0) - - else: - predictions = np.zeros((n_samples, self.n_outputs_)) - - for k in xrange(self.n_outputs_): - predictions[:, k] = self.classes_[k].take(np.argmax(proba[k], - axis=1), - axis=0) - - return predictions - - def predict_proba(self, X): - """Predict class probabilities for X. - - The predicted class probabilities of an input sample is computed as - the mean predicted class probabilities of the trees in the forest. - - Parameters - ---------- - X : array-like of shape = [n_samples, n_features] - The input samples. - - Returns - ------- - p : array of shape = [n_samples, n_classes], or a list of n_outputs - such arrays if n_outputs > 1. - The class probabilities of the input samples. The order of the - classes corresponds to that in the attribute `classes_`. - """ - # Check data - if getattr(X, "dtype", None) != DTYPE or X.ndim != 2: - X = array2d(X, dtype=DTYPE) - - # Assign chunk of trees to jobs - n_jobs, n_trees, starts = _partition_estimators(self) - - # Bugfix for _parallel_predict_proba which expects a list for multi-label and integer for single-label problems - if not isinstance(self.n_classes_, int) and len(self.n_classes_) == 1: - n_classes_ = self.n_classes_[0] - else: - n_classes_ = self.n_classes_ - # Parallel loop - all_proba = Parallel(n_jobs=n_jobs, verbose=self.verbose, - backend="threading")( - delayed(_parallel_predict_proba)( - self.estimators_[starts[i]:starts[i + 1]], - X, - n_classes_, - self.n_outputs_) - for i in range(n_jobs)) - - # Reduce - proba = all_proba[0] - - if self.n_outputs_ == 1: - for j in xrange(1, len(all_proba)): - proba += all_proba[j] - - proba /= len(self.estimators_) - - else: - for j in xrange(1, len(all_proba)): - for k in xrange(self.n_outputs_): - proba[k] += all_proba[j][k] - - for k in xrange(self.n_outputs_): - proba[k] /= self.n_estimators - - return proba - - def predict_log_proba(self, X): - """Predict class log-probabilities for X. - - The predicted class log-probabilities of an input sample is computed as - the log of the mean predicted class probabilities of the trees in the - forest. - - Parameters - ---------- - X : array-like of shape = [n_samples, n_features] - The input samples. - - Returns - ------- - p : array of shape = [n_samples, n_classes], or a list of n_outputs - such arrays if n_outputs > 1. - The class probabilities of the input samples. The order of the - classes corresponds to that in the attribute `classes_`. - """ - proba = self.predict_proba(X) - - if self.n_outputs_ == 1: - return np.log(proba) - - else: - for k in xrange(self.n_outputs_): - proba[k] = np.log(proba[k]) - - return proba - -class ForestRegressor(six.with_metaclass(ABCMeta, MyBaseForest, RegressorMixin)): - """Base class for forest of trees-based regressors. - - Warning: This class should not be used directly. Use derived classes - instead. - """ - - @abstractmethod - def __init__(self, - base_estimator, - n_estimators=10, - estimator_params=tuple(), - bootstrap=False, - oob_score=False, - n_jobs=1, - random_state=None, - verbose=0, - warm_start=False): - super(ForestRegressor, self).__init__( - base_estimator, - n_estimators=n_estimators, - estimator_params=estimator_params, - bootstrap=bootstrap, - oob_score=oob_score, - n_jobs=n_jobs, - random_state=random_state, - verbose=verbose, - warm_start=warm_start) - - - def predict(self, X): - """Predict regression target for X. - - The predicted regression target of an input sample is computed as the - mean predicted regression targets of the trees in the forest. - - Parameters - ---------- - X : array-like of shape = [n_samples, n_features] - The input samples. - - Returns - ------- - y: array of shape = [n_samples] or [n_samples, n_outputs] - The predicted values. - """ - # Check data - if getattr(X, "dtype", None) != DTYPE or X.ndim != 2: - X = array2d(X, dtype=DTYPE) - - # Assign chunk of trees to jobs - n_jobs, n_trees, starts = _partition_estimators(self) - - # Parallel loop - all_y_hat = Parallel(n_jobs=n_jobs, verbose=self.verbose, - backend="threading")( - delayed(_parallel_predict_regression)( - self.estimators_[starts[i]:starts[i + 1]], X) - for i in range(n_jobs)) - - # Reduce - y_hat = sum(all_y_hat) / len(self.estimators_) - - return y_hat - - def _set_oob_score(self, X, y): - n_samples = y.shape[0] - - predictions = np.zeros((n_samples, self.n_outputs_)) - n_predictions = np.zeros((n_samples, self.n_outputs_)) - - for estimator in self.estimators_: - mask = np.ones(n_samples, dtype=np.bool) - mask[estimator.indices_] = False - p_estimator = estimator.predict(X[mask, :]) - - if self.n_outputs_ == 1: - p_estimator = p_estimator[:, np.newaxis] - - predictions[mask, :] += p_estimator - n_predictions[mask, :] += 1 - - if (n_predictions == 0).any(): - warn("Some inputs do not have OOB scores. " - "This probably means too few trees were used " - "to compute any reliable oob estimates.") - n_predictions[n_predictions == 0] = 1 - - predictions /= n_predictions - self.oob_prediction_ = predictions - - if self.n_outputs_ == 1: - self.oob_prediction_ = \ - self.oob_prediction_.reshape((n_samples, )) - - self.oob_score_ = 0.0 - - for k in xrange(self.n_outputs_): - self.oob_score_ += r2_score(y[:, k], - predictions[:, k]) - - self.oob_score_ /= self.n_outputs_ - - -class RandomForestClassifier(ForestClassifier): - """A random forest classifier. - - A random forest is a meta estimator that fits a number of decision tree - classifiers on various sub-samples of the dataset and use averaging to - improve the predictive accuracy and control over-fitting. - - Parameters - ---------- - n_estimators : integer, optional (default=10) - The number of trees in the forest. - - criterion : string, optional (default="gini") - The function to measure the quality of a split. Supported criteria are - "gini" for the Gini impurity and "entropy" for the information gain. - Note: this parameter is tree-specific. - - max_features : int, float, string or None, optional (default="auto") - The number of features to consider when looking for the best split: - - - If int, then consider `max_features` features at each split. - - If float, then `max_features` is a percentage and - `int(max_features * n_features)` features are considered at each - split. - - If "auto", then `max_features=sqrt(n_features)`. - - If "sqrt", then `max_features=sqrt(n_features)`. - - If "log2", then `max_features=log2(n_features)`. - - If None, then `max_features=n_features`. - - Note: the search for a split does not stop until at least one - valid partition of the node samples is found, even if it requires to - effectively inspect more than ``max_features`` features. - Note: this parameter is tree-specific. - - max_depth : integer or None, optional (default=None) - The maximum depth of the tree. If None, then nodes are expanded until - all leaves are pure or until all leaves contain less than - min_samples_split samples. - Ignored if ``max_leaf_nodes`` is not None. - Note: this parameter is tree-specific. - - min_samples_split : integer, optional (default=2) - The minimum number of samples required to split an internal node. - Note: this parameter is tree-specific. - - min_samples_leaf : integer, optional (default=1) - The minimum number of samples in newly created leaves. A split is - discarded if after the split, one of the leaves would contain less then - ``min_samples_leaf`` samples. - Note: this parameter is tree-specific. - - min_weight_fraction_leaf : float, optional (default=0.) - The minimum weighted fraction of the input samples required to be at a - leaf node. - Note: this parameter is tree-specific. - - max_leaf_nodes : int or None, optional (default=None) - Grow trees with ``max_leaf_nodes`` in best-first fashion. - Best nodes are defined as relative reduction in impurity. - If None then unlimited number of leaf nodes. - If not None then ``max_depth`` will be ignored. - Note: this parameter is tree-specific. - - bootstrap : boolean, optional (default=True) - Whether bootstrap samples are used when building trees. - - oob_score : bool - Whether to use out-of-bag samples to estimate - the generalization error. - - n_jobs : integer, optional (default=1) - The number of jobs to run in parallel for both `fit` and `predict`. - If -1, then the number of jobs is set to the number of cores. - - random_state : int, RandomState instance or None, optional (default=None) - If int, random_state is the seed used by the random number generator; - If RandomState instance, random_state is the random number generator; - If None, the random number generator is the RandomState instance used - by `np.random`. - - verbose : int, optional (default=0) - Controls the verbosity of the tree building process. - - warm_start : bool, optional (default=False) - When set to ``True``, reuse the solution of the previous call to fit - and add more estimators to the ensemble, otherwise, just fit a whole - new forest. - - Attributes - ---------- - estimators_ : list of DecisionTreeClassifier - The collection of fitted sub-estimators. - - classes_ : array of shape = [n_classes] or a list of such arrays - The classes labels (single output problem), or a list of arrays of - class labels (multi-output problem). - - n_classes_ : int or list - The number of classes (single output problem), or a list containing the - number of classes for each output (multi-output problem). - - feature_importances_ : array of shape = [n_features] - The feature importances (the higher, the more important the feature). - - oob_score_ : float - Score of the training dataset obtained using an out-of-bag estimate. - - oob_decision_function_ : array of shape = [n_samples, n_classes] - Decision function computed with out-of-bag estimate on the training - set. If n_estimators is small it might be possible that a data point - was never left out during the bootstrap. In this case, - `oob_decision_function_` might contain NaN. - - References - ---------- - - .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001. - - See also - -------- - DecisionTreeClassifier, ExtraTreesClassifier - """ - def __init__(self, - n_estimators=10, - criterion="gini", - max_depth=None, - min_samples_split=2, - min_samples_leaf=1, - max_features="auto", - max_leaf_nodes=None, - bootstrap=True, - oob_score=False, - n_jobs=1, - random_state=None, - verbose=0, - warm_start=False): - super(RandomForestClassifier, self).__init__( - base_estimator=DecisionTreeClassifier(), - n_estimators=n_estimators, - estimator_params=("criterion", "max_depth", "min_samples_split", - "min_samples_leaf", - "max_features", "max_leaf_nodes", - "random_state"), - bootstrap=bootstrap, - oob_score=oob_score, - n_jobs=n_jobs, - random_state=random_state, - verbose=verbose, - warm_start=warm_start) - - self.criterion = criterion - self.max_depth = max_depth - self.min_samples_split = min_samples_split - self.min_samples_leaf = min_samples_leaf - self.max_features = max_features - self.max_leaf_nodes = max_leaf_nodes - - -class RandomForestRegressor(ForestRegressor): - """A random forest regressor. - - A random forest is a meta estimator that fits a number of classifying - decision trees on various sub-samples of the dataset and use averaging - to improve the predictive accuracy and control over-fitting. - - Parameters - ---------- - n_estimators : integer, optional (default=10) - The number of trees in the forest. - - criterion : string, optional (default="mse") - The function to measure the quality of a split. The only supported - criterion is "mse" for the mean squared error. - Note: this parameter is tree-specific. - - max_features : int, float, string or None, optional (default="auto") - The number of features to consider when looking for the best split: - - - If int, then consider `max_features` features at each split. - - If float, then `max_features` is a percentage and - `int(max_features * n_features)` features are considered at each - split. - - If "auto", then `max_features=n_features`. - - If "sqrt", then `max_features=sqrt(n_features)`. - - If "log2", then `max_features=log2(n_features)`. - - If None, then `max_features=n_features`. - - Note: the search for a split does not stop until at least one - valid partition of the node samples is found, even if it requires to - effectively inspect more than ``max_features`` features. - Note: this parameter is tree-specific. - - max_depth : integer or None, optional (default=None) - The maximum depth of the tree. If None, then nodes are expanded until - all leaves are pure or until all leaves contain less than - min_samples_split samples. - Ignored if ``max_leaf_nodes`` is not None. - Note: this parameter is tree-specific. - - min_samples_split : integer, optional (default=2) - The minimum number of samples required to split an internal node. - Note: this parameter is tree-specific. - - min_samples_leaf : integer, optional (default=1) - The minimum number of samples in newly created leaves. A split is - discarded if after the split, one of the leaves would contain less then - ``min_samples_leaf`` samples. - Note: this parameter is tree-specific. - - max_leaf_nodes : int or None, optional (default=None) - Grow trees with ``max_leaf_nodes`` in best-first fashion. - Best nodes are defined as relative reduction in impurity. - If None then unlimited number of leaf nodes. - If not None then ``max_depth`` will be ignored. - Note: this parameter is tree-specific. - - bootstrap : boolean, optional (default=True) - Whether bootstrap samples are used when building trees. - - oob_score : bool - whether to use out-of-bag samples to estimate - the generalization error. - - n_jobs : integer, optional (default=1) - The number of jobs to run in parallel for both `fit` and `predict`. - If -1, then the number of jobs is set to the number of cores. - - random_state : int, RandomState instance or None, optional (default=None) - If int, random_state is the seed used by the random number generator; - If RandomState instance, random_state is the random number generator; - If None, the random number generator is the RandomState instance used - by `np.random`. - - verbose : int, optional (default=0) - Controls the verbosity of the tree building process. - - warm_start : bool, optional (default=False) - When set to ``True``, reuse the solution of the previous call to fit - and add more estimators to the ensemble, otherwise, just fit a whole - new forest. - - Attributes - ---------- - estimators_ : list of DecisionTreeRegressor - The collection of fitted sub-estimators. - - feature_importances_ : array of shape = [n_features] - The feature importances (the higher, the more important the feature). - - oob_score_ : float - Score of the training dataset obtained using an out-of-bag estimate. - - oob_prediction_ : array of shape = [n_samples] - Prediction computed with out-of-bag estimate on the training set. - - References - ---------- - - .. [1] L. Breiman, "Random Forests", Machine Learning, 45(1), 5-32, 2001. - - See also - -------- - DecisionTreeRegressor, ExtraTreesRegressor - """ - def __init__(self, - n_estimators=10, - criterion="mse", - max_depth=None, - min_samples_split=2, - min_samples_leaf=1, - max_features="auto", - max_leaf_nodes=None, - bootstrap=True, - oob_score=False, - n_jobs=1, - random_state=None, - verbose=0, - warm_start=False): - super(RandomForestRegressor, self).__init__( - base_estimator=DecisionTreeRegressor(), - n_estimators=n_estimators, - estimator_params=("criterion", "max_depth", "min_samples_split", - "min_samples_leaf", - "max_features", "max_leaf_nodes", - "random_state"), - bootstrap=bootstrap, - oob_score=oob_score, - n_jobs=n_jobs, - random_state=random_state, - verbose=verbose, - warm_start=warm_start) - - self.criterion = criterion - self.max_depth = max_depth - self.min_samples_split = min_samples_split - self.min_samples_leaf = min_samples_leaf - self.max_features = max_features - self.max_leaf_nodes = max_leaf_nodes - - -class ExtraTreesClassifier(ForestClassifier): - """An extra-trees classifier. - - This class implements a meta estimator that fits a number of - randomized decision trees (a.k.a. extra-trees) on various sub-samples - of the dataset and use averaging to improve the predictive accuracy - and control over-fitting. - - Parameters - ---------- - n_estimators : integer, optional (default=10) - The number of trees in the forest. - - criterion : string, optional (default="gini") - The function to measure the quality of a split. Supported criteria are - "gini" for the Gini impurity and "entropy" for the information gain. - Note: this parameter is tree-specific. - - max_features : int, float, string or None, optional (default="auto") - The number of features to consider when looking for the best split: - - - If int, then consider `max_features` features at each split. - - If float, then `max_features` is a percentage and - `int(max_features * n_features)` features are considered at each - split. - - If "auto", then `max_features=sqrt(n_features)`. - - If "sqrt", then `max_features=sqrt(n_features)`. - - If "log2", then `max_features=log2(n_features)`. - - If None, then `max_features=n_features`. - - Note: the search for a split does not stop until at least one - valid partition of the node samples is found, even if it requires to - effectively inspect more than ``max_features`` features. - Note: this parameter is tree-specific. - - max_depth : integer or None, optional (default=None) - The maximum depth of the tree. If None, then nodes are expanded until - all leaves are pure or until all leaves contain less than - min_samples_split samples. - Ignored if ``max_leaf_nodes`` is not None. - Note: this parameter is tree-specific. - - min_samples_split : integer, optional (default=2) - The minimum number of samples required to split an internal node. - Note: this parameter is tree-specific. - - min_samples_leaf : integer, optional (default=1) - The minimum number of samples in newly created leaves. A split is - discarded if after the split, one of the leaves would contain less then - ``min_samples_leaf`` samples. - Note: this parameter is tree-specific. - - max_leaf_nodes : int or None, optional (default=None) - Grow trees with ``max_leaf_nodes`` in best-first fashion. - Best nodes are defined as relative reduction in impurity. - If None then unlimited number of leaf nodes. - If not None then ``max_depth`` will be ignored. - Note: this parameter is tree-specific. - - bootstrap : boolean, optional (default=False) - Whether bootstrap samples are used when building trees. - - oob_score : bool - Whether to use out-of-bag samples to estimate - the generalization error. - - n_jobs : integer, optional (default=1) - The number of jobs to run in parallel for both `fit` and `predict`. - If -1, then the number of jobs is set to the number of cores. - - random_state : int, RandomState instance or None, optional (default=None) - If int, random_state is the seed used by the random number generator; - If RandomState instance, random_state is the random number generator; - If None, the random number generator is the RandomState instance used - by `np.random`. - - verbose : int, optional (default=0) - Controls the verbosity of the tree building process. - - warm_start : bool, optional (default=False) - When set to ``True``, reuse the solution of the previous call to fit - and add more estimators to the ensemble, otherwise, just fit a whole - new forest. - - Attributes - ---------- - estimators_ : list of DecisionTreeClassifier - The collection of fitted sub-estimators. - - classes_ : array of shape = [n_classes] or a list of such arrays - The classes labels (single output problem), or a list of arrays of - class labels (multi-output problem). - - n_classes_ : int or list - The number of classes (single output problem), or a list containing the - number of classes for each output (multi-output problem). - - feature_importances_ : array of shape = [n_features] - The feature importances (the higher, the more important the feature). - - oob_score_ : float - Score of the training dataset obtained using an out-of-bag estimate. - - oob_decision_function_ : array of shape = [n_samples, n_classes] - Decision function computed with out-of-bag estimate on the training - set. If n_estimators is small it might be possible that a data point - was never left out during the bootstrap. In this case, - `oob_decision_function_` might contain NaN. - - References - ---------- - - .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees", - Machine Learning, 63(1), 3-42, 2006. - - See also - -------- - sklearn.tree.ExtraTreeClassifier : Base classifier for this ensemble. - RandomForestClassifier : Ensemble Classifier based on trees with optimal - splits. - """ - def __init__(self, - n_estimators=10, - criterion="gini", - max_depth=None, - min_samples_split=2, - min_samples_leaf=1, - max_features="auto", - max_leaf_nodes=None, - bootstrap=False, - oob_score=False, - n_jobs=1, - random_state=None, - verbose=0, - warm_start=False): - super(ExtraTreesClassifier, self).__init__( - base_estimator=ExtraTreeClassifier(), - n_estimators=n_estimators, - estimator_params=("criterion", "max_depth", "min_samples_split", - "min_samples_leaf", - "max_features", "max_leaf_nodes", "random_state"), - bootstrap=bootstrap, - oob_score=oob_score, - n_jobs=n_jobs, - random_state=random_state, - verbose=verbose, - warm_start=warm_start) - - self.criterion = criterion - self.max_depth = max_depth - self.min_samples_split = min_samples_split - self.min_samples_leaf = min_samples_leaf - self.max_features = max_features - self.max_leaf_nodes = max_leaf_nodes - - -class ExtraTreesRegressor(ForestRegressor): - """An extra-trees regressor. - - This class implements a meta estimator that fits a number of - randomized decision trees (a.k.a. extra-trees) on various sub-samples - of the dataset and use averaging to improve the predictive accuracy - and control over-fitting. - - Parameters - ---------- - n_estimators : integer, optional (default=10) - The number of trees in the forest. - - criterion : string, optional (default="mse") - The function to measure the quality of a split. The only supported - criterion is "mse" for the mean squared error. - Note: this parameter is tree-specific. - - max_features : int, float, string or None, optional (default="auto") - The number of features to consider when looking for the best split: - - - If int, then consider `max_features` features at each split. - - If float, then `max_features` is a percentage and - `int(max_features * n_features)` features are considered at each - split. - - If "auto", then `max_features=n_features`. - - If "sqrt", then `max_features=sqrt(n_features)`. - - If "log2", then `max_features=log2(n_features)`. - - If None, then `max_features=n_features`. - - Note: the search for a split does not stop until at least one - valid partition of the node samples is found, even if it requires to - effectively inspect more than ``max_features`` features. - Note: this parameter is tree-specific. - - max_depth : integer or None, optional (default=None) - The maximum depth of the tree. If None, then nodes are expanded until - all leaves are pure or until all leaves contain less than - min_samples_split samples. - Ignored if ``max_leaf_nodes`` is not None. - Note: this parameter is tree-specific. - - min_samples_split : integer, optional (default=2) - The minimum number of samples required to split an internal node. - Note: this parameter is tree-specific. - - min_samples_leaf : integer, optional (default=1) - The minimum number of samples in newly created leaves. A split is - discarded if after the split, one of the leaves would contain less then - ``min_samples_leaf`` samples. - Note: this parameter is tree-specific. - - max_leaf_nodes : int or None, optional (default=None) - Grow trees with ``max_leaf_nodes`` in best-first fashion. - Best nodes are defined as relative reduction in impurity. - If None then unlimited number of leaf nodes. - If not None then ``max_depth`` will be ignored. - Note: this parameter is tree-specific. - - bootstrap : boolean, optional (default=False) - Whether bootstrap samples are used when building trees. - Note: this parameter is tree-specific. - - oob_score : bool - Whether to use out-of-bag samples to estimate - the generalization error. - - n_jobs : integer, optional (default=1) - The number of jobs to run in parallel for both `fit` and `predict`. - If -1, then the number of jobs is set to the number of cores. - - random_state : int, RandomState instance or None, optional (default=None) - If int, random_state is the seed used by the random number generator; - If RandomState instance, random_state is the random number generator; - If None, the random number generator is the RandomState instance used - by `np.random`. - - verbose : int, optional (default=0) - Controls the verbosity of the tree building process. - - warm_start : bool, optional (default=False) - When set to ``True``, reuse the solution of the previous call to fit - and add more estimators to the ensemble, otherwise, just fit a whole - new forest. - - Attributes - ---------- - estimators_ : list of DecisionTreeRegressor - The collection of fitted sub-estimators. - - feature_importances_ : array of shape = [n_features] - The feature importances (the higher, the more important the feature). - - oob_score_ : float - Score of the training dataset obtained using an out-of-bag estimate. - - oob_prediction_ : array of shape = [n_samples] - Prediction computed with out-of-bag estimate on the training set. - - References - ---------- - - .. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees", - Machine Learning, 63(1), 3-42, 2006. - - See also - -------- - sklearn.tree.ExtraTreeRegressor: Base estimator for this ensemble. - RandomForestRegressor: Ensemble regressor using trees with optimal splits. - """ - def __init__(self, - n_estimators=10, - criterion="mse", - max_depth=None, - min_samples_split=2, - min_samples_leaf=1, - max_features="auto", - max_leaf_nodes=None, - bootstrap=False, - oob_score=False, - n_jobs=1, - random_state=None, - verbose=0, - warm_start=False): - super(ExtraTreesRegressor, self).__init__( - base_estimator=ExtraTreeRegressor(), - n_estimators=n_estimators, - estimator_params=("criterion", "max_depth", "min_samples_split", - "min_samples_leaf", - "max_features", "max_leaf_nodes", - "random_state"), - bootstrap=bootstrap, - oob_score=oob_score, - n_jobs=n_jobs, - random_state=random_state, - verbose=verbose, - warm_start=warm_start) - - self.criterion = criterion - self.max_depth = max_depth - self.min_samples_split = min_samples_split - self.min_samples_leaf = min_samples_leaf - self.max_features = max_features - self.max_leaf_nodes = max_leaf_nodes - - diff --git a/ParamSklearn/regression.py b/ParamSklearn/regression.py index 875d0ad584..bbf12ff870 100644 --- a/ParamSklearn/regression.py +++ b/ParamSklearn/regression.py @@ -2,8 +2,8 @@ from itertools import product import sklearn -if sklearn.__version__ != "0.15.2": - raise ValueError("ParamSklearn supports only sklearn version 0.15.2, " +if sklearn.__version__ != "0.16.1": + raise ValueError("ParamSklearn supports only sklearn version 0.16.1, " "you installed %s." % sklearn.__version__) from sklearn.base import RegressorMixin import numpy as np diff --git a/setup.py b/setup.py index 326c611233..1ac2249a58 100644 --- a/setup.py +++ b/setup.py @@ -5,9 +5,9 @@ "hyperparameter configuration.", version="0.1dev", packages=setuptools.find_packages(), - install_requires=["numpy==1.9.0", - "scipy==0.14.0", - "scikit-learn==0.15.2", + install_requires=["numpy>=1.9.0", + "scipy>=0.14.0", + "scikit-learn==0.16.1", "nose", "HPOlibConfigSpace"], test_requires=["mock"], diff --git a/source/conf.py b/source/conf.py index edd9e03e94..446682a51c 100644 --- a/source/conf.py +++ b/source/conf.py @@ -38,6 +38,8 @@ 'numpydoc', # Important for get headings like Parameters... ] +# Configure the extensions + numpydoc_show_class_members = False autosummary_generate = True autodoc_default_flags = ['members', 'inherited-members', diff --git a/tests/components/classification/test_qda.py b/tests/components/classification/test_qda.py index 4dc16e4619..9d60ac83bf 100644 --- a/tests/components/classification/test_qda.py +++ b/tests/components/classification/test_qda.py @@ -15,11 +15,11 @@ def test_default_configuration_iris(self): sklearn.metrics.accuracy_score(predictions, targets)) - @unittest.skip("QDA fails on this one") + #@unittest.skip("QDA fails on this one") def test_default_configuration_digits(self): for i in range(10): predictions, targets = \ _test_classifier(classifier=QDA, dataset='digits') - self.assertAlmostEqual(0.88585306618093507, + self.assertAlmostEqual(0.18882817243472982, sklearn.metrics.accuracy_score(predictions, targets)) diff --git a/tests/components/preprocessing/test_gem.py b/tests/components/preprocessing/test_gem.py index a3fcae6ed6..1bfdf818b6 100644 --- a/tests/components/preprocessing/test_gem.py +++ b/tests/components/preprocessing/test_gem.py @@ -32,6 +32,6 @@ def test_default_configuration_classify(self): accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) self.assertGreaterEqual(accuracy, 0.94) - @unittest.skip("Right now GEM cannot handle sparse arrays!") def test_preprocessing_dtype(self): - super(GEMComponentTest, self)._test_preprocessing_dtype(GEM) + super(GEMComponentTest, self)._test_preprocessing_dtype(GEM, + test_sparse=False) diff --git a/tests/components/preprocessing/test_polynomial.py b/tests/components/preprocessing/test_polynomial.py index ba210e6325..b432ec6372 100644 --- a/tests/components/preprocessing/test_polynomial.py +++ b/tests/components/preprocessing/test_polynomial.py @@ -34,7 +34,6 @@ def test_default_configuration_classify(self): accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) self.assertAlmostEqual(accuracy, 0.93564055859137829, places=2) - @unittest.skip("Produces np.float64") def test_preprocessing_dtype(self): super(PolynomialFeaturesComponentTest, self)._test_preprocessing_dtype(PolynomialFeatures, diff --git a/tests/components/preprocessing/test_random_trees_embedding.py b/tests/components/preprocessing/test_random_trees_embedding.py index b61325aeb8..ce1c19a492 100644 --- a/tests/components/preprocessing/test_random_trees_embedding.py +++ b/tests/components/preprocessing/test_random_trees_embedding.py @@ -27,9 +27,9 @@ def test_preprocessing_dtype(self): configuration_space = RandomTreesEmbedding.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = RandomTreesEmbedding(random_state=1, - **{hp.hyperparameter.name: hp.value for hp - in - default.values.values()}) + **{hp_name: default[hp_name] for + hp_name in + default}) preprocessor.fit(X_train) Xt = preprocessor.transform(X_train) print Xt @@ -41,9 +41,9 @@ def test_preprocessing_dtype(self): configuration_space = RandomTreesEmbedding.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = RandomTreesEmbedding(random_state=1, - **{hp.hyperparameter.name: hp.value for hp - in - default.values.values()}) + **{hp_name: default[hp_name] for + hp_name in + default}) preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float64) \ No newline at end of file diff --git a/tests/components/preprocessing/test_scaling.py b/tests/components/preprocessing/test_scaling.py index c65a81189b..489db4693e 100644 --- a/tests/components/preprocessing/test_scaling.py +++ b/tests/components/preprocessing/test_scaling.py @@ -29,8 +29,9 @@ def test_default_configuration_with_sparse_data(self): preprocessing = _test_preprocessing(Rescaling, dataset='boston', make_sparse=True) transformation, original = preprocessing + self.assertEqual(original.getnnz(), transformation.getnnz()) self.assertAlmostEqual(1, transformation.max(), places=6) - self.assertTrue(all((original != transformation).data)) + self.assertTrue(~np.allclose(original.data, transformation.data)) def test_preprocessing_dtype(self): super(ScalingComponentTest, self)._test_preprocessing_dtype(Rescaling) diff --git a/tests/components/regression/test_ridge_regression.py b/tests/components/regression/test_ridge_regression.py index cdbceb7c34..908b55f3c8 100644 --- a/tests/components/regression/test_ridge_regression.py +++ b/tests/components/regression/test_ridge_regression.py @@ -40,5 +40,5 @@ def test_default_configuration(self): predictor = regressor.fit(X_train_transformed, Y_train) predictions = predictor.predict(X_test_transformed) - self.assertAlmostEqual(0.30195375410805392, + self.assertAlmostEqual(0.37173953934750514, sklearn.metrics.r2_score(y_true=Y_test, y_pred=predictions)) \ No newline at end of file diff --git a/tests/implementations/test_OneHotEncoder.py b/tests/implementations/test_OneHotEncoder.py index d8207da713..84aeb3134c 100644 --- a/tests/implementations/test_OneHotEncoder.py +++ b/tests/implementations/test_OneHotEncoder.py @@ -6,26 +6,26 @@ from ParamSklearn.implementations.OneHotEncoder import OneHotEncoder -dense1 = [[0, 1, 0], - [0, 0, 0]] -dense1_1h = [[1, 0, 1, 1], - [1, 1, 0, 1]] - -dense2 = [[0, np.NaN, 0], - [np.NaN, 0, 2], - [1, 1, 1]] -dense2_1h = [[1, 0, 0, 0, 1, 0, 0], - [0, 0, 1, 0, 0, 0, 1], - [0, 1, 0, 1, 0, 1, 0]] -dense2_partial_1h = [[1., 0., 0., 0., 0.], - [0., 0., 1., 0., 2.], - [0., 1., 0., 1., 1.]] +dense1 = np.array([[0, 1, 0], + [0, 0, 0]]) +dense1_1h = np.array([[1, 0, 1, 1], + [1, 1, 0 , 1]]) + +dense2 = np.array([[0, np.NaN, 0], + [np.NaN, 0, 2], + [1, 1, 1]]) +dense2_1h = np.array([[1, 0, 0, 0, 1, 0, 0], + [0, 0, 1, 0, 0, 0, 1], + [0, 1, 0, 1, 0, 1, 0]]) +dense2_partial_1h = np.array([[1., 0., 0., 0., 0.], + [0., 0., 1., 0., 2.], + [0., 1., 0., 1., 1.]]) # All NaN slice -dense3 = [[0, 1, np.NaN], - [1, 0, np.NaN]] -dense3_1h = [[1, 0, 0, 1, 0], - [0, 1, 1, 0, 0]] +dense3 = np.array([[0, 1, np.NaN], + [1, 0, np.NaN]]) +dense3_1h = np.array([[1, 0, 0, 1, 0], + [0, 1, 1, 0, 0]]) sparse1 = scipy.sparse.csc_matrix(([2, 1, 0, 0, 1, 2], ((1, 4, 5, 2, 3, 5), @@ -97,27 +97,27 @@ def test_sparse2_csr(self): def fit_then_transform(self, expected, input, categorical_features='all'): ohe = OneHotEncoder(categorical_features=categorical_features) - transformation = ohe.fit_transform(input) + transformation = ohe.fit_transform(input.copy()) self.assertIsInstance(transformation, scipy.sparse.csr_matrix) assert_array_almost_equal(expected, transformation.todense()) ohe2 = OneHotEncoder(categorical_features=categorical_features) - ohe2.fit(input) - transformation = ohe2.transform(input) + ohe2.fit(input.copy()) + transformation = ohe2.transform(input.copy()) self.assertIsInstance(transformation, scipy.sparse.csr_matrix) assert_array_almost_equal(expected, transformation.todense()) def fit_then_transform_dense(self, expected, input, categorical_features='all'): ohe = OneHotEncoder(categorical_features=categorical_features, sparse=False) - transformation = ohe.fit_transform(input) + transformation = ohe.fit_transform(input.copy()) self.assertIsInstance(transformation, np.ndarray) assert_array_almost_equal(expected, transformation) ohe2 = OneHotEncoder(categorical_features=categorical_features, sparse=False) - ohe2.fit(input) - transformation = ohe2.transform(input) + ohe2.fit(input.copy()) + transformation = ohe2.transform(input.copy()) self.assertIsInstance(transformation, np.ndarray) assert_array_almost_equal(expected, transformation) diff --git a/tests/implementations/test_minmaxscaler.py b/tests/implementations/test_minmaxscaler.py index 3beb8811bd..3b49a80f68 100644 --- a/tests/implementations/test_minmaxscaler.py +++ b/tests/implementations/test_minmaxscaler.py @@ -3,12 +3,46 @@ import numpy as np from scipy import sparse from sklearn.utils.testing import assert_array_almost_equal +from sklearn.datasets import load_iris from ParamSklearn.util import get_dataset from ParamSklearn.implementations.MinMaxScaler import MinMaxScaler class MinMaxScalerTest(unittest.TestCase): + def test_min_max_scaler_iris(self): + iris = load_iris() + X = iris.data + + scaler = MinMaxScaler() + # default params + X_trans = scaler.fit_transform(X) + assert_array_almost_equal(X_trans.min(axis=0), 0) + assert_array_almost_equal(X_trans.min(axis=0), 0) + assert_array_almost_equal(X_trans.max(axis=0), 1) + X_trans_inv = scaler.inverse_transform(X_trans) + assert_array_almost_equal(X, X_trans_inv) + + # not default params: min=1, max=2 + scaler = MinMaxScaler(feature_range=(1, 2)) + X_trans = scaler.fit_transform(X) + assert_array_almost_equal(X_trans.min(axis=0), 1) + assert_array_almost_equal(X_trans.max(axis=0), 2) + X_trans_inv = scaler.inverse_transform(X_trans) + assert_array_almost_equal(X, X_trans_inv) + + # min=-.5, max=.6 + scaler = MinMaxScaler(feature_range=(-.5, .6)) + X_trans = scaler.fit_transform(X) + assert_array_almost_equal(X_trans.min(axis=0), -.5) + assert_array_almost_equal(X_trans.max(axis=0), .6) + X_trans_inv = scaler.inverse_transform(X_trans) + assert_array_almost_equal(X, X_trans_inv) + + # raises on invalid range + scaler = MinMaxScaler(feature_range=(2, 1)) + self.assertRaises(ValueError, scaler.fit, X) + def test_min_max_scaler_zero_variance_features(self): """Check min max scaler on toy data with zero variance features""" X = [[0., 1., +0.5], @@ -43,6 +77,8 @@ def test_min_max_scaler_zero_variance_features(self): assert_array_almost_equal(X_trans, X_expected_1_2) + @unittest.skip("I don't understand the original unit test. Thus, I forbid " + "1d input to the scaling function.") def test_min_max_scaler_1d(self): """Test scaling of dataset along single axis""" rng = np.random.RandomState(0) From 9260d6d1b6189335cca81903361befd32b4f739a Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 16 Jul 2015 11:04:51 +0200 Subject: [PATCH 266/352] Add warmstarts to models where possible --- ParamSklearn/base.py | 31 ++++-- ParamSklearn/classification.py | 12 +-- .../components/classification/bernoulli_nb.py | 36 ++++++- .../components/classification/extra_trees.py | 59 +++++++---- .../components/classification/gaussian_nb.py | 38 ++++++- .../classification/gradient_boosting.py | 73 ++++++++------ .../classification/multinomial_nb.py | 39 +++++++- .../classification/passive_aggresive.py | 31 +++++- .../classification/random_forest.py | 98 +++++++++++-------- ParamSklearn/components/classification/sgd.py | 69 ++++++++----- ParamSklearn/util.py | 15 +++ .../classification/test_bernoulli_nb.py | 25 +++++ .../classification/test_extra_trees.py | 12 ++- .../classification/test_gaussian_nb.py | 25 +++++ .../classification/test_gradient_boosting.py | 15 ++- .../classification/test_multinomial_nb.py | 25 +++++ .../classification/test_passive_aggressive.py | 28 ++++-- .../classification/test_random_forest.py | 14 ++- tests/components/classification/test_sgd.py | 23 ++++- .../preprocessing/test_balancing.py | 43 ++++++-- 20 files changed, 541 insertions(+), 170 deletions(-) create mode 100644 tests/components/classification/test_bernoulli_nb.py create mode 100644 tests/components/classification/test_gaussian_nb.py create mode 100644 tests/components/classification/test_multinomial_nb.py diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index fbe8344001..c60c40e136 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -36,7 +36,7 @@ def __init__(self, configuration, random_state=None): else: self.random_state = check_random_state(random_state) - def fit(self, X, Y, fit_params=None, init_params=None): + def fit(self, X, y, fit_params=None, init_params=None): """Fit the selected algorithm to the training data. Parameters @@ -70,11 +70,12 @@ def fit(self, X, Y, fit_params=None, init_params=None): # TODO: perform input validation # TODO: look if X.shape[0] == y.shape[0] # TODO: check if the hyperparameters have been set... - # TODO: this is an example of the antipattern of not properly - # initializing a class in the init function! - # TODO: can this happen now that a configuration is specified at - # instantiation time + X, fit_params = self.pre_transform(X, y, fit_params=fit_params, + init_params=init_params) + self.fit_estimator(X, y, fit_params=fit_params) + return self + def pre_transform(self, X, y, fit_params=None, init_params=None): # Save all transformation object in a list to create a pipeline object steps = [] @@ -98,7 +99,8 @@ def fit(self, X, Y, fit_params=None, init_params=None): continue name_ = instantiated_hyperparameter.split(":")[1] - preproc_params[name_] = self.configuration[instantiated_hyperparameter] + preproc_params[name_] = self.configuration[ + instantiated_hyperparameter] preproc_params.update(init_params_per_method[preproc_name]) preprocessor_object = components.preprocessing_components. \ @@ -116,7 +118,7 @@ def fit(self, X, Y, fit_params=None, init_params=None): if self.configuration[instantiated_hyperparameter] is None: continue - name_ = instantiated_hyperparameter. split(":")[1] + name_ = instantiated_hyperparameter.split(":")[1] estimator_parameters[name_] = self.configuration[ instantiated_hyperparameter] @@ -127,7 +129,7 @@ def fit(self, X, Y, fit_params=None, init_params=None): steps.append((estimator_name, estimator_object)) self._validate_input_X(X) - self._validate_input_Y(Y) + self._validate_input_Y(y) self._pipeline = Pipeline(steps) if fit_params is None or not isinstance(fit_params, dict): @@ -135,9 +137,20 @@ def fit(self, X, Y, fit_params=None, init_params=None): else: fit_params = {key.replace(":", "__"): value for key, value in fit_params.items()} - self._pipeline.fit(X, Y, **fit_params) + X, fit_params = self._pipeline._pre_transform(X, y, **fit_params) + return X, fit_params + + def fit_estimator(self, X, y, fit_params=None): + self._pipeline.steps[-1][-1].fit(X, y, **fit_params) return self + def iterative_fit(self, X, y, fit_params=None, n_iter=1): + self._pipeline.steps[-1][-1].iterative_fit(X, y, n_iter=n_iter, + **fit_params) + + def configuration_fully_fitted(self): + return self._pipeline.steps[-1][-1].configuration_fully_fitted() + def _validate_input_X(self, X): # TODO: think of all possible states which can occur and how to # handle them diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 9f76bba374..836dac2055 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -61,21 +61,21 @@ class ParamSklearnClassifier(ClassifierMixin, ParamSklearnBaseEstimator): """ - def fit(self, X, Y, fit_params=None, init_params=None): - self.num_targets = 1 if len(Y.shape) == 1 else Y.shape[1] + def pre_transform(self, X, y, fit_params=None, init_params=None): + self.num_targets = 1 if len(y.shape) == 1 else y.shape[1] # Weighting samples has to be done here, not in the components if self.configuration['balancing:strategy'] == 'weighting': balancing = Balancing(strategy='weighting') init_params, fit_params = balancing.get_weights( - Y, self.configuration['classifier'], + y, self.configuration['classifier'], self.configuration['preprocessor'], init_params, fit_params) - super(ParamSklearnClassifier, self).fit(X, Y, fit_params=fit_params, - init_params=init_params) + X, fit_params = super(ParamSklearnClassifier, self).pre_transform( + X, y, fit_params=fit_params, init_params=init_params) - return self + return X, fit_params def predict_proba(self, X, batch_size=None): """predict_proba. diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/ParamSklearn/components/classification/bernoulli_nb.py index 94d5fc82fd..9290b5612f 100644 --- a/ParamSklearn/components/classification/bernoulli_nb.py +++ b/ParamSklearn/components/classification/bernoulli_nb.py @@ -1,5 +1,6 @@ import numpy as np import sklearn.naive_bayes +from sklearn.utils.validation import check_is_fitted from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -24,10 +25,41 @@ def __init__(self, alpha, fit_prior, random_state=None, verbose=0): self.estimator = None def fit(self, X, y): - self.estimator = sklearn.naive_bayes.MultinomialNB( alpha = self.alpha, fit_prior = self.fit_prior) - self.estimator.fit(X, y) + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1) return self + def iterative_fit(self, X, y, n_iter=1, refit=False): + if refit: + self.estimator = None + + if self.estimator is None: + self.n_iter = 0 + self.fully_fit_ = False + self.estimator = sklearn.naive_bayes.MultinomialNB( + alpha=self.alpha, fit_prior=self.fit_prior) + self.classes_ = np.unique(y.astype(int)) + + for iter in range(n_iter): + start = self.n_iter * 1000 + stop = (self.n_iter + 1) * 1000 + self.estimator.partial_fit(X[start:stop], y[start:stop], self.classes_) + self.n_iter += 1 + + if stop >= len(y): + self.fully_fit_ = True + break + + return self + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + elif not hasattr(self, 'fully_fit_'): + return False + else: + return self.fully_fit_ + def predict(self, X): if self.estimator is None: raise NotImplementedError diff --git a/ParamSklearn/components/classification/extra_trees.py b/ParamSklearn/components/classification/extra_trees.py index 9bc85b0082..4766651072 100644 --- a/ParamSklearn/components/classification/extra_trees.py +++ b/ParamSklearn/components/classification/extra_trees.py @@ -60,29 +60,46 @@ def __init__(self, n_estimators, criterion, min_samples_leaf, self.verbose = int(verbose) self.estimator = None - def fit(self, X, Y, sample_weight=None): - num_features = X.shape[1] - max_features = int(float(self.max_features) * (np.log(num_features) + 1)) - # Use at most half of the features - max_features = max(1, min(int(X.shape[1] / 2), max_features)) - self.estimator = ETC( - n_estimators=0, criterion=self.criterion, - max_depth=self.max_depth, min_samples_split=self.min_samples_split, - min_samples_leaf=self.min_samples_leaf, bootstrap=self.bootstrap, - max_features=max_features, max_leaf_nodes=self.max_leaf_nodes, - oob_score=self.oob_score, n_jobs=self.n_jobs, verbose=self.verbose, - random_state=self.random_state, - warm_start = True - ) - # JTS TODO: I think we might have to copy here if we want self.estimator - # to always be consistent on sigabort - while len(self.estimator.estimators_) < self.n_estimators: - tmp = self.estimator # TODO copy ? - tmp.n_estimators += self.estimator_increment - tmp.fit(X, Y, sample_weight=sample_weight) - self.estimator = tmp + def fit(self, X, y, sample_weight=None, refit=False): + if self.estimator is None or refit: + self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight, + refit=refit) + + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight) return self + def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): + if refit: + self.estimator = None + + if self.estimator is None: + num_features = X.shape[1] + max_features = int( + float(self.max_features) * (np.log(num_features) + 1)) + # Use at most half of the features + max_features = max(1, min(int(X.shape[1] / 2), max_features)) + self.estimator = ETC( + n_estimators=0, criterion=self.criterion, + max_depth=self.max_depth, min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, bootstrap=self.bootstrap, + max_features=max_features, max_leaf_nodes=self.max_leaf_nodes, + oob_score=self.oob_score, n_jobs=self.n_jobs, verbose=self.verbose, + random_state=self.random_state, + warm_start=True + ) + + tmp = self.estimator # TODO copy ? + tmp.n_estimators += n_iter + tmp.fit(X, y, sample_weight=sample_weight) + self.estimator = tmp + return self + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + return not len(self.estimator.estimators_) < self.n_estimators + def predict(self, X): if self.estimator is None: raise NotImplementedError diff --git a/ParamSklearn/components/classification/gaussian_nb.py b/ParamSklearn/components/classification/gaussian_nb.py index 771adf0945..6d5c6d24e8 100644 --- a/ParamSklearn/components/classification/gaussian_nb.py +++ b/ParamSklearn/components/classification/gaussian_nb.py @@ -15,12 +15,42 @@ def __init__(self, random_state=None, verbose=0): self.verbose = int(verbose) self.estimator = None - def fit(self, X, Y): - num_features = X.shape[1] - self.estimator = sklearn.naive_bayes.GaussianNB() - self.estimator.fit(X, Y) + def fit(self, X, y): + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1) return self + def iterative_fit(self, X, y, n_iter=1, refit=False): + if refit: + self.estimator = None + + if self.estimator is None: + self.n_iter = 0 + self.fully_fit_ = False + self.estimator = sklearn.naive_bayes.GaussianNB() + self.classes_ = np.unique(y.astype(int)) + + for iter in range(n_iter): + start = self.n_iter * 1000 + stop = (self.n_iter + 1) * 1000 + self.estimator.partial_fit(X[start:stop], y[start:stop], + self.classes_) + self.n_iter += 1 + + if stop >= len(y): + self.fully_fit_ = True + break + + return self + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + elif not hasattr(self, 'fully_fit_'): + return False + else: + return self.fully_fit_ + def predict(self, X): if self.estimator is None: raise NotImplementedError diff --git a/ParamSklearn/components/classification/gradient_boosting.py b/ParamSklearn/components/classification/gradient_boosting.py index 61d120836c..29b5028cf3 100644 --- a/ParamSklearn/components/classification/gradient_boosting.py +++ b/ParamSklearn/components/classification/gradient_boosting.py @@ -53,36 +53,53 @@ def __init__(self, learning_rate, n_estimators, subsample, self.verbose = int(verbose) self.estimator = None - def fit(self, X, Y): - num_features = X.shape[1] - max_features = int(float(self.max_features) * (np.log(num_features) + 1)) - # Use at most half of the features - max_features = max(1, min(int(X.shape[1] / 2), max_features)) - self.estimator = sklearn.ensemble.GradientBoostingClassifier( - learning_rate=self.learning_rate, - n_estimators=0, - subsample=self.subsample, - min_samples_split=self.min_samples_split, - min_samples_leaf=self.min_samples_leaf, - max_features=max_features, - max_leaf_nodes=self.max_leaf_nodes, - loss=self.loss, - max_depth=self.max_depth, - warm_start=True, - init=self.init, - random_state=self.random_state, - verbose=self.verbose - ) - # JTS TODO: I think we might have to copy here if we want self.estimator - # to always be consistent on sigabort - while len(self.estimator.estimators_) < self.n_estimators: - tmp = self.estimator # TODO I think we need to copy here! - tmp.n_estimators += self.estimator_increment - tmp.fit(X, Y) - self.estimator = tmp - self.estimator.fit(X, Y) + def fit(self, X, y, sample_weight=None, refit=False): + if self.estimator is None or refit: + self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight, + refit=refit) + + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight) return self + def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): + if refit: + self.estimator = None + + if self.estimator is None: + num_features = X.shape[1] + max_features = int(float(self.max_features) * (np.log(num_features) + 1)) + # Use at most half of the features + max_features = max(1, min(int(X.shape[1] / 2), max_features)) + self.estimator = sklearn.ensemble.GradientBoostingClassifier( + learning_rate=self.learning_rate, + n_estimators=0, + subsample=self.subsample, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + max_features=max_features, + max_leaf_nodes=self.max_leaf_nodes, + loss=self.loss, + max_depth=self.max_depth, + warm_start=True, + init=self.init, + random_state=self.random_state, + verbose=self.verbose + ) + + tmp = self.estimator # TODO copy ? + tmp.n_estimators += n_iter + tmp.fit(X, y, sample_weight=sample_weight) + self.estimator = tmp + + return self + + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + return not len(self.estimator.estimators_) < self.n_estimators + def predict(self, X): if self.estimator is None: raise NotImplementedError diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py index a217b395a6..16559356bd 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -24,12 +24,43 @@ def __init__(self, alpha, fit_prior, random_state=None, verbose=0): self.verbose = int(verbose) self.estimator = None - def fit(self, X, Y): - self.estimator = sklearn.naive_bayes.MultinomialNB(alpha=self.alpha, - fit_prior=self.fit_prior) - self.estimator.fit(X, Y) + def fit(self, X, y): + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1) return self + def iterative_fit(self, X, y, n_iter=1, refit=False): + if refit: + self.estimator = None + + if self.estimator is None: + self.n_iter = 0 + self.fully_fit_ = False + self.estimator = sklearn.naive_bayes.MultinomialNB( + alpha=self.alpha, fit_prior=self.fit_prior) + self.classes_ = np.unique(y.astype(int)) + + for iter in range(n_iter): + start = self.n_iter * 1000 + stop = (self.n_iter + 1) * 1000 + self.estimator.partial_fit(X[start:stop], y[start:stop], + self.classes_) + self.n_iter += 1 + + if stop >= len(y): + self.fully_fit_ = True + break + + return self + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + elif not hasattr(self, 'fully_fit_'): + return False + else: + return self.fully_fit_ + def predict(self, X): if self.estimator is None: raise NotImplementedError diff --git a/ParamSklearn/components/classification/passive_aggresive.py b/ParamSklearn/components/classification/passive_aggresive.py index 61d95dd617..1fd0bd3235 100644 --- a/ParamSklearn/components/classification/passive_aggresive.py +++ b/ParamSklearn/components/classification/passive_aggresive.py @@ -1,3 +1,4 @@ +import numpy as np from sklearn.linear_model.passive_aggressive import PassiveAggressiveClassifier from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -20,13 +21,33 @@ def __init__(self, C, fit_intercept, n_iter, loss, random_state=None): self.random_state = random_state self.estimator = None - def fit(self, X, Y): - self.estimator = PassiveAggressiveClassifier( - C=self.C, fit_intercept=self.fit_intercept, n_iter=self.n_iter, - loss=self.loss, shuffle=True, random_state=self.random_state) - self.estimator.fit(X, Y) + def fit(self, X, y): + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1) + return self + def iterative_fit(self, X, y, n_iter=1, refit=False): + if refit: + self.estimator = None + + if self.estimator is None: + self.estimator = PassiveAggressiveClassifier( + C=self.C, fit_intercept=self.fit_intercept, n_iter=1, + loss=self.loss, shuffle=True, random_state=self.random_state, + warm_start=True) + self.classes_ = np.unique(y.astype(int)) + + self.estimator.n_iter += n_iter + self.estimator.fit(X, y) + + return self + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + return not self.estimator.n_iter < self.n_iter + def predict(self, X): if self.estimator is None: raise NotImplementedError() diff --git a/ParamSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py index 646a841452..6e90856e55 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -29,49 +29,65 @@ def __init__(self, n_estimators, criterion, max_features, self.n_jobs = n_jobs self.estimator = None - def fit(self, X, Y, sample_weight=None): - self.n_estimators = int(self.n_estimators) - - if self.max_depth == "None": - self.max_depth = None - else: - self.max_depth = int(self.max_depth) - self.min_samples_split = int(self.min_samples_split) - self.min_samples_leaf = int(self.min_samples_leaf) - if self.max_features not in ("sqrt", "log2", "auto"): - num_features = X.shape[1] - max_features = int(float(self.max_features) * (np.log(num_features) + 1)) - # Use at most half of the features - max_features = max(1, min(int(X.shape[1] / 2), max_features)) - if self.bootstrap == "True": - self.bootstrap = True - else: - self.bootstrap = False - if self.max_leaf_nodes == "None": - self.max_leaf_nodes = None - - # initial fit of only increment trees - self.estimator = RandomForestClassifier( - n_estimators=0, - criterion=self.criterion, - max_features=max_features, - max_depth=self.max_depth, - min_samples_split=self.min_samples_split, - min_samples_leaf=self.min_samples_leaf, - bootstrap=self.bootstrap, - max_leaf_nodes=self.max_leaf_nodes, - random_state=self.random_state, - n_jobs=self.n_jobs, - warm_start=True) - # JTS TODO: I think we might have to copy here if we want self.estimator - # to always be consistent on sigabort - while len(self.estimator.estimators_) < self.n_estimators: - tmp = self.estimator # TODO I think we need to copy here! - tmp.n_estimators += self.estimator_increment - tmp.fit(X, Y, sample_weight=sample_weight) - self.estimator = tmp + def fit(self, X, y, sample_weight=None, refit=False): + if self.estimator is None or refit: + self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight, + refit=refit) + + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight) + return self + + def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): + if refit: + self.estimator = None + + if self.estimator is None: + self.n_estimators = int(self.n_estimators) + if self.max_depth == "None": + self.max_depth = None + else: + self.max_depth = int(self.max_depth) + self.min_samples_split = int(self.min_samples_split) + self.min_samples_leaf = int(self.min_samples_leaf) + if self.max_features not in ("sqrt", "log2", "auto"): + num_features = X.shape[1] + max_features = int(float(self.max_features) * (np.log(num_features) + 1)) + # Use at most half of the features + max_features = max(1, min(int(X.shape[1] / 2), max_features)) + if self.bootstrap == "True": + self.bootstrap = True + else: + self.bootstrap = False + if self.max_leaf_nodes == "None": + self.max_leaf_nodes = None + + # initial fit of only increment trees + self.estimator = RandomForestClassifier( + n_estimators=0, + criterion=self.criterion, + max_features=max_features, + max_depth=self.max_depth, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + bootstrap=self.bootstrap, + max_leaf_nodes=self.max_leaf_nodes, + random_state=self.random_state, + n_jobs=self.n_jobs, + warm_start=True) + + tmp = self.estimator # TODO I think we need to copy here! + tmp.n_estimators += n_iter + tmp.fit(X, y, sample_weight=sample_weight) + self.estimator = tmp return self + def configuration_fully_fitted(self): + if self.estimator is None: + return False + + return not len(self.estimator.estimators_) < self.n_estimators + def predict(self, X): if self.estimator is None: raise NotImplementedError diff --git a/ParamSklearn/components/classification/sgd.py b/ParamSklearn/components/classification/sgd.py index 38f673eaad..9684953c94 100644 --- a/ParamSklearn/components/classification/sgd.py +++ b/ParamSklearn/components/classification/sgd.py @@ -29,35 +29,50 @@ def __init__(self, loss, penalty, alpha, fit_intercept, n_iter, self.random_state = random_state self.estimator = None - def fit(self, X, Y): - # TODO: maybe scale training data that its norm becomes 1? - # http://scikit-learn.org/stable/modules/sgd.html#id1 - self.alpha = float(self.alpha) - self.fit_intercept = bool(self.fit_intercept) - self.n_iter = int(self.n_iter) - if self.class_weight == "None": - self.class_weight = None - self.l1_ratio = float(self.l1_ratio) - self.epsilon = float(self.epsilon) - self.eta0 = float(self.eta0) - self.power_t = float(self.power_t) - - self.estimator = SGDClassifier(loss=self.loss, - penalty=self.penalty, - alpha=self.alpha, - fit_intercept=self.fit_intercept, - n_iter=self.n_iter, - learning_rate=self.learning_rate, - class_weight=self.class_weight, - l1_ratio=self.l1_ratio, - epsilon=self.epsilon, - eta0=self.eta0, - power_t=self.power_t, - shuffle=True, - random_state=self.random_state) - self.estimator.fit(X, Y) + def fit(self, X, y): + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1) + return self + def iterative_fit(self, X, y, n_iter=1, refit=False): + if refit: + self.estimator = None + + if self.estimator is None: + self.alpha = float(self.alpha) + self.fit_intercept = bool(self.fit_intercept) + self.n_iter = int(self.n_iter) + if self.class_weight == "None": + self.class_weight = None + self.l1_ratio = float(self.l1_ratio) + self.epsilon = float(self.epsilon) + self.eta0 = float(self.eta0) + self.power_t = float(self.power_t) + + self.estimator = SGDClassifier(loss=self.loss, + penalty=self.penalty, + alpha=self.alpha, + fit_intercept=self.fit_intercept, + n_iter=self.n_iter, + learning_rate=self.learning_rate, + class_weight=self.class_weight, + l1_ratio=self.l1_ratio, + epsilon=self.epsilon, + eta0=self.eta0, + power_t=self.power_t, + shuffle=True, + random_state=self.random_state) + + self.estimator.n_iter += n_iter + self.estimator.fit(X, y) + return self + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + return not self.estimator.n_iter < self.n_iter + def predict(self, X): if self.estimator is None: raise NotImplementedError() diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index ffdf81bf33..b287cca051 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -95,6 +95,21 @@ def _test_classifier(classifier, dataset='iris', sparse=False): return predictions, Y_test +def _test_classifier_iterative_fit(classifier, dataset='iris', sparse=False): + X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, + make_sparse=sparse) + configuration_space = classifier.get_hyperparameter_search_space( + dataset_properties={'sparse': sparse}) + default = configuration_space.get_default_configuration() + classifier = classifier(random_state=1, + **{hp_name: default[hp_name] for hp_name in + default if default[hp_name] is not None}) + while not classifier.configuration_fully_fitted(): + predictor = classifier.iterative_fit(X_train, Y_train) + predictions = predictor.predict(X_test) + return predictions, Y_test + + def _test_classifier_predict_proba(classifier, dataset='iris', sparse=False): X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, make_sparse=sparse) diff --git a/tests/components/classification/test_bernoulli_nb.py b/tests/components/classification/test_bernoulli_nb.py new file mode 100644 index 0000000000..5c770b2dd4 --- /dev/null +++ b/tests/components/classification/test_bernoulli_nb.py @@ -0,0 +1,25 @@ +import unittest + +from ParamSklearn.components.classification.bernoulli_nb import \ + BernoulliNB +from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit + +import sklearn.metrics + + +class BernoulliNBComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = \ + _test_classifier(BernoulliNB) + self.assertAlmostEqual(0.97999999999999998, + sklearn.metrics.accuracy_score(predictions, + targets)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = \ + _test_classifier_iterative_fit(BernoulliNB) + self.assertAlmostEqual(0.97999999999999998, + sklearn.metrics.accuracy_score(predictions, + targets)) \ No newline at end of file diff --git a/tests/components/classification/test_extra_trees.py b/tests/components/classification/test_extra_trees.py index 056148db4f..5423851604 100644 --- a/tests/components/classification/test_extra_trees.py +++ b/tests/components/classification/test_extra_trees.py @@ -2,7 +2,7 @@ from ParamSklearn.components.classification.extra_trees import \ ExtraTreesClassifier -from ParamSklearn.util import _test_classifier +from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit import sklearn.metrics @@ -13,4 +13,12 @@ def test_default_configuration(self): predictions, targets = \ _test_classifier(ExtraTreesClassifier) self.assertAlmostEqual(0.95999999999999996, - sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file + sklearn.metrics.accuracy_score(predictions, targets)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = \ + _test_classifier_iterative_fit(ExtraTreesClassifier) + self.assertAlmostEqual(0.95999999999999996, + sklearn.metrics.accuracy_score(predictions, + targets)) \ No newline at end of file diff --git a/tests/components/classification/test_gaussian_nb.py b/tests/components/classification/test_gaussian_nb.py new file mode 100644 index 0000000000..574c1c49ab --- /dev/null +++ b/tests/components/classification/test_gaussian_nb.py @@ -0,0 +1,25 @@ +import unittest + +from ParamSklearn.components.classification.gaussian_nb import \ + GaussianNB +from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit + +import sklearn.metrics + + +class GaussianNBComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = \ + _test_classifier(GaussianNB) + self.assertAlmostEqual(0.95999999999999996, + sklearn.metrics.accuracy_score(predictions, + targets)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = \ + _test_classifier_iterative_fit(GaussianNB) + self.assertAlmostEqual(0.95999999999999996, + sklearn.metrics.accuracy_score(predictions, + targets)) \ No newline at end of file diff --git a/tests/components/classification/test_gradient_boosting.py b/tests/components/classification/test_gradient_boosting.py index 4b733944b9..06037459a4 100644 --- a/tests/components/classification/test_gradient_boosting.py +++ b/tests/components/classification/test_gradient_boosting.py @@ -2,7 +2,7 @@ from ParamSklearn.components.classification.gradient_boosting import \ GradientBoostingClassifier -from ParamSklearn.util import _test_classifier +from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit import sklearn.metrics @@ -12,5 +12,14 @@ def test_default_configuration(self): for i in range(10): predictions, targets = \ _test_classifier(GradientBoostingClassifier) - self.assertAlmostEqual(0.93999999999999995, - sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file + self.assertAlmostEqual(0.95999999999999996, + sklearn.metrics.accuracy_score(predictions, targets)) + + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = \ + _test_classifier_iterative_fit(GradientBoostingClassifier) + self.assertAlmostEqual(0.95999999999999996, + sklearn.metrics.accuracy_score(predictions, + targets)) \ No newline at end of file diff --git a/tests/components/classification/test_multinomial_nb.py b/tests/components/classification/test_multinomial_nb.py new file mode 100644 index 0000000000..4225d50e07 --- /dev/null +++ b/tests/components/classification/test_multinomial_nb.py @@ -0,0 +1,25 @@ +import unittest + +from ParamSklearn.components.classification.multinomial_nb import \ + MultinomialNB +from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit + +import sklearn.metrics + + +class MultinomialNBComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = \ + _test_classifier(MultinomialNB) + self.assertAlmostEqual(0.97999999999999998, + sklearn.metrics.accuracy_score(predictions, + targets)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = \ + _test_classifier_iterative_fit(MultinomialNB) + self.assertAlmostEqual(0.97999999999999998, + sklearn.metrics.accuracy_score(predictions, + targets)) \ No newline at end of file diff --git a/tests/components/classification/test_passive_aggressive.py b/tests/components/classification/test_passive_aggressive.py index d3e00f9400..a1809f331d 100644 --- a/tests/components/classification/test_passive_aggressive.py +++ b/tests/components/classification/test_passive_aggressive.py @@ -1,23 +1,39 @@ import unittest from ParamSklearn.components.classification.passive_aggresive import PassiveAggressive -from ParamSklearn.util import _test_classifier +from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit import sklearn.metrics -class SGDComponentTest(unittest.TestCase): +class PassiveAggressiveComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): - predictions, targets = _test_classifier(PassiveAggressive, dataset='iris') - self.assertAlmostEqual(0.92, + predictions, targets = _test_classifier(PassiveAggressive) + self.assertAlmostEqual(0.97999999999999998, sklearn.metrics.accuracy_score(predictions, targets)) + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = _test_classifier_iterative_fit( + PassiveAggressive) + self.assertAlmostEqual(0.97999999999999998, + sklearn.metrics.accuracy_score( + predictions, targets)) + def test_default_configuration_digits(self): for i in range(10): predictions, targets = \ _test_classifier(classifier=PassiveAggressive, dataset='digits') - self.assertAlmostEqual(0.91317547055251969, + self.assertAlmostEqual(0.91924711596842745, sklearn.metrics.accuracy_score(predictions, - targets)) \ No newline at end of file + targets)) + + def test_default_configuration_digits_iterative_fit(self): + for i in range(10): + predictions, targets = _test_classifier_iterative_fit(classifier=PassiveAggressive, + dataset='digits') + self.assertAlmostEqual(0.91924711596842745, + sklearn.metrics.accuracy_score( + predictions, targets)) \ No newline at end of file diff --git a/tests/components/classification/test_random_forest.py b/tests/components/classification/test_random_forest.py index 63d7f11c3f..a947aae40a 100644 --- a/tests/components/classification/test_random_forest.py +++ b/tests/components/classification/test_random_forest.py @@ -1,7 +1,7 @@ import unittest from ParamSklearn.components.classification.random_forest import RandomForest -from ParamSklearn.util import _test_classifier +from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit import sklearn.metrics @@ -9,6 +9,14 @@ class RandomForestComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): - predictions, targets = _test_classifier(RandomForest, dataset='iris') + predictions, targets = _test_classifier(RandomForest) self.assertAlmostEqual(0.95999999999999996, - sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file + sklearn.metrics.accuracy_score(predictions, targets)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = \ + _test_classifier_iterative_fit(RandomForest) + self.assertAlmostEqual(0.95999999999999996, + sklearn.metrics.accuracy_score( + predictions, targets)) \ No newline at end of file diff --git a/tests/components/classification/test_sgd.py b/tests/components/classification/test_sgd.py index 62b19a3ce6..43c0acc00f 100644 --- a/tests/components/classification/test_sgd.py +++ b/tests/components/classification/test_sgd.py @@ -1,7 +1,7 @@ import unittest from ParamSklearn.components.classification.sgd import SGD -from ParamSklearn.util import _test_classifier +from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit import sklearn.metrics @@ -10,14 +10,31 @@ class SGDComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): predictions, targets = _test_classifier(SGD, dataset='iris') - self.assertAlmostEqual(0.96, + self.assertAlmostEqual(1.0, sklearn.metrics.accuracy_score(predictions, targets)) + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = _test_classifier_iterative_fit( + SGD) + self.assertAlmostEqual(1.0, + sklearn.metrics.accuracy_score( + predictions, targets)) + def test_default_configuration_digits(self): for i in range(10): predictions, targets = \ _test_classifier(classifier=SGD, dataset='digits') self.assertAlmostEqual(0.89313904068002425, sklearn.metrics.accuracy_score(predictions, - targets)) \ No newline at end of file + targets)) + + def test_default_configuration_digits_iterative_fit(self): + for i in range(10): + predictions, targets = _test_classifier_iterative_fit( + classifier=SGD, + dataset='digits') + self.assertAlmostEqual(0.89313904068002425, + sklearn.metrics.accuracy_score( + predictions, targets)) \ No newline at end of file diff --git a/tests/components/preprocessing/test_balancing.py b/tests/components/preprocessing/test_balancing.py index 0bef60688c..3a623e6d4f 100644 --- a/tests/components/preprocessing/test_balancing.py +++ b/tests/components/preprocessing/test_balancing.py @@ -73,15 +73,16 @@ def test_weighting_effect(self): for name, clf, acc_no_weighting, acc_weighting in \ [('adaboost', AdaboostClassifier, 0.692, 0.719), ('decision_tree', DecisionTree, 0.712, 0.668), - ('extra_trees', ExtraTreesClassifier, 0.910, 0.913), - ('random_forest', RandomForest, 0.896, 0.895), + ('extra_trees', ExtraTreesClassifier, 0.901, 0.919), + ('random_forest', RandomForest, 0.886, 0.885), ('libsvm_svc', LibSVM_SVC, 0.915, 0.937), ('liblinear_svc', LibLinear_SVC, 0.920, 0.923), - ('sgd', SGD, 0.879, 0.906), + ('sgd', SGD, 0.811, 0.902), ('ridge', Ridge, 0.89071038251366119, 0.91013964784456591)]: for strategy, acc in [('none', acc_no_weighting), ('weighting', acc_weighting)]: + # Fit X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cs = ParamSklearnClassifier.get_hyperparameter_search_space( include_estimators=[name]) @@ -94,15 +95,30 @@ def test_weighting_effect(self): sklearn.metrics.accuracy_score(predictions, Y_test), places=3) + # pre_transform and fit_estimator + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + cs = ParamSklearnClassifier.get_hyperparameter_search_space( + include_estimators=[name]) + default = cs.get_default_configuration() + default._values['balancing:strategy'] = strategy + classifier = ParamSklearnClassifier(default, random_state=1) + Xt, fit_params = classifier.pre_transform(X_train, Y_train) + classifier.fit_estimator(Xt, Y_train, fit_params=fit_params) + predictions = classifier.predict(X_test) + self.assertAlmostEqual(acc, + sklearn.metrics.accuracy_score( + predictions, Y_test), + places=3) + for name, pre, acc_no_weighting, acc_weighting in \ [('extra_trees_preproc_for_classification', - ExtraTreesPreprocessor, 0.900, 0.908), + ExtraTreesPreprocessor, 0.892, 0.910), ('liblinear_svc_preprocessor', LibLinear_Preprocessor, - 0.907, 0.882)]: + 0.889, 0.885)]: for strategy, acc in [('none', acc_no_weighting), ('weighting', acc_weighting)]: - X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cs = ParamSklearnClassifier.get_hyperparameter_search_space( include_estimators=['sgd'], include_preprocessors=[name]) default = cs.get_default_configuration() @@ -110,6 +126,21 @@ def test_weighting_effect(self): classifier = ParamSklearnClassifier(default, random_state=1) predictor = classifier.fit(X_train, Y_train) predictions = predictor.predict(X_test) + self.assertAlmostEqual(acc, + sklearn.metrics.accuracy_score( + predictions, Y_test), + places=3) + + # pre_transform and fit_estimator + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + cs = ParamSklearnClassifier.get_hyperparameter_search_space( + include_estimators=['sgd'], include_preprocessors=[name]) + default = cs.get_default_configuration() + default._values['balancing:strategy'] = strategy + classifier = ParamSklearnClassifier(default, random_state=1) + Xt, fit_params = classifier.pre_transform(X_train, Y_train) + classifier.fit_estimator(Xt, Y_train, fit_params=fit_params) + predictions = classifier.predict(X_test) self.assertAlmostEqual(acc, sklearn.metrics.accuracy_score( predictions, Y_test), From f5cb3e3ea0254ce15c9c647e91f14f3d2c1b6b50 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 20 Jul 2015 16:34:47 +0200 Subject: [PATCH 267/352] Allow for iterative fitting with auto-sklearn --- ParamSklearn/base.py | 28 +++++++++++++++++++--------- ParamSklearn/classification.py | 4 ++-- tests/test_classification.py | 32 ++++++++++++++++---------------- tests/test_regression.py | 8 ++++---- 4 files changed, 41 insertions(+), 31 deletions(-) diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index c60c40e136..b1ce319740 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -10,7 +10,7 @@ from sklearn.base import BaseEstimator from sklearn.pipeline import Pipeline -from sklearn.utils import check_random_state +from sklearn.utils.validation import check_random_state, check_is_fitted from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter @@ -29,7 +29,6 @@ class ParamSklearnBaseEstimator(BaseEstimator): def __init__(self, configuration, random_state=None): self.configuration = configuration - self._pipeline = None if random_state is None: self.random_state = check_random_state(1) @@ -131,31 +130,42 @@ def pre_transform(self, X, y, fit_params=None, init_params=None): self._validate_input_X(X) self._validate_input_Y(y) - self._pipeline = Pipeline(steps) + self.pipeline_ = Pipeline(steps) if fit_params is None or not isinstance(fit_params, dict): fit_params = dict() else: fit_params = {key.replace(":", "__"): value for key, value in fit_params.items()} - X, fit_params = self._pipeline._pre_transform(X, y, **fit_params) + X, fit_params = self.pipeline_._pre_transform(X, y, **fit_params) return X, fit_params def fit_estimator(self, X, y, fit_params=None): - self._pipeline.steps[-1][-1].fit(X, y, **fit_params) + check_is_fitted(self, 'pipeline_') + if fit_params is None: + fit_params = {} + self.pipeline_.steps[-1][-1].fit(X, y, **fit_params) return self def iterative_fit(self, X, y, fit_params=None, n_iter=1): - self._pipeline.steps[-1][-1].iterative_fit(X, y, n_iter=n_iter, + check_is_fitted(self, 'pipeline_') + if fit_params is None: + fit_params = {} + self.pipeline_.steps[-1][-1].iterative_fit(X, y, n_iter=n_iter, **fit_params) + def estimator_supports_iterative_fit(self): + check_is_fitted(self, 'pipeline_') + return hasattr(self.pipeline_.steps[-1][-1], 'iterative_fit') + def configuration_fully_fitted(self): - return self._pipeline.steps[-1][-1].configuration_fully_fitted() + check_is_fitted(self, 'pipeline_') + return self.pipeline_.steps[-1][-1].configuration_fully_fitted() def _validate_input_X(self, X): # TODO: think of all possible states which can occur and how to # handle them """ - if not self._pipeline[-1].handles_missing_values() or \ + if not self.pipeline_[-1].handles_missing_values() or \ (self._preprocessor is not None and not\ self._preprocessor.handles_missing_value()): assert_all_finite(X) @@ -230,7 +240,7 @@ def predict(self, X, batch_size=None): if batch_size is None: self._validate_input_X(X) - return self._pipeline.predict(X) + return self.pipeline_.predict(X) else: if type(batch_size) is not int or batch_size <= 0: raise Exception("batch_size must be a positive integer") diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 836dac2055..802afbe51c 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -96,10 +96,10 @@ def predict_proba(self, X, batch_size=None): if batch_size is None: self._validate_input_X(X) Xt = X - for name, transform in self._pipeline.steps[:-1]: + for name, transform in self.pipeline_.steps[:-1]: Xt = transform.transform(Xt) - return self._pipeline.steps[-1][-1].predict_proba(Xt) + return self.pipeline_.steps[-1][-1].predict_proba(Xt) else: if type(batch_size) is not int or batch_size <= 0: diff --git a/tests/test_classification.py b/tests/test_classification.py index 1382da9f78..8d49548f9d 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -289,8 +289,8 @@ def test_predict_batched(self): cls.fit(X_train, Y_train) X_test_ = X_test.copy() prediction_ = cls.predict(X_test_) - cls_predict = mock.Mock(wraps=cls._pipeline) - cls._pipeline = cls_predict + cls_predict = mock.Mock(wraps=cls.pipeline_) + cls.pipeline_ = cls_predict prediction = cls.predict(X_test, batch_size=20) self.assertEqual((1647,), prediction.shape) self.assertEqual(83, cls_predict.predict.call_count) @@ -302,8 +302,8 @@ def test_predict_batched(self): cls.fit(X_train, Y_train) X_test_ = X_test.copy() prediction_ = cls.predict(X_test_) - cls_predict = mock.Mock(wraps=cls._pipeline) - cls._pipeline = cls_predict + cls_predict = mock.Mock(wraps=cls.pipeline_) + cls.pipeline_ = cls_predict prediction = cls.predict(X_test, batch_size=20) self.assertEqual((1647, 2), prediction.shape) self.assertEqual(83, cls_predict.predict.call_count) @@ -336,8 +336,8 @@ def test_predict_batched_sparse(self): cls.fit(X_train, Y_train) X_test_ = X_test.copy() prediction_ = cls.predict(X_test_) - cls_predict = mock.Mock(wraps=cls._pipeline) - cls._pipeline = cls_predict + cls_predict = mock.Mock(wraps=cls.pipeline_) + cls.pipeline_ = cls_predict prediction = cls.predict(X_test, batch_size=20) self.assertEqual((1647,), prediction.shape) self.assertEqual(83, cls_predict.predict.call_count) @@ -350,8 +350,8 @@ def test_predict_batched_sparse(self): cls.fit(X_train, Y_train) X_test_ = X_test.copy() prediction_ = cls.predict(X_test_) - cls_predict = mock.Mock(wraps=cls._pipeline) - cls._pipeline = cls_predict + cls_predict = mock.Mock(wraps=cls.pipeline_) + cls.pipeline_ = cls_predict prediction = cls.predict(X_test, batch_size=20) self.assertEqual((1647, 2), prediction.shape) self.assertEqual(83, cls_predict.predict.call_count) @@ -368,8 +368,8 @@ def test_predict_proba_batched(self): X_test_ = X_test.copy() prediction_ = cls.predict_proba(X_test_) # The object behind the last step in the pipeline - cls_predict = mock.Mock(wraps=cls._pipeline.steps[-1][1]) - cls._pipeline.steps[-1] = ("estimator", cls_predict) + cls_predict = mock.Mock(wraps=cls.pipeline_.steps[-1][1]) + cls.pipeline_.steps[-1] = ("estimator", cls_predict) prediction = cls.predict_proba(X_test, batch_size=20) self.assertEqual((1647, 10), prediction.shape) self.assertEqual(84, cls_predict.predict_proba.call_count) @@ -382,8 +382,8 @@ def test_predict_proba_batched(self): cls.fit(X_train, Y_train) X_test_ = X_test.copy() prediction_ = cls.predict_proba(X_test_) - cls_predict = mock.Mock(wraps=cls._pipeline.steps[-1][1]) - cls._pipeline.steps[-1] = ("estimator", cls_predict) + cls_predict = mock.Mock(wraps=cls.pipeline_.steps[-1][1]) + cls.pipeline_.steps[-1] = ("estimator", cls_predict) prediction = cls.predict_proba(X_test, batch_size=20) self.assertIsInstance(prediction, list) self.assertEqual(2, len(prediction)) @@ -421,8 +421,8 @@ def test_predict_proba_batched_sparse(self): X_test_ = X_test.copy() prediction_ = cls.predict_proba(X_test_) # The object behind the last step in the pipeline - cls_predict = mock.Mock(wraps=cls._pipeline.steps[-1][1]) - cls._pipeline.steps[-1] = ("estimator", cls_predict) + cls_predict = mock.Mock(wraps=cls.pipeline_.steps[-1][1]) + cls.pipeline_.steps[-1] = ("estimator", cls_predict) prediction = cls.predict_proba(X_test, batch_size=20) self.assertEqual((1647, 10), prediction.shape) self.assertEqual(84, cls_predict.predict_proba.call_count) @@ -436,8 +436,8 @@ def test_predict_proba_batched_sparse(self): cls.fit(X_train, Y_train) X_test_ = X_test.copy() prediction_ = cls.predict_proba(X_test_) - cls_predict = mock.Mock(wraps=cls._pipeline.steps[-1][1]) - cls._pipeline.steps[-1] = ("estimator", cls_predict) + cls_predict = mock.Mock(wraps=cls.pipeline_.steps[-1][1]) + cls.pipeline_.steps[-1] = ("estimator", cls_predict) prediction = cls.predict_proba(X_test, batch_size=20) self.assertIsInstance(prediction, list) self.assertEqual(2, len(prediction)) diff --git a/tests/test_regression.py b/tests/test_regression.py index 4f88f7046c..e52f75a627 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -180,8 +180,8 @@ def test_predict_batched(self): cls.fit(X_train, Y_train) X_test_ = X_test.copy() prediction_ = cls.predict(X_test_) - cls_predict = mock.Mock(wraps=cls._pipeline) - cls._pipeline = cls_predict + cls_predict = mock.Mock(wraps=cls.pipeline_) + cls.pipeline_ = cls_predict prediction = cls.predict(X_test, batch_size=20) self.assertEqual((356,), prediction.shape) self.assertEqual(18, cls_predict.predict.call_count) @@ -198,8 +198,8 @@ def test_predict_batched_sparse(self): cls.fit(X_train, Y_train) X_test_ = X_test.copy() prediction_ = cls.predict(X_test_) - cls_predict = mock.Mock(wraps=cls._pipeline) - cls._pipeline = cls_predict + cls_predict = mock.Mock(wraps=cls.pipeline_) + cls.pipeline_ = cls_predict prediction = cls.predict(X_test, batch_size=20) self.assertEqual((356,), prediction.shape) self.assertEqual(18, cls_predict.predict.call_count) From 6c43b1fd34a97fae3fc36153c666f890412ac98e Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 22 Jul 2015 17:28:58 +0200 Subject: [PATCH 268/352] REFACTOR: pipelines more flexible --- ParamSklearn/__init__.py | 2 +- ParamSklearn/base.py | 322 +++++++--------- ParamSklearn/classification.py | 306 ++++----------- ParamSklearn/components/__init__.py | 3 + ParamSklearn/components/base.py | 360 ++++++++++++++++++ .../components/classification/__init__.py | 142 ++++++- .../components/classification/adaboost.py | 2 +- .../components/classification/bernoulli_nb.py | 4 +- .../classification/decision_tree.py | 2 +- .../components/classification/extra_trees.py | 2 +- .../components/classification/gaussian_nb.py | 2 +- .../classification/gradient_boosting.py | 5 +- .../classification/k_nearest_neighbors.py | 2 +- ParamSklearn/components/classification/lda.py | 2 +- .../classification/liblinear_svc.py | 2 +- .../components/classification/libsvm_svc.py | 2 +- .../classification/multinomial_nb.py | 2 +- .../classification/passive_aggresive.py | 2 +- .../components/classification/proj_logit.py | 3 +- ParamSklearn/components/classification/qda.py | 2 +- .../classification/random_forest.py | 8 +- .../components/classification/ridge.py | 2 +- ParamSklearn/components/classification/sgd.py | 2 +- .../components/classification_base.py | 122 ------ .../components/preprocessing/__init__.py | 131 ++++++- .../components/preprocessing/balancing.py | 16 +- .../components/preprocessing/densifier.py | 2 +- .../extra_trees_preproc_for_classification.py | 2 +- .../components/preprocessing/fast_ica.py | 2 +- .../preprocessing/feature_agglomeration.py | 2 +- ParamSklearn/components/preprocessing/gem.py | 2 +- .../components/preprocessing/imputation.py | 2 +- .../components/preprocessing/kernel_pca.py | 2 +- .../components/preprocessing/kitchen_sinks.py | 2 +- .../liblinear_svc_preprocessor.py | 2 +- .../preprocessing/no_preprocessing.py | 2 +- .../preprocessing/nystroem_sampler.py | 2 +- ParamSklearn/components/preprocessing/pca.py | 8 +- .../components/preprocessing/polynomial.py | 2 +- .../preprocessing/random_trees_embedding.py | 2 +- .../components/preprocessing/rescaling.py | 3 +- .../select_percentile_classification.py | 2 +- .../select_percentile_regression.py | 2 +- .../components/preprocessing/select_rates.py | 2 +- .../components/preprocessing/tfidf.py | 2 +- .../components/preprocessing/truncatedSVD.py | 2 +- ParamSklearn/components/preprocessor_base.py | 111 ------ .../components/regression/__init__.py | 135 ++++++- .../components/regression/gaussian_process.py | 2 +- .../regression/gradient_boosting.py | 2 +- .../components/regression/random_forest.py | 8 +- .../components/regression/ridge_regression.py | 2 +- .../regression/support_vector_regression.py | 2 +- ParamSklearn/components/regression_base.py | 118 ------ ParamSklearn/create_searchspace_util.py | 165 ++++---- ParamSklearn/regression.py | 232 +++-------- source/first_steps.rst | 2 +- .../preprocessing/test_balancing.py | 24 +- tests/test_classification.py | 154 ++++---- ..._create_searchspace_util_classification.py | 128 ++++--- tests/test_regression.py | 76 ++-- tests/test_textclassification.py | 1 + 62 files changed, 1413 insertions(+), 1246 deletions(-) create mode 100644 ParamSklearn/components/base.py delete mode 100644 ParamSklearn/components/classification_base.py delete mode 100644 ParamSklearn/components/preprocessor_base.py delete mode 100644 ParamSklearn/components/regression_base.py diff --git a/ParamSklearn/__init__.py b/ParamSklearn/__init__.py index 0743d23c81..c7f439060a 100644 --- a/ParamSklearn/__init__.py +++ b/ParamSklearn/__init__.py @@ -5,4 +5,4 @@ scikit-learn models. This configuration space can be searched by one of the hyperparameter optimization algorithms in HPOlib.""" -__version__ = "0.15.2dev" \ No newline at end of file +__version__ = "0.16.1dev" \ No newline at end of file diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index b1ce319740..ac8485ab92 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -1,5 +1,5 @@ from abc import ABCMeta, abstractmethod -from collections import defaultdict +from collections import defaultdict, OrderedDict import copy import numpy as np @@ -12,11 +12,8 @@ from sklearn.pipeline import Pipeline from sklearn.utils.validation import check_random_state, check_is_fitted -from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from HPOlibConfigSpace.conditions import EqualsCondition, AbstractConjunction - -from . import components as components +from ParamSklearn import components as components +import ParamSklearn.create_searchspace_util class ParamSklearnBaseEstimator(BaseEstimator): @@ -66,15 +63,13 @@ def fit(self, X, y, fit_params=None, init_params=None): NoModelException is raised if fit() is called without specifying a classification algorithm first. """ - # TODO: perform input validation - # TODO: look if X.shape[0] == y.shape[0] - # TODO: check if the hyperparameters have been set... X, fit_params = self.pre_transform(X, y, fit_params=fit_params, init_params=init_params) self.fit_estimator(X, y, fit_params=fit_params) return self def pre_transform(self, X, y, fit_params=None, init_params=None): + # Save all transformation object in a list to create a pipeline object steps = [] @@ -86,30 +81,38 @@ def pre_transform(self, X, y, fit_params=None, init_params=None): init_params_per_method[method][param] = value # List of preprocessing steps (and their order) - preprocessors_names = ["imputation", "rescaling", - self.configuration['preprocessor']] + preprocessors_names = [preprocessor[0] for + preprocessor in self._get_pipeline()[:-1]] + for preproc_name in preprocessors_names: preproc_params = {} for instantiated_hyperparameter in self.configuration: - if not instantiated_hyperparameter.startswith(preproc_name): + if not instantiated_hyperparameter.startswith( + preproc_name + ":"): continue if self.configuration[instantiated_hyperparameter] is None: continue - name_ = instantiated_hyperparameter.split(":")[1] + name_ = instantiated_hyperparameter.split(":")[-1] preproc_params[name_] = self.configuration[ instantiated_hyperparameter] preproc_params.update(init_params_per_method[preproc_name]) + preprocessor_object = components.preprocessing_components. \ _preprocessors[preproc_name](random_state=self.random_state, **preproc_params) + + # Ducktyping... + if hasattr(preprocessor_object, 'get_components'): + preprocessor_object = preprocessor_object.choice + steps.append((preproc_name, preprocessor_object)) # Extract Estimator Hyperparameters from the configuration object - estimator_name = self.configuration[ - self._get_estimator_hyperparameter_name()] + estimator_name = self._get_pipeline()[-1][0] + estimator_object = self._get_pipeline()[-1][1] estimator_parameters = {} for instantiated_hyperparameter in self.configuration: if not instantiated_hyperparameter.startswith(estimator_name): @@ -117,18 +120,19 @@ def pre_transform(self, X, y, fit_params=None, init_params=None): if self.configuration[instantiated_hyperparameter] is None: continue - name_ = instantiated_hyperparameter.split(":")[1] + name_ = instantiated_hyperparameter.split(":")[-1] estimator_parameters[name_] = self.configuration[ instantiated_hyperparameter] estimator_parameters.update(init_params_per_method[estimator_name]) - estimator_object = self._get_estimator_components()[ - estimator_name](random_state=self.random_state, + estimator_object = estimator_object(random_state=self.random_state, **estimator_parameters) - steps.append((estimator_name, estimator_object)) - self._validate_input_X(X) - self._validate_input_Y(y) + # Ducktyping... + if hasattr(estimator_object, 'get_components'): + estimator_object = estimator_object.choice + + steps.append((estimator_name, estimator_object)) self.pipeline_ = Pipeline(steps) if fit_params is None or not isinstance(fit_params, dict): @@ -161,65 +165,6 @@ def configuration_fully_fitted(self): check_is_fitted(self, 'pipeline_') return self.pipeline_.steps[-1][-1].configuration_fully_fitted() - def _validate_input_X(self, X): - # TODO: think of all possible states which can occur and how to - # handle them - """ - if not self.pipeline_[-1].handles_missing_values() or \ - (self._preprocessor is not None and not\ - self._preprocessor.handles_missing_value()): - assert_all_finite(X) - X = safe_asarray(X) - else: - raise NotImplementedError() - - if not self._estimator.handles_nominal_features() or \ - (self._preprocessor is not None and not \ - self._preprocessor.handles_nominal_features()): - if X.dtype not in (np.float64, float64, np.float32, float): - raise ValueError("Data type of X matrix is not float but %s!" - % X.dtype) - else: - raise NotImplementedError() - - if not self._estimator.handles_numeric_features() or \ - (self._preprocessor is not None and not \ - self._preprocessor.handles_numeric_features()): - raise NotImplementedError() - else: - if X.dtype not in (np.float64, float64, np.float32, float): - raise ValueError("Data type of X matrix is not float but %s!" - % X.dtype) - """ - pass - - def _validate_input_Y(self, Y): - """ - Y = np.atleast_1d(Y) - if not self._estimator.handles_non_binary_classes() or \ - (self._preprocessor is not None and not \ - self._preprocessor.handles_non_binary_classes()): - unique = np.unique(Y) - if unique > 2: - raise ValueError("Estimator %s which only handles binary " - "classes cannot handle %d unique values" % - (self._estimator, unique)) - else: - pass - - if len(Y.shape) > 1: - raise NotImplementedError() - """ - pass - - def add_model_class(self, model): - """ - Raises - ------ - NotImplementedError - """ - raise NotImplementedError() - def predict(self, X, batch_size=None): """Predict the classes using the selected model. @@ -239,7 +184,6 @@ def predict(self, X, batch_size=None): # TODO check if fit() was called before... if batch_size is None: - self._validate_input_X(X) return self.pipeline_.predict(X) else: if type(batch_size) is not int or batch_size <= 0: @@ -262,13 +206,8 @@ def predict(self, X, batch_size=None): return y @classmethod - def get_hyperparameter_search_space(cls, estimator_name, - default_estimator, - estimator_components, - default_preprocessor, - preprocessor_components, - dataset_properties, - always_active): + def get_hyperparameter_search_space(cls, include=None, exclude=None, + dataset_properties=None): """Return the configuration space for the CASH problem. This method should be called by the method @@ -307,107 +246,120 @@ def get_hyperparameter_search_space(cls, estimator_name, The configuration space describing the ParamSklearnClassifier. """ + raise NotImplementedError() - cs = ConfigurationSpace() - - available_estimators = estimator_components - available_preprocessors = preprocessor_components - - if default_estimator is None: - default_estimator = available_estimators.keys()[0] - - estimator = CategoricalHyperparameter(estimator_name, - available_estimators.keys(), default=default_estimator) - cs.add_hyperparameter(estimator) - for name in available_estimators.keys(): - - # We have to retrieve the configuration space every time because - # we change the objects it returns. If we reused it, we could not - # retrieve the conditions further down - # TODO implement copy for hyperparameters and forbidden and - # conditions! - - estimator_configuration_space = available_estimators[name]. \ - get_hyperparameter_search_space(dataset_properties) - for parameter in estimator_configuration_space.get_hyperparameters(): - new_parameter = copy.deepcopy(parameter) - new_parameter.name = "%s:%s" % (name, new_parameter.name) - cs.add_hyperparameter(new_parameter) - # We must only add a condition if the hyperparameter is not - # conditional on something else - if len(estimator_configuration_space. - get_parents_of(parameter)) == 0: - condition = EqualsCondition(new_parameter, estimator, name) - cs.add_condition(condition) - - for condition in available_estimators[name]. \ - get_hyperparameter_search_space(dataset_properties).get_conditions(): - dlcs = condition.get_descendant_literal_conditions() - for dlc in dlcs: - if not dlc.child.name.startswith(name): - dlc.child.name = "%s:%s" % (name, dlc.child.name) - if not dlc.parent.name.startswith(name): - dlc.parent.name = "%s:%s" % (name, dlc.parent.name) - cs.add_condition(condition) - - for forbidden_clause in available_estimators[name]. \ - get_hyperparameter_search_space(dataset_properties).forbidden_clauses: - dlcs = forbidden_clause.get_descendant_literal_clauses() - for dlc in dlcs: - if not dlc.hyperparameter.name.startswith(name): - dlc.hyperparameter.name = "%s:%s" % (name, - dlc.hyperparameter.name) - cs.add_forbidden_clause(forbidden_clause) - - preprocessor_choices = filter(lambda app: app not in always_active, - available_preprocessors.keys()) - preprocessor = CategoricalHyperparameter("preprocessor", - preprocessor_choices, default=default_preprocessor) - cs.add_hyperparameter(preprocessor) - for name in available_preprocessors.keys(): - preprocessor_configuration_space = available_preprocessors[name]. \ - get_hyperparameter_search_space(dataset_properties) - for parameter in preprocessor_configuration_space.get_hyperparameters(): - new_parameter = copy.deepcopy(parameter) - new_parameter.name = "%s:%s" % (name, new_parameter.name) - cs.add_hyperparameter(new_parameter) - # We must only add a condition if the hyperparameter is not - # conditional on something else - if len(preprocessor_configuration_space. - get_parents_of( - parameter)) == 0 and name not in always_active: - condition = EqualsCondition(new_parameter, preprocessor, - name) - cs.add_condition(condition) - - for condition in available_preprocessors[name]. \ - get_hyperparameter_search_space(dataset_properties).get_conditions(): - if not isinstance(condition, AbstractConjunction): - dlcs = [condition] + @classmethod + def _get_hyperparameter_search_space(cls, cs, dataset_properties, exclude, + include, pipeline): + for node_0_idx, node_1_idx in zip(range(len(pipeline) - 1), + range(1, len(pipeline))): + node_0_name = pipeline[node_0_idx][0] + node_1_name = pipeline[node_1_idx][0] + node_0 = pipeline[node_0_idx][1] + node_1 = pipeline[node_1_idx][1] + + node_0_include = include.get( + node_0_name) if include is not None else None + node_0_exclude = exclude.get( + node_0_name) if exclude is not None else None + node_1_include = include.get( + node_1_name) if include is not None else None + node_1_exclude = exclude.get( + node_1_name) if exclude is not None else None + + matches = ParamSklearn.create_searchspace_util.get_match_array( + node_0=node_0, node_1=node_1, node_0_include=node_0_include, + node_0_exclude=node_0_exclude, node_1_include=node_1_include, + node_1_exclude=node_1_exclude, + dataset_properties=dataset_properties, ) + + # Now we have only legal combinations at this step of the pipeline + # Simple sanity checks + assert np.sum(matches) != 0, "No valid %s/%s combination found, " \ + "probably a bug." % (node_0_name, + node_1_name) + + assert np.sum(matches) <= (matches.shape[0] * matches.shape[1]), \ + "'matches' is not binary; %s <= %d, [%d*%d]" % \ + (str(np.sum(matches)), matches.shape[0] * matches.shape[1], + matches.shape[0], matches.shape[1]) + + if np.sum(matches) < (matches.shape[0] * matches.shape[1]): + matches, node_0_list, node_1_list = \ + ParamSklearn.create_searchspace_util.sanitize_arrays( + matches=matches, node_0=node_0, node_1=node_1, + dataset_properties=dataset_properties, + node_0_include=node_0_include, + node_0_exclude=node_0_exclude, + node_1_include=node_1_include, + node_1_exclude=node_1_exclude) + + # Check if we reached a dead end + assert len(node_0_list) > 0, "No valid node 0 found" + assert len(node_1_list) > 0, "No valid node 1 found" + + # Check for inconsistencies + assert len(node_0_list) == matches.shape[0], \ + "Node 0 deleting went wrong" + assert len(node_1_list) == matches.shape[1], \ + "Node 1 deleting went wrong" + else: + if hasattr(node_0, "get_components"): + node_0_list = node_0.get_available_components( + data_prop=dataset_properties, + include=node_0_include, + exclude=node_0_exclude + ) else: - dlcs = condition.get_descendent_literal_conditions() - for dlc in dlcs: - if not dlc.child.name.startswith(name): - dlc.child.name = "%s:%s" % (name, dlc.child.name) - if not dlc.parent.name.startswith(name): - dlc.parent.name = "%s:%s" % (name, dlc.parent.name) - cs.add_condition(condition) - - for forbidden_clause in available_preprocessors[name]. \ - get_hyperparameter_search_space(dataset_properties).forbidden_clauses: - dlcs = forbidden_clause.get_descendant_literal_clauses() - for dlc in dlcs: - if not dlc.hyperparameter.name.startswith(name): - dlc.hyperparameter.name = "%s:%s" % (name, - dlc.hyperparameter.name) - cs.add_forbidden_clause(forbidden_clause) + node_0_list = None + if hasattr(node_1, "get_components"): + node_1_list = node_1.get_available_components( + data_prop=dataset_properties, + include=node_1_include, + exclude=node_1_exclude + ) + else: + node_1_list = None + + if hasattr(node_0, "get_components"): + node_0_name += ":__choice__" + if node_0_idx == 0: + if hasattr(node_0, "get_components"): + cs.add_configuration_space(node_0_name, + node_0.get_hyperparameter_search_space( + dataset_properties, + include=node_0_list)) + else: + cs.add_configuration_space(node_0_name, + node_0.get_hyperparameter_search_space( + dataset_properties)) + + if hasattr(node_1, "get_components"): + cs.add_configuration_space(node_1_name, + node_1.get_hyperparameter_search_space( + dataset_properties, + include=node_1_list)) + node_1_name += ":__choice__" + else: + cs.add_configuration_space(node_1_name, + node_1.get_hyperparameter_search_space( + dataset_properties)) + + # And now add forbidden parameter configurations + # According to matches + if np.sum(matches) < (matches.shape[0] * matches.shape[1]): + cs = ParamSklearn.create_searchspace_util.add_forbidden( + conf_space=cs, node_0_list=node_0_list, + node_1_list=node_1_list, matches=matches, + node_0_name=node_0_name, node_1_name=node_1_name) return cs @staticmethod - def _get_estimator_hyperparameter_name(): - pass + def _get_pipeline(): + raise NotImplementedError() + + def _get_estimator_hyperparameter_name(self): + raise NotImplementedError() + - @staticmethod - def _get_estimator_components(): - pass \ No newline at end of file diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 802afbe51c..21ec5e9a4b 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -5,8 +5,8 @@ from sklearn.base import ClassifierMixin -from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause -from HPOlibConfigSpace.forbidden import ForbiddenAndConjunction +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction from ParamSklearn import components as components from ParamSklearn.base import ParamSklearnBaseEstimator @@ -68,8 +68,8 @@ def pre_transform(self, X, y, fit_params=None, init_params=None): if self.configuration['balancing:strategy'] == 'weighting': balancing = Balancing(strategy='weighting') init_params, fit_params = balancing.get_weights( - y, self.configuration['classifier'], - self.configuration['preprocessor'], + y, self.configuration['classifier:__choice__'], + self.configuration['preprocessor:__choice__'], init_params, fit_params) X, fit_params = super(ParamSklearnClassifier, self).pre_transform( @@ -94,7 +94,6 @@ def predict_proba(self, X, batch_size=None): array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) """ if batch_size is None: - self._validate_input_X(X) Xt = X for name, transform in self.pipeline_.steps[:-1]: Xt = transform.transform(Xt) @@ -139,41 +138,9 @@ def predict_proba(self, X, batch_size=None): return y @classmethod - def get_available_components(cls, available_comp, data_prop, inc, exc): - components_dict = OrderedDict() - for name in available_comp: - if inc is not None and name not in inc: - continue - elif exc is not None and name in exc: - continue - - entry = available_comp[name] - if entry.get_properties()['handles_classification'] is False: - continue - if data_prop.get('multiclass') is True and entry.get_properties()[ - 'handles_multiclass'] is False: - continue - if data_prop.get('multilabel') is True and available_comp[name]. \ - get_properties()['handles_multilabel'] is False: - continue - components_dict[name] = entry - - return components_dict - - @classmethod - def get_hyperparameter_search_space(cls, include_estimators=None, - exclude_estimators=None, - include_preprocessors=None, - exclude_preprocessors=None, + def get_hyperparameter_search_space(cls, include=None, exclude=None, dataset_properties=None): - - if include_estimators is not None and exclude_estimators is not None: - raise ValueError("The arguments include_estimators and " - "exclude_estimators cannot be used together.") - - if include_preprocessors is not None and exclude_preprocessors is not None: - raise ValueError("The arguments include_preprocessors and " - "exclude_preprocessors cannot be used together.") + cs = ConfigurationSpace() if dataset_properties is None or not isinstance(dataset_properties, dict): dataset_properties = dict() @@ -182,127 +149,30 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # This dataset is probaby dense dataset_properties['sparse'] = False - # Compile a list of legal preprocessors for this problem - available_preprocessors = components.preprocessing_components._preprocessors - preprocessors = ParamSklearnClassifier.get_available_components( - available_comp=available_preprocessors, - data_prop=dataset_properties, - inc=include_preprocessors, - exc=exclude_preprocessors) - - # Compile a list of all estimator objects for this problem - available_classifiers = ParamSklearnClassifier._get_estimator_components() - classifiers = ParamSklearnClassifier.get_available_components( - available_comp=available_classifiers, - data_prop=dataset_properties, - inc=include_estimators, - exc=exclude_estimators) - - if len(classifiers) == 0: - raise ValueError("No classifiers found") - if len(preprocessors) == 0: - raise ValueError("No preprocessors found, please add NoPreprocessing") - - preprocessors_list = preprocessors.keys() - classifiers_list = classifiers.keys() - matches = ParamSklearn.create_searchspace_util.get_match_array( - preprocessors=preprocessors, estimators=classifiers, - sparse=dataset_properties.get('sparse'), pipeline=cls._get_pipeline()) - - # Now we have only legal preprocessors/classifiers we combine them - # Simple sanity checks - assert np.sum(matches) != 0, "No valid preprocessor/classifier " \ - "combination found, probably a bug" - assert np.sum(matches) <= (matches.shape[0] * matches.shape[1]), \ - "'matches' is not binary; %s <= %d, [%d*%d]" % \ - (str(np.sum(matches)), matches.shape[0]*matches.shape[1], - matches.shape[0], matches.shape[1]) - - if np.sum(matches) < (matches.shape[0] * matches.shape[1]): - matches, preprocessors_list, classifiers_list, preprocessors, classifiers = \ - ParamSklearn.create_searchspace_util.sanitize_arrays( - m=matches, preprocessors_list=preprocessors_list, - estimators_list=classifiers_list, - preprocessors=preprocessors, estimators=classifiers) - - # Sanity checks - assert len(preprocessors_list) > 0, "No valid preprocessors found" - assert len(classifiers_list) > 0, "No valid classifiers found" - - assert len(preprocessors_list) == matches.shape[0], \ - "Preprocessor deleting went wrong" - assert len(classifiers_list) == matches.shape[1], \ - "Classifier deleting went wrong" - assert [c in classifiers_list for c in classifiers] - assert [p in preprocessors_list for p in preprocessors] - - # Select the default preprocessor before the always active - # preprocessors are added, so they will not be selected as default - # preprocessors - if "no_preprocessing" in preprocessors: - preprocessor_default = "no_preprocessing" - else: - preprocessor_default = sorted(preprocessors.keys())[0] - - # Now add always present preprocessors - for name in available_preprocessors: - if name in cls._get_pipeline(): - preprocessors[name] = available_preprocessors[name] - - # Hardcode the defaults based on some educated guesses - classifier_defaults = ['random_forest', 'liblinear_svc', 'sgd', - 'libsvm_svc'] - classifier_default = None - for cd_ in classifier_defaults: - # Make sure that a classifier which can only handle dense is not - # selected as the default for a sparse dataset - if cd_ not in classifiers: - continue - no_preprocessing_idx = preprocessors_list.index(preprocessor_default) - cd_index = classifiers_list.index(cd_) - if matches[no_preprocessing_idx, cd_index] == 1: - classifier_default = cd_ - break - if classifier_default is None: - classifier_default = classifiers.keys()[0] - - # Get the configuration space - configuration_space = super(ParamSklearnClassifier, cls).\ - get_hyperparameter_search_space(estimator_name=cls._get_estimator_hyperparameter_name(), - default_estimator=classifier_default, - estimator_components=classifiers, - default_preprocessor=preprocessor_default, - preprocessor_components=preprocessors, - dataset_properties=dataset_properties, - always_active=cls._get_pipeline()) - - # And now add forbidden parameter configurations - # According to matches - configuration_space = ParamSklearn.create_searchspace_util.add_forbidden( - conf_space=configuration_space, preproc_list=preprocessors_list, - est_list=classifiers_list, matches=matches, est_type="classifier") + pipeline = cls._get_pipeline() + cs = cls._get_hyperparameter_search_space(cs, dataset_properties, + exclude, include, pipeline) + + classifiers = cs.get_hyperparameter('classifier:__choice__').choices + preprocessors = cs.get_hyperparameter('preprocessor:__choice__').choices + available_classifiers = pipeline[-1][1].get_available_components( + dataset_properties) + available_preprocessors = pipeline[-2][1].get_available_components( + dataset_properties) # A classifier which can handle sparse data after the densifier for key in classifiers: - if SPARSE in classifiers[key].get_properties()['input']: - try: - configuration_space.add_forbidden_clause( + if SPARSE in available_classifiers[key].get_properties()['input']: + if 'densifier' in preprocessors: + cs.add_forbidden_clause( ForbiddenAndConjunction( ForbiddenEqualsClause( - configuration_space.get_hyperparameter( - 'classifier'), key), + cs.get_hyperparameter( + 'classifier:__choice__'), key), ForbiddenEqualsClause( - configuration_space.get_hyperparameter( - 'preprocessor'), 'densifier') + cs.get_hyperparameter( + 'preprocessor:__choice__'), 'densifier') )) - except ValueError as e: - if e.message.startswith("Forbidden clause must be " - "instantiated with a legal " - "hyperparameter value for " - "'preprocessor"): - pass - else: - raise e # which would take too long # Combinations of non-linear models with feature learning: @@ -313,53 +183,18 @@ def get_hyperparameter_search_space(cls, include_estimators=None, feature_learning = ["kitchen_sinks", "nystroem_sampler"] for c, f in product(classifiers_, feature_learning): - if c not in classifiers_list: + if c not in classifiers: continue - if f not in preprocessors_list: + if f not in preprocessors: continue try: - configuration_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "classifier"), c), - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "preprocessor"), f))) + cs.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(cs.get_hyperparameter( + "classifier:__choice__"), c), + ForbiddenEqualsClause(cs.get_hyperparameter( + "preprocessor:__choice__"), f))) except KeyError: pass - except ValueError as e: - if "violates forbidden clause (Forbidden: classifier == %s " \ - "&& Forbidden: preprocessor == %s)" % (classifiers_, - feature_learning): - # TODO: super-hacky, build a method for that in the - # configuration space module - configuration_space._hyperparameters[ - 'classifier'].default = classifier_defaults[1] - - # We have seen empirically that tree-based models together with PCA - # don't work better than tree-based models without preprocessing - #classifiers_ = ["random_forest", "extra_trees", "gradient_boosting", - # "decision_tree"] - #for c in classifiers_: - # if c not in classifiers_list: - # continue - # try: - # configuration_space.add_forbidden_clause( - # ForbiddenAndConjunction( - # ForbiddenEqualsClause( - # configuration_space.get_hyperparameter( - # "preprocessor"), "pca"), - # ForbiddenEqualsClause( - # configuration_space.get_hyperparameter( - # "classifier"), c))) - # except KeyError: - # pass - # except ValueError as e: - # if e.message.startswith("Forbidden clause must be " - # "instantiated with a legal " - # "hyperparameter value for " - # "'preprocessor"): - # pass - # else: - # raise e # Won't work # Multinomial NB etc does not work with negative values, don't use @@ -369,75 +204,88 @@ def get_hyperparameter_search_space(cls, include_estimators=None, "fast_ica", "kernel_pca", "nystroem_sampler"] scaling_strategies = ['standard', 'none', "normalize"] for c in classifiers_: - if c not in classifiers_list: + if c not in classifiers: continue for scaling_strategy in scaling_strategies: try: - configuration_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(configuration_space.get_hyperparameter( + cs.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(cs.get_hyperparameter( "rescaling:strategy"), scaling_strategy), - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "classifier"), c))) + ForbiddenEqualsClause(cs.get_hyperparameter( + "classifier:__choice__"), c))) except KeyError: pass for c, f in product(classifiers_, preproc_with_negative_X): - if c not in classifiers_list: + if c not in classifiers: continue - if f not in preprocessors_list: + if f not in preprocessors: continue try: - configuration_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "preprocessor"), f), - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "classifier"), c))) + cs.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(cs.get_hyperparameter( + "preprocessor:__choice__"), f), + ForbiddenEqualsClause(cs.get_hyperparameter( + "classifier:__choice__"), c))) except KeyError: pass # Now try to add things for which we know that they don't work forbidden_hyperparameter_combinations = \ - [("select_percentile_classification:score_func", "chi2", + [("preprocessor:select_percentile_classification:score_func", "chi2", "rescaling:strategy", "standard"), - ("select_percentile_classification:score_func", "chi2", + ("preprocessor:select_percentile_classification:score_func", "chi2", "rescaling:strategy", "normalize"), - ("select_percentile_classification:score_func", "chi2", + ("preprocessor:select_percentile_classification:score_func", "chi2", "rescaling:strategy", "none"), - ("select_rates:score_func", "chi2", + ("preprocessor:select_rates:score_func", "chi2", "rescaling:strategy", "standard"), - ("select_rates:score_func", "chi2", + ("preprocessor:select_rates:score_func", "chi2", "rescaling:strategy", "none"), - ("select_rates:score_func", "chi2", + ("preprocessor:select_rates:score_func", "chi2", "rescaling:strategy", "normalize"), - ("nystroem_sampler:kernel", 'chi2', "rescaling:strategy", + ("preprocessor:nystroem_sampler:kernel", 'chi2', "rescaling:strategy", "standard"), - ("nystroem_sampler:kernel", 'chi2', "rescaling:strategy", + ("preprocessor:nystroem_sampler:kernel", 'chi2', "rescaling:strategy", "normalize"), - ("nystroem_sampler:kernel", 'chi2', "rescaling:strategy", + ("preprocessor:nystroem_sampler:kernel", 'chi2', "rescaling:strategy", "none")] for hp_name_1, hp_value_1, hp_name_2, hp_value_2 in \ forbidden_hyperparameter_combinations: try: - configuration_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(configuration_space.get_hyperparameter( + cs.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(cs.get_hyperparameter( hp_name_1), hp_value_1), - ForbiddenEqualsClause(configuration_space.get_hyperparameter( + ForbiddenEqualsClause(cs.get_hyperparameter( hp_name_2), hp_value_2) )) except: pass - return configuration_space + return cs @staticmethod - def _get_estimator_hyperparameter_name(): + def _get_pipeline(): + steps = [] + + # Add the always active preprocessing components + steps.extend( + [["imputation", + components.preprocessing._preprocessors['imputation']], + ["rescaling", + components.preprocessing._preprocessors['rescaling']], + ["balancing", + components.preprocessing._preprocessors['balancing']]]) + + # Add the preprocessing component + steps.append(['preprocessor', + components.preprocessing._preprocessors['preprocessor']]) + + # Add the classification component + steps.append(['classifier', + components.classification_components._classifiers['classifier']]) + return steps + + def _get_estimator_hyperparameter_name(self): return "classifier" - @staticmethod - def _get_estimator_components(): - return components.classification_components._classifiers - - @staticmethod - def _get_pipeline(): - return ["imputation", "rescaling", "balancing", "__preprocessor__", - "__estimator__"] \ No newline at end of file diff --git a/ParamSklearn/components/__init__.py b/ParamSklearn/components/__init__.py index f146c07fb4..8485d27b68 100644 --- a/ParamSklearn/components/__init__.py +++ b/ParamSklearn/components/__init__.py @@ -39,3 +39,6 @@ from . import classification as classification_components from . import regression as regression_components from . import preprocessing as preprocessing_components + + + diff --git a/ParamSklearn/components/base.py b/ParamSklearn/components/base.py new file mode 100644 index 0000000000..9fd15a1934 --- /dev/null +++ b/ParamSklearn/components/base.py @@ -0,0 +1,360 @@ +class ParamSklearnClassificationAlgorithm(object): + """Provide an abstract interface for classification algorithms in + ParamSklearn. + + Make a subclass of this and put it into the directory + `ParamSklearn/components/classification` to make it available.""" + + def __init__(self): + self.estimator = None + self.properties = None + + @staticmethod + def get_properties(): + """Get the properties of the underlying algorithm. These are: + + * Short name + * Full name + * Can the algorithm handle missing values? + (handles_missing_values : {True, False}) + * Can the algorithm handle nominal features? + (handles_nominal_features : {True, False}) + * Can the algorithm handle numerical features? + (handles_numerical_features : {True, False}) + * Does the algorithm prefer data scaled in [0,1]? + (prefers_data_scaled : {True, False} + * Does the algorithm prefer data normalized to 0-mean, 1std? + (prefers_data_normalized : {True, False} + * Can the algorithm handle multiclass-classification problems? + (handles_multiclass : {True, False}) + * Can the algorithm handle multilabel-classification problems? + (handles_multilabel : {True, False} + * Is the algorithm deterministic for a given seed? + (is_deterministic : {True, False) + * Can the algorithm handle sparse data? + (handles_sparse : {True, False} + * What are the preferred types of the data array? + (preferred_dtype : list of tuples) + + Returns + ------- + dict + """ + raise NotImplementedError() + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + """Return the configuration space of this classification algorithm. + + Returns + ------- + HPOlibConfigspace.configuration_space.ConfigurationSpace + The configuration space of this classification algorithm. + """ + raise NotImplementedError() + + def fit(self, X, y): + """The fit function calls the fit function of the underlying + scikit-learn model and returns `self`. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + Training data + + y : array-like, shape = [n_samples] + + Returns + ------- + self : returns an instance of self. + Targets + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" + raise NotImplementedError() + + def predict(self, X): + """The predict function calls the predict function of the + underlying scikit-learn model and returns an array with the predictions. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape = (n_samples,) + Returns the predicted values + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" + raise NotImplementedError() + + def predict_proba(self, X): + """Predict probabilities. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) + """ + raise NotImplementedError() + + def get_estimator(self): + """Return the underlying estimator object. + + Returns + ------- + estimator : the underlying estimator object + """ + return self.estimator + + def __str__(self): + name = self.get_properties()['name'] + return "ParamSklearn %s" % name + + +class ParamSklearnPreprocessingAlgorithm(object): + """Provide an abstract interface for preprocessing algorithms in + ParamSklearn. + + Make a subclass of this and put it into the directory + `ParamSklearn/components/preprocessing` to make it available.""" + + def __init__(self): + self.preprocessor = None + + @staticmethod + def get_properties(): + """Get the properties of the underlying algorithm. These are: + + * Short name + * Full name + * Can the algorithm handle missing values? + (handles_missing_values : {True, False}) + * Can the algorithm handle nominal features? + (handles_nominal_features : {True, False}) + * Can the algorithm handle numerical features? + (handles_numerical_features : {True, False}) + * Does the algorithm prefer data scaled in [0,1]? + (prefers_data_scaled : {True, False} + * Does the algorithm prefer data normalized to 0-mean, 1std? + (prefers_data_normalized : {True, False} + * Can preprocess regression data? + (handles_regression : {True, False} + * Can preprocess classification data? + (handles_classification : {True, False} + * Can the algorithm handle multiclass-classification problems? + (handles_multiclass : {True, False}) + * Can the algorithm handle multilabel-classification problems? + (handles_multilabel : {True, False} + * Is the algorithm deterministic for a given seed? + (is_deterministic : {True, False) + * Can the algorithm handle sparse data? + (handles_sparse : {True, False} + * What are the preferred types of the data array? + (preferred_dtype : list of tuples) + + Returns + ------- + dict + """ + raise NotImplementedError() + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + """Return the configuration space of this preprocessing algorithm. + + Returns + ------- + HPOlibConfigspace.configuration_space.ConfigurationSpace + The configuration space of this preprocessing algorithm. + """ + raise NotImplementedError() + + def fit(self, X, Y): + """The fit function calls the fit function of the underlying + scikit-learn preprocessing algorithm and returns `self`. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + Training data + + y : array-like, shape = [n_samples] + + Returns + ------- + self : returns an instance of self. + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" + raise NotImplementedError() + + def transform(self, X): + """The transform function calls the transform function of the + underlying scikit-learn model and returns the transformed array. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + X : array + Return the transformed training data + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" + raise NotImplementedError() + + def get_preprocessor(self): + """Return the underlying preprocessor object. + + Returns + ------- + preprocessor : the underlying preprocessor object + """ + return self.preprocessor + + def __str__(self): + name = self.get_properties()['name'] + return "ParamSklearn %" % name + + +class ParamSklearnRegressionAlgorithm(object): + """Provide an abstract interface for regression algorithms in + ParamSklearn. + + Make a subclass of this and put it into the directory + `ParamSklearn/components/regression` to make it available.""" + + def __init__(self): + self.estimator = None + self.properties = None + + @staticmethod + def get_properties(): + """Get the properties of the underlying algorithm. These are: + + * Short name + * Full name + * Can the algorithm handle missing values? + (handles_missing_values : {True, False}) + * Can the algorithm handle nominal features? + (handles_nominal_features : {True, False}) + * Can the algorithm handle numerical features? + (handles_numerical_features : {True, False}) + * Does the algorithm prefer data scaled in [0,1]? + (prefers_data_scaled : {True, False} + * Does the algorithm prefer data normalized to 0-mean, 1std? + (prefers_data_normalized : {True, False} + * Is the algorithm deterministic for a given seed? + (is_deterministic : {True, False) + * Can the algorithm handle sparse data? + (handles_sparse : {True, False} + * What are the preferred types of the data array? + (preferred_dtype : list of tuples) + + Returns + ------- + dict + """ + raise NotImplementedError() + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + """Return the configuration space of this regression algorithm. + + Returns + ------- + HPOlibConfigspace.configuration_space.ConfigurationSpace + The configuration space of this regression algorithm. + """ + raise NotImplementedError() + + def fit(self, X, y): + """The fit function calls the fit function of the underlying + scikit-learn model and returns `self`. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + Training data + + y : array-like, shape = [n_samples] + + Returns + ------- + self : returns an instance of self. + Targets + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" + raise NotImplementedError() + + def predict(self, X): + """The predict function calls the predict function of the + underlying scikit-learn model and returns an array with the predictions. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape = (n_samples,) + Returns the predicted values + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" + raise NotImplementedError() + + def predict_proba(self, X): + """Predict probabilities. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) + """ + raise NotImplementedError() + + def get_estimator(self): + """Return the underlying estimator object. + + Returns + ------- + estimator : the underlying estimator object + """ + return self.estimator + + def __str__(self): + name = self.get_properties()['name'] + return "ParamSklearn %" % name + + diff --git a/ParamSklearn/components/classification/__init__.py b/ParamSklearn/components/classification/__init__.py index abb44e479b..09abc1bb53 100644 --- a/ParamSklearn/components/classification/__init__.py +++ b/ParamSklearn/components/classification/__init__.py @@ -1,11 +1,18 @@ __author__ = 'feurerm' +from collections import OrderedDict +import copy import inspect import os import pkgutil import sys -from ..classification_base import ParamSklearnClassificationAlgorithm +from ..base import ParamSklearnClassificationAlgorithm +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition + +from sklearn.base import BaseEstimator classifier_directory = os.path.split(__file__)[0] _classifiers = {} @@ -23,3 +30,136 @@ # but not the real target classifier classifier = obj _classifiers[module_name] = classifier + + +class ClassifierChoice(object): + def __init__(self, **params): + choice = params['__choice__'] + del params['__choice__'] + self.choice = self.get_components()[choice](**params) + + @classmethod + def get_components(cls): + return _classifiers + + @classmethod + def get_available_components(cls, data_prop, + include=None, + exclude=None): + available_comp = cls.get_components() + components_dict = OrderedDict() + + if include is not None and exclude is not None: + raise ValueError("The argument include and exclude cannot be used together.") + + for name in available_comp: + if include is not None and name not in include: + continue + elif exclude is not None and name in exclude: + continue + + entry = available_comp[name] + + # Avoid infinite loop + if entry == ClassifierChoice: + continue + + if entry.get_properties()['handles_classification'] is False: + continue + if data_prop.get('multiclass') is True and entry.get_properties()[ + 'handles_multiclass'] is False: + continue + if data_prop.get('multilabel') is True and available_comp[name]. \ + get_properties()['handles_multilabel'] is False: + continue + components_dict[name] = entry + + return components_dict + + @classmethod + def get_hyperparameter_search_space(cls, dataset_properties, + default=None, + include=None, + exclude=None): + if include is not None and exclude is not None: + raise ValueError("The arguments include_estimators and " + "exclude_estimators cannot be used together.") + + cs = ConfigurationSpace() + + # Compile a list of all estimator objects for this problem + available_estimators = cls.get_available_components( + data_prop=dataset_properties, + include=include, + exclude=exclude) + + if len(available_estimators) == 0: + raise ValueError("No classifiers found") + + if default is None: + defaults = ['random_forest', 'liblinear_svc', 'sgd', + 'libsvm_svc'] + available_estimators.keys() + for default_ in defaults: + if default_ in available_estimators: + if include is not None and default_ not in include: + continue + if exclude is not None and default_ in exclude: + continue + default = default_ + break + + estimator = CategoricalHyperparameter('__choice__', + available_estimators.keys(), + default=default) + cs.add_hyperparameter(estimator) + for estimator_name in available_estimators.keys(): + + # We have to retrieve the configuration space every time because + # we change the objects it returns. If we reused it, we could not + # retrieve the conditions further down + # TODO implement copy for hyperparameters and forbidden and + # conditions! + + estimator_configuration_space = available_estimators[ + estimator_name]. \ + get_hyperparameter_search_space(dataset_properties) + for parameter in estimator_configuration_space.get_hyperparameters(): + new_parameter = copy.deepcopy(parameter) + new_parameter.name = "%s:%s" % ( + estimator_name, new_parameter.name) + cs.add_hyperparameter(new_parameter) + # We must only add a condition if the hyperparameter is not + # conditional on something else + if len(estimator_configuration_space. + get_parents_of(parameter)) == 0: + condition = EqualsCondition(new_parameter, estimator, + estimator_name) + cs.add_condition(condition) + + for condition in available_estimators[estimator_name]. \ + get_hyperparameter_search_space( + dataset_properties).get_conditions(): + dlcs = condition.get_descendant_literal_conditions() + for dlc in dlcs: + if not dlc.child.name.startswith(estimator_name): + dlc.child.name = "%s:%s" % ( + estimator_name, dlc.child.name) + if not dlc.parent.name.startswith(estimator_name): + dlc.parent.name = "%s:%s" % ( + estimator_name, dlc.parent.name) + cs.add_condition(condition) + + for forbidden_clause in available_estimators[estimator_name]. \ + get_hyperparameter_search_space( + dataset_properties).forbidden_clauses: + dlcs = forbidden_clause.get_descendant_literal_clauses() + for dlc in dlcs: + if not dlc.hyperparameter.name.startswith(estimator_name): + dlc.hyperparameter.name = "%s:%s" % (estimator_name, + dlc.hyperparameter.name) + cs.add_forbidden_clause(forbidden_clause) + + return cs + + +_classifiers['classifier'] = ClassifierChoice \ No newline at end of file diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index 4974d2de75..aea98d2129 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -6,7 +6,7 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, Constant -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/ParamSklearn/components/classification/bernoulli_nb.py index 9290b5612f..41d0e82637 100644 --- a/ParamSklearn/components/classification/bernoulli_nb.py +++ b/ParamSklearn/components/classification/bernoulli_nb.py @@ -6,7 +6,7 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS @@ -43,6 +43,8 @@ def iterative_fit(self, X, y, n_iter=1, refit=False): for iter in range(n_iter): start = self.n_iter * 1000 stop = (self.n_iter + 1) * 1000 + # Upper limit, scipy.sparse doesn't seem to handle max > len(matrix) + stop = min(stop, y.shape[0]) self.estimator.partial_fit(X[start:stop], y[start:stop], self.classes_) self.n_iter += 1 diff --git a/ParamSklearn/components/classification/decision_tree.py b/ParamSklearn/components/classification/decision_tree.py index 4a4d918cfc..e6ae936d25 100644 --- a/ParamSklearn/components/classification/decision_tree.py +++ b/ParamSklearn/components/classification/decision_tree.py @@ -5,7 +5,7 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ParamSklearn.components.classification_base import \ +from ParamSklearn.components.base import \ ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS # get our own forests to replace the sklearn ones diff --git a/ParamSklearn/components/classification/extra_trees.py b/ParamSklearn/components/classification/extra_trees.py index 4766651072..318895e91e 100644 --- a/ParamSklearn/components/classification/extra_trees.py +++ b/ParamSklearn/components/classification/extra_trees.py @@ -5,7 +5,7 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS from sklearn.ensemble import ExtraTreesClassifier as ETC diff --git a/ParamSklearn/components/classification/gaussian_nb.py b/ParamSklearn/components/classification/gaussian_nb.py index 6d5c6d24e8..caf5d6d717 100644 --- a/ParamSklearn/components/classification/gaussian_nb.py +++ b/ParamSklearn/components/classification/gaussian_nb.py @@ -3,7 +3,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS diff --git a/ParamSklearn/components/classification/gradient_boosting.py b/ParamSklearn/components/classification/gradient_boosting.py index 29b5028cf3..f0afff226c 100644 --- a/ParamSklearn/components/classification/gradient_boosting.py +++ b/ParamSklearn/components/classification/gradient_boosting.py @@ -5,7 +5,7 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, UnParametrizedHyperparameter, Constant -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS @@ -63,6 +63,9 @@ def fit(self, X, y, sample_weight=None, refit=False): return self def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): + # Special fix for gradient boosting! + if isinstance(X, np.ndarray): + X = np.ascontiguousarray(X, dtype=X.dtype) if refit: self.estimator = None diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index 7c1bb929e5..aab06ebe34 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -5,7 +5,7 @@ Constant, UnParametrizedHyperparameter, UniformIntegerHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS diff --git a/ParamSklearn/components/classification/lda.py b/ParamSklearn/components/classification/lda.py index 6093f3bc23..a8dbab4b21 100644 --- a/ParamSklearn/components/classification/lda.py +++ b/ParamSklearn/components/classification/lda.py @@ -4,7 +4,7 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter -from ParamSklearn.components.classification_base import \ +from ParamSklearn.components.base import \ ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS from ParamSklearn.implementations.util import softmax diff --git a/ParamSklearn/components/classification/liblinear_svc.py b/ParamSklearn/components/classification/liblinear_svc.py index ef91476214..e47fd255ec 100644 --- a/ParamSklearn/components/classification/liblinear_svc.py +++ b/ParamSklearn/components/classification/liblinear_svc.py @@ -6,7 +6,7 @@ from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ ForbiddenAndConjunction -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.implementations.util import softmax from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index 3f88cb288e..ece46fa699 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -6,7 +6,7 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py index 16559356bd..243668ae45 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -5,7 +5,7 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS diff --git a/ParamSklearn/components/classification/passive_aggresive.py b/ParamSklearn/components/classification/passive_aggresive.py index 1fd0bd3235..9f91ffdcdb 100644 --- a/ParamSklearn/components/classification/passive_aggresive.py +++ b/ParamSklearn/components/classification/passive_aggresive.py @@ -6,7 +6,7 @@ CategoricalHyperparameter, UnParametrizedHyperparameter, \ UniformIntegerHyperparameter -from ParamSklearn.components.classification_base import \ +from ParamSklearn.components.base import \ ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS from ParamSklearn.implementations.util import softmax diff --git a/ParamSklearn/components/classification/proj_logit.py b/ParamSklearn/components/classification/proj_logit.py index e0b1cbeb4a..1fdfeccb95 100644 --- a/ParamSklearn/components/classification/proj_logit.py +++ b/ParamSklearn/components/classification/proj_logit.py @@ -5,7 +5,7 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS from ParamSklearn.implementations import ProjLogit @@ -17,7 +17,6 @@ def __init__(self, max_epochs = 2, random_state=None, n_jobs=1): self.max_epochs = max_epochs self.estimator = None - def fit(self, X, Y): self.estimator = ProjLogit.ProjLogit(max_epochs = int(self.max_epochs)) self.estimator.fit(X, Y) diff --git a/ParamSklearn/components/classification/qda.py b/ParamSklearn/components/classification/qda.py index 79af01cd7f..8bbeb9472f 100644 --- a/ParamSklearn/components/classification/qda.py +++ b/ParamSklearn/components/classification/qda.py @@ -3,7 +3,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter -from ParamSklearn.components.classification_base import \ +from ParamSklearn.components.base import \ ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS from ParamSklearn.implementations.util import softmax diff --git a/ParamSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py index 6e90856e55..cea21a8864 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -6,8 +6,8 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS, SPARSE # get our own forests to replace the sklearn ones #from ParamSklearn.implementations import forest @@ -113,8 +113,8 @@ def get_properties(): 'handles_multiclass': True, 'handles_multilabel': True, 'is_deterministic': True, - 'handles_sparse': False, - 'input': (DENSE, ), + 'handles_sparse': True, + 'input': (DENSE, SPARSE), 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/classification/ridge.py b/ParamSklearn/components/classification/ridge.py index a9270285d1..99f6b3ef6d 100644 --- a/ParamSklearn/components/classification/ridge.py +++ b/ParamSklearn/components/classification/ridge.py @@ -6,7 +6,7 @@ UniformIntegerHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition -from ParamSklearn.components.classification_base import \ +from ParamSklearn.components.base import \ ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS from ParamSklearn.implementations.util import softmax diff --git a/ParamSklearn/components/classification/sgd.py b/ParamSklearn/components/classification/sgd.py index 9684953c94..b8f1ed0431 100644 --- a/ParamSklearn/components/classification/sgd.py +++ b/ParamSklearn/components/classification/sgd.py @@ -6,7 +6,7 @@ UniformIntegerHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS from ParamSklearn.implementations.util import softmax diff --git a/ParamSklearn/components/classification_base.py b/ParamSklearn/components/classification_base.py deleted file mode 100644 index 63a0179006..0000000000 --- a/ParamSklearn/components/classification_base.py +++ /dev/null @@ -1,122 +0,0 @@ -class ParamSklearnClassificationAlgorithm(object): - """Provide an abstract interface for classification algorithms in - ParamSklearn. - - Make a subclass of this and put it into the directory - `ParamSklearn/components/classification` to make it available.""" - def __init__(self): - self.estimator = None - self.properties = None - - @staticmethod - def get_properties(): - """Get the properties of the underlying algorithm. These are: - - * Short name - * Full name - * Can the algorithm handle missing values? - (handles_missing_values : {True, False}) - * Can the algorithm handle nominal features? - (handles_nominal_features : {True, False}) - * Can the algorithm handle numerical features? - (handles_numerical_features : {True, False}) - * Does the algorithm prefer data scaled in [0,1]? - (prefers_data_scaled : {True, False} - * Does the algorithm prefer data normalized to 0-mean, 1std? - (prefers_data_normalized : {True, False} - * Can the algorithm handle multiclass-classification problems? - (handles_multiclass : {True, False}) - * Can the algorithm handle multilabel-classification problems? - (handles_multilabel : {True, False} - * Is the algorithm deterministic for a given seed? - (is_deterministic : {True, False) - * Can the algorithm handle sparse data? - (handles_sparse : {True, False} - * What are the preferred types of the data array? - (preferred_dtype : list of tuples) - - Returns - ------- - dict - """ - raise NotImplementedError() - - @staticmethod - def get_hyperparameter_search_space(dataset_properties=None): - """Return the configuration space of this classification algorithm. - - Returns - ------- - HPOlibConfigspace.configuration_space.ConfigurationSpace - The configuration space of this classification algorithm. - """ - raise NotImplementedError() - - def fit(self, X, y): - """The fit function calls the fit function of the underlying - scikit-learn model and returns `self`. - - Parameters - ---------- - X : array-like, shape = (n_samples, n_features) - Training data - - y : array-like, shape = [n_samples] - - Returns - ------- - self : returns an instance of self. - Targets - - Notes - ----- - Please see the `scikit-learn API documentation - `_ for further information.""" - raise NotImplementedError() - - def predict(self, X): - """The predict function calls the predict function of the - underlying scikit-learn model and returns an array with the predictions. - - Parameters - ---------- - X : array-like, shape = (n_samples, n_features) - - Returns - ------- - array, shape = (n_samples,) - Returns the predicted values - - Notes - ----- - Please see the `scikit-learn API documentation - `_ for further information.""" - raise NotImplementedError() - - def predict_proba(self, X): - """Predict probabilities. - - Parameters - ---------- - X : array-like, shape = (n_samples, n_features) - - Returns - ------- - array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) - """ - raise NotImplementedError() - - def get_estimator(self): - """Return the underlying estimator object. - - Returns - ------- - estimator : the underlying estimator object - """ - return self.estimator - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/preprocessing/__init__.py b/ParamSklearn/components/preprocessing/__init__.py index 7617039858..2d22d37e20 100644 --- a/ParamSklearn/components/preprocessing/__init__.py +++ b/ParamSklearn/components/preprocessing/__init__.py @@ -1,11 +1,17 @@ __author__ = 'feurerm' +from collections import OrderedDict +import copy import inspect import os import pkgutil import sys -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ..base import ParamSklearnPreprocessingAlgorithm +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition, AbstractConjunction + preprocessors_directory = os.path.split(__file__)[0] _preprocessors = {} @@ -23,3 +29,126 @@ # but not the real target classifier preprocessor = obj _preprocessors[module_name] = preprocessor + + +class PreprocessorChoice(object): + def __init__(self, **params): + choice = params['__choice__'] + del params['__choice__'] + self.choice = self.get_components()[choice](**params) + + @classmethod + def get_components(cls): + return _preprocessors + + @classmethod + def get_available_components(cls, data_prop, + include=None, + exclude=None): + if include is not None and exclude is not None: + raise ValueError( + "The argument include and exclude cannot be used together.") + + available_comp = cls.get_components() + + components_dict = OrderedDict() + for name in available_comp: + if include is not None and name not in include: + continue + elif exclude is not None and name in exclude: + continue + + entry = available_comp[name] + + # Exclude itself to avoid infinite loop + if entry == PreprocessorChoice or hasattr(entry, 'get_components'): + continue + + if entry.get_properties()['handles_classification'] is False: + continue + if data_prop.get('multiclass') is True and entry.get_properties()[ + 'handles_multiclass'] is False: + continue + if data_prop.get('multilabel') is True and available_comp[name]. \ + get_properties()['handles_multilabel'] is False: + continue + components_dict[name] = entry + + always_active = ["imputation", "rescaling", "balancing"] + components_dict = {key: value for key, value + in components_dict.items() + if key not in always_active} + + return components_dict + + @classmethod + def get_hyperparameter_search_space(cls, dataset_properties, + default=None, + include=None, + exclude=None): + cs = ConfigurationSpace() + + # Compile a list of legal preprocessors for this problem + available_preprocessors = cls.get_available_components( + data_prop=dataset_properties, + include=include, exclude=exclude) + + if len(available_preprocessors) == 0: + raise ValueError( + "No preprocessors found, please add NoPreprocessing") + + if default is None: + defaults = ['no_preprocessing', 'select_percentile', 'pca', + 'truncatedSVD'] + for default_ in defaults: + if default_ in available_preprocessors: + default = default_ + break + + preprocessor = CategoricalHyperparameter('__choice__', + available_preprocessors.keys(), + default=default) + cs.add_hyperparameter(preprocessor) + for name in available_preprocessors: + preprocessor_configuration_space = available_preprocessors[name]. \ + get_hyperparameter_search_space(dataset_properties) + for parameter in preprocessor_configuration_space.get_hyperparameters(): + new_parameter = copy.deepcopy(parameter) + new_parameter.name = "%s:%s" % (name, new_parameter.name) + cs.add_hyperparameter(new_parameter) + # We must only add a condition if the hyperparameter is not + # conditional on something else + if len(preprocessor_configuration_space. + get_parents_of(parameter)) == 0: + condition = EqualsCondition(new_parameter, preprocessor, + name) + cs.add_condition(condition) + + for condition in available_preprocessors[name]. \ + get_hyperparameter_search_space( + dataset_properties).get_conditions(): + if not isinstance(condition, AbstractConjunction): + dlcs = [condition] + else: + dlcs = condition.get_descendent_literal_conditions() + for dlc in dlcs: + if not dlc.child.name.startswith(name): + dlc.child.name = "%s:%s" % (name, dlc.child.name) + if not dlc.parent.name.startswith(name): + dlc.parent.name = "%s:%s" % (name, dlc.parent.name) + cs.add_condition(condition) + + for forbidden_clause in available_preprocessors[name]. \ + get_hyperparameter_search_space( + dataset_properties).forbidden_clauses: + dlcs = forbidden_clause.get_descendant_literal_clauses() + for dlc in dlcs: + if not dlc.hyperparameter.name.startswith(name): + dlc.hyperparameter.name = "%s:%s" % (name, + dlc.hyperparameter.name) + cs.add_forbidden_clause(forbidden_clause) + + return cs + + +_preprocessors['preprocessor'] = PreprocessorChoice \ No newline at end of file diff --git a/ParamSklearn/components/preprocessing/balancing.py b/ParamSklearn/components/preprocessing/balancing.py index ba25498996..f9434e7a5b 100644 --- a/ParamSklearn/components/preprocessing/balancing.py +++ b/ParamSklearn/components/preprocessing/balancing.py @@ -3,7 +3,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from ParamSklearn.components.preprocessor_base import \ +from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import DENSE, SPARSE, INPUT @@ -13,10 +13,10 @@ def __init__(self, strategy, random_state=None): self.strategy = strategy def fit(self, X, y=None): - raise NotImplementedError() + return self def transform(self, X): - raise NotImplementedError() + return X def get_weights(self, Y, classifier, preprocessor, init_params, fit_params): if init_params is None: @@ -49,18 +49,18 @@ def get_weights(self, Y, classifier, preprocessor, init_params, fit_params): sample_weights[mask] *= cw[i] if classifier in clf_: - fit_params['%s:sample_weight' % classifier] = sample_weights + fit_params['classifier:sample_weight'] = sample_weights if preprocessor in pre_: - fit_params['%s:sample_weight' % preprocessor] = sample_weights + fit_params['preprocessor:sample_weight'] = sample_weights # Classifiers which can adjust sample weights themselves via the # argument `class_weight` clf_ = ['liblinear_svc', 'libsvm_svc', 'sgd'] pre_ = ['liblinear_svc_preprocessor'] if classifier in clf_: - init_params['%s:class_weight' % classifier] = 'auto' + init_params['classifier:class_weight'] = 'auto' if preprocessor in pre_: - init_params['%s:class_weight' % preprocessor] = 'auto' + init_params['preprocessor:class_weight'] = 'auto' clf_ = ['ridge'] if classifier in clf_: @@ -74,7 +74,7 @@ def get_weights(self, Y, classifier, preprocessor, init_params, fit_params): class_weights[ue] = cw[i] if classifier in clf_: - init_params['%s:class_weight' % classifier] = class_weights + init_params['classifier:class_weight'] = class_weights return init_params, fit_params diff --git a/ParamSklearn/components/preprocessing/densifier.py b/ParamSklearn/components/preprocessing/densifier.py index 10d9c45cca..d7b9ba5d41 100644 --- a/ParamSklearn/components/preprocessing/densifier.py +++ b/ParamSklearn/components/preprocessing/densifier.py @@ -2,7 +2,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from ParamSklearn.components.preprocessor_base import \ +from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import DENSE, SPARSE diff --git a/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py b/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py index 8a8ed0f61c..db9729a97a 100644 --- a/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py +++ b/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py @@ -6,7 +6,7 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ParamSklearn.components.preprocessor_base import \ +from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import DENSE, INPUT diff --git a/ParamSklearn/components/preprocessing/fast_ica.py b/ParamSklearn/components/preprocessing/fast_ica.py index b8cf56810d..fe9b317bb9 100644 --- a/ParamSklearn/components/preprocessing/fast_ica.py +++ b/ParamSklearn/components/preprocessing/fast_ica.py @@ -8,7 +8,7 @@ from HPOlibConfigSpace.forbidden import ForbiddenInClause, \ ForbiddenAndConjunction, ForbiddenEqualsClause -from ParamSklearn.components.preprocessor_base import \ +from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import SPARSE, DENSE, INPUT diff --git a/ParamSklearn/components/preprocessing/feature_agglomeration.py b/ParamSklearn/components/preprocessing/feature_agglomeration.py index 1bcd2177b4..7a88852fd1 100644 --- a/ParamSklearn/components/preprocessing/feature_agglomeration.py +++ b/ParamSklearn/components/preprocessing/feature_agglomeration.py @@ -6,7 +6,7 @@ from HPOlibConfigSpace.forbidden import ForbiddenInClause, \ ForbiddenAndConjunction, ForbiddenEqualsClause -from ParamSklearn.components.preprocessor_base import \ +from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import SPARSE, DENSE, INPUT diff --git a/ParamSklearn/components/preprocessing/gem.py b/ParamSklearn/components/preprocessing/gem.py index 8c7deac191..6b405f8ff7 100644 --- a/ParamSklearn/components/preprocessing/gem.py +++ b/ParamSklearn/components/preprocessing/gem.py @@ -1,7 +1,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter, UniformFloatHyperparameter -from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.implementations.gem import GEM as GEMImpl from ParamSklearn.util import DENSE diff --git a/ParamSklearn/components/preprocessing/imputation.py b/ParamSklearn/components/preprocessing/imputation.py index 81f3a8274c..c9fa66fec9 100644 --- a/ParamSklearn/components/preprocessing/imputation.py +++ b/ParamSklearn/components/preprocessing/imputation.py @@ -4,7 +4,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import DENSE, SPARSE, INPUT diff --git a/ParamSklearn/components/preprocessing/kernel_pca.py b/ParamSklearn/components/preprocessing/kernel_pca.py index d46bf39508..9fe0506f1f 100644 --- a/ParamSklearn/components/preprocessing/kernel_pca.py +++ b/ParamSklearn/components/preprocessing/kernel_pca.py @@ -7,7 +7,7 @@ UniformIntegerHyperparameter, UniformFloatHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition, InCondition -from ParamSklearn.components.preprocessor_base import \ +from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import SPARSE, DENSE, INPUT diff --git a/ParamSklearn/components/preprocessing/kitchen_sinks.py b/ParamSklearn/components/preprocessing/kitchen_sinks.py index 5f72d5cf0b..3a6f854a80 100644 --- a/ParamSklearn/components/preprocessing/kitchen_sinks.py +++ b/ParamSklearn/components/preprocessing/kitchen_sinks.py @@ -4,7 +4,7 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter -from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import SPARSE, DENSE, INPUT class RandomKitchenSinks(ParamSklearnPreprocessingAlgorithm): diff --git a/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py b/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py index fc087fd619..d1a0512acb 100644 --- a/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py +++ b/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py @@ -6,7 +6,7 @@ from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ ForbiddenAndConjunction -from ParamSklearn.components.preprocessor_base import \ +from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import SPARSE, DENSE, INPUT diff --git a/ParamSklearn/components/preprocessing/no_preprocessing.py b/ParamSklearn/components/preprocessing/no_preprocessing.py index 03579743cf..466f6dc2e7 100644 --- a/ParamSklearn/components/preprocessing/no_preprocessing.py +++ b/ParamSklearn/components/preprocessing/no_preprocessing.py @@ -1,6 +1,6 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import SPARSE, DENSE, INPUT diff --git a/ParamSklearn/components/preprocessing/nystroem_sampler.py b/ParamSklearn/components/preprocessing/nystroem_sampler.py index cd30b7d595..2d4099f1cf 100644 --- a/ParamSklearn/components/preprocessing/nystroem_sampler.py +++ b/ParamSklearn/components/preprocessing/nystroem_sampler.py @@ -6,7 +6,7 @@ UniformIntegerHyperparameter, CategoricalHyperparameter from HPOlibConfigSpace.conditions import InCondition, EqualsCondition, AndConjunction -from ParamSklearn.components.preprocessor_base import \ +from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import SPARSE, DENSE, INPUT diff --git a/ParamSklearn/components/preprocessing/pca.py b/ParamSklearn/components/preprocessing/pca.py index 7455750a65..a52d2a8bdb 100644 --- a/ParamSklearn/components/preprocessing/pca.py +++ b/ParamSklearn/components/preprocessing/pca.py @@ -5,8 +5,8 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter -from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import DENSE +from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.util import DENSE, INPUT class PCA(ParamSklearnPreprocessingAlgorithm): @@ -34,6 +34,8 @@ def fit(self, X, Y=None): components = self.preprocessor.components_ self.preprocessor.components_ = components[:idx] + self.preprocessor.explained_variance_ = \ + self.preprocessor.explained_variance_[:idx] if not np.isfinite(self.preprocessor.components_).all(): raise ValueError("PCA found non-finite components.") @@ -65,7 +67,7 @@ def get_properties(): 'handles_sparse': False, 'handles_dense': True, 'input': (DENSE, ), - 'output': DENSE, + 'output': INPUT, # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/polynomial.py b/ParamSklearn/components/preprocessing/polynomial.py index 77cb819e2d..e60e6ba8f9 100644 --- a/ParamSklearn/components/preprocessing/polynomial.py +++ b/ParamSklearn/components/preprocessing/polynomial.py @@ -7,7 +7,7 @@ from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ ForbiddenAndConjunction -from ParamSklearn.components.preprocessor_base import \ +from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm from ParamSklearn.implementations.util import softmax from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS diff --git a/ParamSklearn/components/preprocessing/random_trees_embedding.py b/ParamSklearn/components/preprocessing/random_trees_embedding.py index 4640680ec9..71f93bc51f 100644 --- a/ParamSklearn/components/preprocessing/random_trees_embedding.py +++ b/ParamSklearn/components/preprocessing/random_trees_embedding.py @@ -4,7 +4,7 @@ from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter, \ UnParametrizedHyperparameter -from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import SPARSE, DENSE diff --git a/ParamSklearn/components/preprocessing/rescaling.py b/ParamSklearn/components/preprocessing/rescaling.py index b8210398be..9a93214733 100644 --- a/ParamSklearn/components/preprocessing/rescaling.py +++ b/ParamSklearn/components/preprocessing/rescaling.py @@ -1,11 +1,10 @@ - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from ParamSklearn.implementations.StandardScaler import StandardScaler from ParamSklearn.implementations.MinMaxScaler import MinMaxScaler from ParamSklearn.implementations.Normalizer import Normalizer -from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import DENSE, SPARSE, INPUT diff --git a/ParamSklearn/components/preprocessing/select_percentile_classification.py b/ParamSklearn/components/preprocessing/select_percentile_classification.py index 33718172cc..e82c82403c 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_classification.py +++ b/ParamSklearn/components/preprocessing/select_percentile_classification.py @@ -3,7 +3,7 @@ import sklearn.feature_selection -from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.components.preprocessing.select_percentile import SelectPercentileBase from ParamSklearn.util import DENSE, SPARSE, INPUT diff --git a/ParamSklearn/components/preprocessing/select_percentile_regression.py b/ParamSklearn/components/preprocessing/select_percentile_regression.py index 808eb777c7..2d18d07828 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_regression.py +++ b/ParamSklearn/components/preprocessing/select_percentile_regression.py @@ -3,7 +3,7 @@ import sklearn.feature_selection -from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.components.preprocessing.select_percentile import SelectPercentileBase from ParamSklearn.util import DENSE diff --git a/ParamSklearn/components/preprocessing/select_rates.py b/ParamSklearn/components/preprocessing/select_rates.py index 582a8d038f..dcb9e1cdde 100644 --- a/ParamSklearn/components/preprocessing/select_rates.py +++ b/ParamSklearn/components/preprocessing/select_rates.py @@ -4,7 +4,7 @@ import sklearn.feature_selection -from ParamSklearn.components.preprocessor_base import \ +from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import DENSE, SPARSE, INPUT diff --git a/ParamSklearn/components/preprocessing/tfidf.py b/ParamSklearn/components/preprocessing/tfidf.py index ea765186fd..3a4ffe4105 100644 --- a/ParamSklearn/components/preprocessing/tfidf.py +++ b/ParamSklearn/components/preprocessing/tfidf.py @@ -2,7 +2,7 @@ Configuration -from ..preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ..base import ParamSklearnPreprocessingAlgorithm import numpy as np diff --git a/ParamSklearn/components/preprocessing/truncatedSVD.py b/ParamSklearn/components/preprocessing/truncatedSVD.py index 8c22f1e81c..f6c876fbc9 100644 --- a/ParamSklearn/components/preprocessing/truncatedSVD.py +++ b/ParamSklearn/components/preprocessing/truncatedSVD.py @@ -5,7 +5,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter -from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import SPARSE, DENSE diff --git a/ParamSklearn/components/preprocessor_base.py b/ParamSklearn/components/preprocessor_base.py deleted file mode 100644 index cb6fe6727f..0000000000 --- a/ParamSklearn/components/preprocessor_base.py +++ /dev/null @@ -1,111 +0,0 @@ -class ParamSklearnPreprocessingAlgorithm(object): - """Provide an abstract interface for preprocessing algorithms in - ParamSklearn. - - Make a subclass of this and put it into the directory - `ParamSklearn/components/preprocessing` to make it available.""" - def __init__(self): - self.preprocessor = None - - @staticmethod - def get_properties(): - """Get the properties of the underlying algorithm. These are: - - * Short name - * Full name - * Can the algorithm handle missing values? - (handles_missing_values : {True, False}) - * Can the algorithm handle nominal features? - (handles_nominal_features : {True, False}) - * Can the algorithm handle numerical features? - (handles_numerical_features : {True, False}) - * Does the algorithm prefer data scaled in [0,1]? - (prefers_data_scaled : {True, False} - * Does the algorithm prefer data normalized to 0-mean, 1std? - (prefers_data_normalized : {True, False} - * Can preprocess regression data? - (handles_regression : {True, False} - * Can preprocess classification data? - (handles_classification : {True, False} - * Can the algorithm handle multiclass-classification problems? - (handles_multiclass : {True, False}) - * Can the algorithm handle multilabel-classification problems? - (handles_multilabel : {True, False} - * Is the algorithm deterministic for a given seed? - (is_deterministic : {True, False) - * Can the algorithm handle sparse data? - (handles_sparse : {True, False} - * What are the preferred types of the data array? - (preferred_dtype : list of tuples) - - Returns - ------- - dict - """ - raise NotImplementedError() - - @staticmethod - def get_hyperparameter_search_space(dataset_properties=None): - """Return the configuration space of this preprocessing algorithm. - - Returns - ------- - HPOlibConfigspace.configuration_space.ConfigurationSpace - The configuration space of this preprocessing algorithm. - """ - raise NotImplementedError() - - def fit(self, X, Y): - """The fit function calls the fit function of the underlying - scikit-learn preprocessing algorithm and returns `self`. - - Parameters - ---------- - X : array-like, shape = (n_samples, n_features) - Training data - - y : array-like, shape = [n_samples] - - Returns - ------- - self : returns an instance of self. - - Notes - ----- - Please see the `scikit-learn API documentation - `_ for further information.""" - raise NotImplementedError() - - def transform(self, X): - """The transform function calls the transform function of the - underlying scikit-learn model and returns the transformed array. - - Parameters - ---------- - X : array-like, shape = (n_samples, n_features) - - Returns - ------- - X : array - Return the transformed training data - - Notes - ----- - Please see the `scikit-learn API documentation - `_ for further information.""" - raise NotImplementedError() - - def get_preprocessor(self): - """Return the underlying preprocessor object. - - Returns - ------- - preprocessor : the underlying preprocessor object - """ - return self.preprocessor - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %" % name diff --git a/ParamSklearn/components/regression/__init__.py b/ParamSklearn/components/regression/__init__.py index 8a843f0232..051dd0de39 100644 --- a/ParamSklearn/components/regression/__init__.py +++ b/ParamSklearn/components/regression/__init__.py @@ -1,11 +1,16 @@ -__author__ = 'eggenspk' +__author__ = ['Katharina Eggensperger', 'Matthias Feurer'] +from collections import OrderedDict +import copy import inspect import os import pkgutil import sys -from ..regression_base import ParamSklearnRegressionAlgorithm +from ..base import ParamSklearnRegressionAlgorithm +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition regressor_directory = os.path.split(__file__)[0] _regressors = {} @@ -23,3 +28,129 @@ # but not the real target classifier classifier = obj _regressors[module_name] = classifier + + +class RegressorChoice(object): + def __init__(self, **params): + choice = params['__choice__'] + del params['__choice__'] + self.choice = self.get_components()[choice](**params) + + @classmethod + def get_components(cls): + return _regressors + + @classmethod + def get_available_components(cls, data_prop, + include=None, + exclude=None): + available_comp = cls.get_components() + components_dict = OrderedDict() + + if include is not None and exclude is not None: + raise ValueError( + "The argument include and exclude cannot be used together.") + + for name in available_comp: + if include is not None and name not in include: + continue + elif exclude is not None and name in exclude: + continue + + entry = available_comp[name] + + # Avoid infinite loop + if entry == RegressorChoice: + continue + + if entry.get_properties()['handles_regression'] is False: + continue + components_dict[name] = entry + + return components_dict + + @classmethod + def get_hyperparameter_search_space(cls, dataset_properties, + default=None, + include=None, + exclude=None): + if include is not None and exclude is not None: + raise ValueError("The argument include and exclude cannot be used together.") + + cs = ConfigurationSpace() + + # Compile a list of all estimator objects for this problem + available_estimators = cls.get_available_components( + data_prop=dataset_properties, + include=include, + exclude=exclude) + + if len(available_estimators) == 0: + raise ValueError("No regressors found") + + if default is None: + defaults = ['random_forest', 'support_vector_regression'] + available_estimators.keys() + for default_ in defaults: + if default_ in available_estimators: + if include is not None and default_ not in include: + continue + if exclude is not None and default_ in exclude: + continue + default = default_ + break + + estimator = CategoricalHyperparameter('__choice__', + available_estimators.keys(), + default=default) + cs.add_hyperparameter(estimator) + for estimator_name in available_estimators.keys(): + + # We have to retrieve the configuration space every time because + # we change the objects it returns. If we reused it, we could not + # retrieve the conditions further down + # TODO implement copy for hyperparameters and forbidden and + # conditions! + + estimator_configuration_space = available_estimators[ + estimator_name]. \ + get_hyperparameter_search_space(dataset_properties) + for parameter in estimator_configuration_space.get_hyperparameters(): + new_parameter = copy.deepcopy(parameter) + new_parameter.name = "%s:%s" % ( + estimator_name, new_parameter.name) + cs.add_hyperparameter(new_parameter) + # We must only add a condition if the hyperparameter is not + # conditional on something else + if len(estimator_configuration_space. + get_parents_of(parameter)) == 0: + condition = EqualsCondition(new_parameter, estimator, + estimator_name) + cs.add_condition(condition) + + for condition in available_estimators[estimator_name]. \ + get_hyperparameter_search_space( + dataset_properties).get_conditions(): + dlcs = condition.get_descendant_literal_conditions() + for dlc in dlcs: + if not dlc.child.name.startswith(estimator_name): + dlc.child.name = "%s:%s" % ( + estimator_name, dlc.child.name) + if not dlc.parent.name.startswith(estimator_name): + dlc.parent.name = "%s:%s" % ( + estimator_name, dlc.parent.name) + cs.add_condition(condition) + + for forbidden_clause in available_estimators[estimator_name]. \ + get_hyperparameter_search_space( + dataset_properties).forbidden_clauses: + dlcs = forbidden_clause.get_descendant_literal_clauses() + for dlc in dlcs: + if not dlc.hyperparameter.name.startswith(estimator_name): + dlc.hyperparameter.name = "%s:%s" % (estimator_name, + dlc.hyperparameter.name) + cs.add_forbidden_clause(forbidden_clause) + + return cs + + +_regressors['regressor'] = RegressorChoice \ No newline at end of file diff --git a/ParamSklearn/components/regression/gaussian_process.py b/ParamSklearn/components/regression/gaussian_process.py index 55a0f57ccb..82e48010d9 100644 --- a/ParamSklearn/components/regression/gaussian_process.py +++ b/ParamSklearn/components/regression/gaussian_process.py @@ -6,7 +6,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter -from ParamSklearn.components.regression_base import ParamSklearnRegressionAlgorithm +from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS diff --git a/ParamSklearn/components/regression/gradient_boosting.py b/ParamSklearn/components/regression/gradient_boosting.py index 15ad04f1bc..41cee7ff5d 100644 --- a/ParamSklearn/components/regression/gradient_boosting.py +++ b/ParamSklearn/components/regression/gradient_boosting.py @@ -5,7 +5,7 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, CategoricalHyperparameter, Constant -from ParamSklearn.components.regression_base import ParamSklearnRegressionAlgorithm +from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS diff --git a/ParamSklearn/components/regression/random_forest.py b/ParamSklearn/components/regression/random_forest.py index 40a47a960b..f9328c0216 100644 --- a/ParamSklearn/components/regression/random_forest.py +++ b/ParamSklearn/components/regression/random_forest.py @@ -5,8 +5,8 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ParamSklearn.components.regression_base import ParamSklearnRegressionAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS, SPARSE # get our own forests to replace the sklearn ones #from ParamSklearn.implementations import forest from sklearn.ensemble import RandomForestRegressor @@ -107,8 +107,8 @@ def get_properties(): 'handles_multilabel': False, 'prefers_data_normalized': False, 'is_deterministic': True, - 'handles_sparse': False, - 'input': (DENSE, ), + 'handles_sparse': True, + 'input': (DENSE, SPARSE), 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/regression/ridge_regression.py b/ParamSklearn/components/regression/ridge_regression.py index 76d0dd40fb..302848821e 100644 --- a/ParamSklearn/components/regression/ridge_regression.py +++ b/ParamSklearn/components/regression/ridge_regression.py @@ -4,7 +4,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter -from ParamSklearn.components.regression_base import ParamSklearnRegressionAlgorithm +from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS diff --git a/ParamSklearn/components/regression/support_vector_regression.py b/ParamSklearn/components/regression/support_vector_regression.py index e187b21646..69b910defe 100644 --- a/ParamSklearn/components/regression/support_vector_regression.py +++ b/ParamSklearn/components/regression/support_vector_regression.py @@ -9,7 +9,7 @@ UnParametrizedHyperparameter -from ParamSklearn.components.regression_base import ParamSklearnRegressionAlgorithm +from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS # Something is wrong here... diff --git a/ParamSklearn/components/regression_base.py b/ParamSklearn/components/regression_base.py deleted file mode 100644 index f1cfee4486..0000000000 --- a/ParamSklearn/components/regression_base.py +++ /dev/null @@ -1,118 +0,0 @@ -class ParamSklearnRegressionAlgorithm(object): - """Provide an abstract interface for regression algorithms in - ParamSklearn. - - Make a subclass of this and put it into the directory - `ParamSklearn/components/regression` to make it available.""" - def __init__(self): - self.estimator = None - self.properties = None - - @staticmethod - def get_properties(): - """Get the properties of the underlying algorithm. These are: - - * Short name - * Full name - * Can the algorithm handle missing values? - (handles_missing_values : {True, False}) - * Can the algorithm handle nominal features? - (handles_nominal_features : {True, False}) - * Can the algorithm handle numerical features? - (handles_numerical_features : {True, False}) - * Does the algorithm prefer data scaled in [0,1]? - (prefers_data_scaled : {True, False} - * Does the algorithm prefer data normalized to 0-mean, 1std? - (prefers_data_normalized : {True, False} - * Is the algorithm deterministic for a given seed? - (is_deterministic : {True, False) - * Can the algorithm handle sparse data? - (handles_sparse : {True, False} - * What are the preferred types of the data array? - (preferred_dtype : list of tuples) - - Returns - ------- - dict - """ - raise NotImplementedError() - - @staticmethod - def get_hyperparameter_search_space(dataset_properties=None): - """Return the configuration space of this regression algorithm. - - Returns - ------- - HPOlibConfigspace.configuration_space.ConfigurationSpace - The configuration space of this regression algorithm. - """ - raise NotImplementedError() - - def fit(self, X, y): - """The fit function calls the fit function of the underlying - scikit-learn model and returns `self`. - - Parameters - ---------- - X : array-like, shape = (n_samples, n_features) - Training data - - y : array-like, shape = [n_samples] - - Returns - ------- - self : returns an instance of self. - Targets - - Notes - ----- - Please see the `scikit-learn API documentation - `_ for further information.""" - raise NotImplementedError() - - def predict(self, X): - """The predict function calls the predict function of the - underlying scikit-learn model and returns an array with the predictions. - - Parameters - ---------- - X : array-like, shape = (n_samples, n_features) - - Returns - ------- - array, shape = (n_samples,) - Returns the predicted values - - Notes - ----- - Please see the `scikit-learn API documentation - `_ for further information.""" - raise NotImplementedError() - - def predict_proba(self, X): - """Predict probabilities. - - Parameters - ---------- - X : array-like, shape = (n_samples, n_features) - - Returns - ------- - array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) - """ - raise NotImplementedError() - - def get_estimator(self): - """Return the underlying estimator object. - - Returns - ------- - estimator : the underlying estimator object - """ - return self.estimator - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %" % name diff --git a/ParamSklearn/create_searchspace_util.py b/ParamSklearn/create_searchspace_util.py index b6638f824a..3c757017d0 100644 --- a/ParamSklearn/create_searchspace_util.py +++ b/ParamSklearn/create_searchspace_util.py @@ -1,4 +1,3 @@ - import numpy as np from HPOlibConfigSpace.forbidden import ForbiddenAndConjunction @@ -7,94 +6,132 @@ from ParamSklearn.util import SPARSE, DENSE, INPUT -def get_match_array(preprocessors, estimators, sparse, pipeline): - # Now select combinations that work - # We build a binary matrix, where a 1 indicates, that a combination - # work on this dataset based in the dataset and the input/output formats +def get_match_array(node_0, node_1, dataset_properties, + node_0_include=None, node_0_exclude=None, + node_1_include=None, node_1_exclude=None): + # Select combinations of nodes that work + # Three cases possible: + # * node_0 and node_1 are both nodes: + # Check if they fit together, return a (1, 1) array + # * node_0 is a node, node_1 is a composite of nodes (or vice versa) + # Check if they fit together, return a (1, n) array + # * node_0 and node_1 are both composites of nodes + # Check if they fit together, return a (n, m) array + # + # We build a binary array, where a 1 indicates, that a combination + # works on this dataset based on the dataset and the input/output formats + # # A 'zero'-row (column) is an unusable preprocessor (classifier) # A single zero results in an forbidden condition - preprocessors_list = preprocessors.keys() - estimator_list = estimators.keys() - matches = np.zeros([len(preprocessors), len(estimators)]) - for pidx, p in enumerate(preprocessors_list): - p_in = preprocessors[p].get_properties()['input'] - p_out = preprocessors[p].get_properties()['output'] - if p in pipeline: + + # Duck typing, not sure if it's good... + sparse = dataset_properties.get('sparse') + + node_0_is_choice = hasattr(node_0, "get_available_components") + node_1_is_choice = hasattr(node_1, "get_available_components") + + if node_0_is_choice: + node_0_choices = node_0.get_available_components( + dataset_properties, include=node_0_include, exclude=node_0_exclude).values() + else: + node_0_choices = [node_0] + if node_1_is_choice: + node_1_choices = node_1.get_available_components( + dataset_properties, include=node_1_include, exclude=node_1_exclude).values() + else: + node_1_choices = [node_1] + + matches = np.zeros([len(node_0_choices), len(node_1_choices)]) + + for n0_idx, n0 in enumerate(node_0_choices): + if node_0_is_choice and node_0 == n0: continue - elif sparse and SPARSE not in p_in: + + node0_in = node_0_choices[n0_idx].get_properties()['input'] + node0_out = node_0_choices[n0_idx].get_properties()['output'] + + if sparse and SPARSE not in node0_in: continue - elif not sparse and DENSE not in p_in: + elif not sparse and DENSE not in node0_in: continue - for cidx, c in enumerate(estimator_list): - c_in = estimators[c].get_properties()['input'] - if p_out == INPUT: + + for n1_idx, n1 in enumerate(node_1_choices): + if node_1_is_choice and node_1 == n1: + continue + + node1_in = n1.get_properties()['input'] + if node0_out == INPUT: # Preprocessor does not change the format - if (sparse and SPARSE in c_in) or \ - (not sparse and DENSE in c_in): + if (sparse and SPARSE in node1_in) or \ + (not sparse and DENSE in node1_in): # Estimator input = Dataset format - matches[pidx, cidx] = 1 - continue + matches[n0_idx, n1_idx] = 1 else: # These won't work - continue - elif p_out == DENSE and DENSE in c_in: - matches[pidx, cidx] = 1 - continue - elif p_out == SPARSE and SPARSE in c_in: - matches[pidx, cidx] = 1 - continue + pass + elif node0_out == DENSE and DENSE in node1_in: + matches[n0_idx, n1_idx] = 1 + elif node0_out == SPARSE and SPARSE in node1_in: + matches[n0_idx, n1_idx] = 1 else: # These won't work - continue + pass return matches -def _get_idx_to_keep(m): +def _get_idx_to_keep(matches): # Returns all rows and cols where matches contains not only zeros - keep_row = [idx for idx in range(m.shape[0]) if np.sum(m[idx, :]) != 0] - keep_col = [idx for idx in range(m.shape[1]) if np.sum(m[:, idx]) != 0] + keep_row = [idx for idx in range(matches.shape[0]) if np.sum(matches[idx, :]) != 0] + keep_col = [idx for idx in range(matches.shape[1]) if np.sum(matches[:, idx]) != 0] return keep_col, keep_row -def sanitize_arrays(m, preprocessors_list, estimators_list, - preprocessors, estimators): - assert len(preprocessors_list) == len(preprocessors.keys()) - assert len(estimators_list) == len(estimators.keys()) - assert isinstance(m, np.ndarray) - # remove components that are not usable for this problem - keep_col, keep_row = _get_idx_to_keep(m) +def sanitize_arrays(matches, node_0, node_1, dataset_properties, + node_0_include=None, node_0_exclude=None, + node_1_include=None, node_1_exclude=None): + node_0_is_choice = hasattr(node_0, "get_available_components") + node_1_is_choice = hasattr(node_1, "get_available_components") + + if not node_0_is_choice: + node_0 = [node_0] + else: + node_0 = node_0.get_available_components(dataset_properties, + include=node_0_include, + exclude=node_0_exclude).keys() + if not node_1_is_choice: + node_1 = [node_1] + else: + node_1 = node_1.get_available_components(dataset_properties, + include=node_1_include, + exclude=node_1_exclude).keys() - m = m[keep_row, :] - m = m[:, keep_col] - preproc_list = [preprocessors_list[p] for p in keep_row] - est_list = [estimators_list[p] for p in keep_col] + assert matches.shape[0] == len(node_0), (matches.shape[0], len(node_0)) + assert matches.shape[1] == len(node_1), (matches.shape[1], len(node_1)) + assert isinstance(matches, np.ndarray) + # remove components that are not usable for this problem + keep_col, keep_row = _get_idx_to_keep(matches) - new_est = dict() - for c in est_list: - new_est[c] = estimators[c] - new_preproc = dict() - for p in preproc_list: - new_preproc[p] = preprocessors[p] + matches = matches[keep_row, :] + matches = matches[:, keep_col] - assert len(new_preproc) == m.shape[0] - assert len(new_est) == m.shape[1] - return m, preproc_list, est_list, new_preproc, new_est + node_0_list = [node_0[p] for p in keep_row] + node_1_list = [node_1[p] for p in keep_col] + assert len(node_0_list) == matches.shape[0] + assert len(node_1_list) == matches.shape[1] + return matches, node_0_list, node_1_list -def add_forbidden(conf_space, preproc_list, est_list, matches, est_type='classifier'): - assert est_type in ('classifier', 'regressor'), "'task_type is %s" % est_type - for pdx, p in enumerate(preproc_list): +def add_forbidden(conf_space, node_0_list, node_1_list, matches, + node_0_name, node_1_name): + for pdx, p in enumerate(node_0_list): if np.sum(matches[pdx, :]) == matches.shape[1]: continue - for cdx, c in enumerate(est_list): + for cdx, c in enumerate(node_1_list): if matches[pdx, cdx] == 0: - try: - conf_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(conf_space.get_hyperparameter( - est_type), c), - ForbiddenEqualsClause(conf_space.get_hyperparameter( - "preprocessor"), p))) - except: - pass + conf_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(conf_space.get_hyperparameter( + node_1_name), c), + ForbiddenEqualsClause(conf_space.get_hyperparameter( + node_0_name), p))) return conf_space diff --git a/ParamSklearn/regression.py b/ParamSklearn/regression.py index bbf12ff870..f0a7b96404 100644 --- a/ParamSklearn/regression.py +++ b/ParamSklearn/regression.py @@ -9,6 +9,7 @@ import numpy as np from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction +from HPOlibConfigSpace.configuration_space import ConfigurationSpace from ParamSklearn import components as components from ParamSklearn.base import ParamSklearnBaseEstimator @@ -62,28 +63,11 @@ class ParamSklearnRegressor(RegressorMixin, ParamSklearnBaseEstimator): """ - def fit(self, X, Y, fit_params=None, init_params=None): - super(ParamSklearnRegressor, self).fit(X, Y, fit_params=fit_params, - init_params=init_params) + def pre_transform(self, X, Y, fit_params=None, init_params=None): + X, fit_params = super(ParamSklearnRegressor, self).pre_transform( + X, Y, fit_params=fit_params, init_params=init_params) self.num_targets = 1 if len(Y.shape) == 1 else Y.shape[1] - return self - - - def _validate_input_X(self, X): - # TODO: think of all possible states which can occur and how to - # handle them - pass - - def _validate_input_Y(self, Y): - pass - - def add_model_class(self, model): - """ - Raises - ------ - NotImplementedError - """ - raise NotImplementedError() + return X, fit_params @classmethod def get_available_components(cls, available_comp, data_prop, inc, exc): @@ -100,12 +84,8 @@ def get_available_components(cls, available_comp, data_prop, inc, exc): components_dict[name] = entry return components_dict - @classmethod - def get_hyperparameter_search_space(cls, include_estimators=None, - exclude_estimators=None, - include_preprocessors=None, - exclude_preprocessors=None, + def get_hyperparameter_search_space(cls, include=None, exclude=None, dataset_properties=None): """Return the configuration space for the CASH problem. @@ -142,13 +122,7 @@ def get_hyperparameter_search_space(cls, include_estimators=None, cs : HPOlibConfigSpace.configuration_space.Configuration The configuration space describing the ParamSklearnClassifier. """ - if include_estimators is not None and exclude_estimators is not None: - raise ValueError("The arguments include_estimators and " - "exclude_regressors cannot be used together.") - - if include_preprocessors is not None and exclude_preprocessors is not None: - raise ValueError("The arguments include_preprocessors and " - "exclude_preprocessors cannot be used together.") + cs = ConfigurationSpace() if dataset_properties is None or not isinstance(dataset_properties, dict): dataset_properties = dict() @@ -157,121 +131,30 @@ def get_hyperparameter_search_space(cls, include_estimators=None, # This dataset is probaby dense dataset_properties['sparse'] = False - available_preprocessors = components.preprocessing_components._preprocessors - preprocessors = ParamSklearnRegressor.get_available_components( - available_comp=available_preprocessors, - data_prop=dataset_properties, inc=include_preprocessors, - exc=exclude_preprocessors) - - # Compile a list of all estimator objects for this problem - available_regressors = ParamSklearnRegressor._get_estimator_components() - regressors = ParamSklearnRegressor.get_available_components( - available_comp=available_regressors, data_prop=dataset_properties, - inc=include_estimators, exc=exclude_estimators) - - if len(regressors) == 0: - raise ValueError("No regressors found") - if len(preprocessors) == 0: - raise ValueError("No preprocessors found, please add NoPreprocessing") - - preprocessors_list = preprocessors.keys() - regressors_list = regressors.keys() - matches = ParamSklearn.create_searchspace_util.get_match_array( - preprocessors=preprocessors, estimators=regressors, - sparse=dataset_properties.get('sparse'), pipeline=cls._get_pipeline()) - - # Now we have only legal preprocessors/classifiers we combine them - # Simple sanity checks - assert np.sum(matches) != 0, "No valid preprocessor/regressor " \ - "combination found, probably a bug" - assert np.sum(matches) <= (matches.shape[0] * matches.shape[1]), \ - "'matches' is not binary; %s <= %d, [%d*%d]" % \ - (str(np.sum(matches)), matches.shape[0]*matches.shape[1], - matches.shape[0], matches.shape[1]) - - if np.sum(matches) < (matches.shape[0] * matches.shape[1]): - matches, preprocessors_list, regressors_list, preprocessors, regressors = \ - ParamSklearn.create_searchspace_util.sanitize_arrays( - m=matches, preprocessors_list=preprocessors_list, - estimators_list=regressors_list, - preprocessors=preprocessors, estimators=regressors) - - # Sanity checks - assert len(preprocessors_list) > 0, "No valid preprocessors found" - assert len(regressors_list) > 0, "No valid classifiers found" - - assert len(preprocessors_list) == matches.shape[0], \ - "Preprocessor deleting went wrong" - assert len(regressors_list) == matches.shape[1], \ - "Classifier deleting went wrong" - assert [r in regressors_list for r in regressors] - assert [p in preprocessors_list for p in preprocessors] - - # Select the default preprocessor before the always active - # preprocessors are added, so they will not be selected as default - # preprocessors - if "no_preprocessing" in preprocessors: - preprocessor_default = "no_preprocessing" - else: - preprocessor_default = sorted(preprocessors.keys())[0] - - # Now add always present preprocessors - for name in available_preprocessors: - if name in cls._get_pipeline(): - preprocessors[name] = available_preprocessors[name] - - # Hardcode the defaults based on some educated guesses - regressor_defaults = ['random_forest', 'liblinear', 'sgd', - 'libsvm_svc'] - regressor_default = None - for rd_ in regressor_defaults: - if rd_ not in regressors: - continue - no_preprocessing_idx = preprocessors_list.index(preprocessor_default) - rd_index = regressors_list.index(rd_) - if matches[no_preprocessing_idx, rd_index] == 1: - regressor_default = rd_ - break - if regressor_default is None: - regressor_default = regressors.keys()[0] - - # Get the configuration space - configuration_space = super(ParamSklearnRegressor, cls).\ - get_hyperparameter_search_space(estimator_name=cls._get_estimator_hyperparameter_name(), - default_estimator=regressor_default, - estimator_components=regressors, - default_preprocessor=preprocessor_default, - preprocessor_components=preprocessors, - dataset_properties=dataset_properties, - always_active=cls._get_pipeline()) - - # And now add forbidden parameter configurations - # According to matches - configuration_space = ParamSklearn.create_searchspace_util.add_forbidden( - conf_space=configuration_space, preproc_list=preprocessors_list, - est_list=regressors_list, matches=matches, est_type="regressor") + pipeline = cls._get_pipeline() + cs = cls._get_hyperparameter_search_space(cs, dataset_properties, + exclude, include, pipeline) + + regressors = cs.get_hyperparameter('regressor:__choice__').choices + preprocessors = cs.get_hyperparameter('preprocessor:__choice__').choices + available_regressors = pipeline[-1][1].get_available_components( + dataset_properties) + available_preprocessors = pipeline[-2][1].get_available_components( + dataset_properties) # A regressor which can handle sparse data after the densifier for key in regressors: - if SPARSE in regressors[key].get_properties()['input']: - try: - configuration_space.add_forbidden_clause( + if SPARSE in available_regressors[key].get_properties()['input']: + if 'densifier' in preprocessors: + cs.add_forbidden_clause( ForbiddenAndConjunction( ForbiddenEqualsClause( - configuration_space.get_hyperparameter( - 'regressor'), key), + cs.get_hyperparameter( + 'regressor:__choice__'), key), ForbiddenEqualsClause( - configuration_space.get_hyperparameter( - 'preprocessor'), 'densifier') + cs.get_hyperparameter( + 'preprocessor:__choice__'), 'densifier') )) - except ValueError as e: - if e.message.startswith("Forbidden clause must be " - "instantiated with a legal " - "hyperparameter value for " - "'preprocessor"): - pass - else: - raise e # which would take too long # Combinations of tree-based models with feature learning: @@ -279,55 +162,44 @@ def get_hyperparameter_search_space(cls, include_estimators=None, feature_learning_ = ["kitchen_sinks", "sparse_filtering"] for r, f in product(regressors_, feature_learning_): - if r not in regressors_list: + if r not in regressors: continue - if f not in preprocessors_list: + if f not in preprocessors: continue try: - configuration_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "regressor"), r), - ForbiddenEqualsClause(configuration_space.get_hyperparameter( - "preprocessor"), f))) + cs.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(cs.get_hyperparameter( + "regressor:__choice__"), r), + ForbiddenEqualsClause(cs.get_hyperparameter( + "preprocessor:__choice__"), f))) except KeyError: pass - # We have seen empirically that tree-based models together with PCA - # don't work better than tree-based models without preprocessing - regressors_ = ["random_forest", "gradient_boosting"] - for r in regressors_: - if r not in regressors_list: - continue - try: - configuration_space.add_forbidden_clause( - ForbiddenAndConjunction( - ForbiddenEqualsClause( - configuration_space.get_hyperparameter( - "preprocessor"), "pca"), - ForbiddenEqualsClause( - configuration_space.get_hyperparameter( - "classifier"), r))) - except KeyError: - pass - except ValueError as e: - if e.message.startswith("Forbidden clause must be " - "instantiated with a legal " - "hyperparameter value for " - "'preprocessor"): - pass - else: - raise e - - return configuration_space + return cs @staticmethod def _get_estimator_components(): return components.regression_components._regressors - @staticmethod - def _get_estimator_hyperparameter_name(): - return "regressor" - @staticmethod def _get_pipeline(): - return ["imputation", "rescaling", "__preprocessor__", "__estimator__"] + steps = [] + + # Add the always active preprocessing components + steps.extend( + [["imputation", + components.preprocessing._preprocessors['imputation']], + ["rescaling", + components.preprocessing._preprocessors['rescaling']]]) + + # Add the preprocessing component + steps.append(['preprocessor', + components.preprocessing._preprocessors['preprocessor']]) + + # Add the classification component + steps.append(['regressor', + components.regression_components._regressors['regressor']]) + return steps + + def _get_estimator_hyperparameter_name(self): + return "regressor" diff --git a/source/first_steps.rst b/source/first_steps.rst index 6d82ad86a5..3520d15200 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -17,7 +17,7 @@ configuration on the iris dataset. >>> np.random.seed(1) >>> np.random.shuffle(indices) >>> configuration_space = ParamSklearnClassifier.get_hyperparameter_search_space() - >>> configuration_space.seed(2) + >>> configuration_space.seed(1) >>> configuration = configuration_space.sample_configuration() >>> cls = ParamSklearnClassifier(configuration, random_state=1) >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) diff --git a/tests/components/preprocessing/test_balancing.py b/tests/components/preprocessing/test_balancing.py index 3a623e6d4f..15c912c0a2 100644 --- a/tests/components/preprocessing/test_balancing.py +++ b/tests/components/preprocessing/test_balancing.py @@ -29,11 +29,11 @@ def test_balancing_get_weights_treed_single_label(self): balancing = Balancing(strategy='weighting') init_params, fit_params = balancing.get_weights( Y, 'random_forest', None, None, None) - self.assertTrue(np.allclose(fit_params['random_forest:sample_weight'], + self.assertTrue(np.allclose(fit_params['classifier:sample_weight'], np.array([0.4] * 80 + [1.6] * 20))) init_params, fit_params = balancing.get_weights( Y, None, 'extra_trees_preproc_for_classification', None, None) - self.assertTrue(np.allclose(fit_params['extra_trees_preproc_for_classification:sample_weight'], + self.assertTrue(np.allclose(fit_params['preprocessor:sample_weight'], np.array([0.4] * 80 + [1.6] * 20))) def test_balancing_get_weights_treed_multilabel(self): @@ -42,11 +42,11 @@ def test_balancing_get_weights_treed_multilabel(self): balancing = Balancing(strategy='weighting') init_params, fit_params = balancing.get_weights( Y, 'random_forest', None, None, None) - self.assertTrue(np.allclose(fit_params['random_forest:sample_weight'], + self.assertTrue(np.allclose(fit_params['classifier:sample_weight'], np.array([0.4] * 500 + [4.0] * 10))) init_params, fit_params = balancing.get_weights( Y, None, 'extra_trees_preproc_for_classification', None, None) - self.assertTrue(np.allclose(fit_params['extra_trees_preproc_for_classification:sample_weight'], + self.assertTrue(np.allclose(fit_params['preprocessor:sample_weight'], np.array([0.4] * 500 + [4.0] * 10))) def test_balancing_get_weights_svm_sgd(self): @@ -54,11 +54,11 @@ def test_balancing_get_weights_svm_sgd(self): balancing = Balancing(strategy='weighting') init_params, fit_params = balancing.get_weights( Y, 'libsvm_svc', None, None, None) - self.assertEqual(("libsvm_svc:class_weight", "auto"), + self.assertEqual(("classifier:class_weight", "auto"), init_params.items()[0]) init_params, fit_params = balancing.get_weights( Y, None, 'liblinear_svc_preprocessor', None, None) - self.assertEqual(("liblinear_svc_preprocessor:class_weight", "auto"), + self.assertEqual(("preprocessor:class_weight", "auto"), init_params.items()[0]) def test_balancing_get_weights_ridge(self): @@ -66,8 +66,8 @@ def test_balancing_get_weights_ridge(self): balancing = Balancing(strategy='weighting') init_params, fit_params = balancing.get_weights( Y, 'ridge', None, None, None) - self.assertAlmostEqual(0.4, init_params['ridge:class_weight'][0]) - self.assertAlmostEqual(1.6, init_params['ridge:class_weight'][1]) + self.assertAlmostEqual(0.4, init_params['classifier:class_weight'][0]) + self.assertAlmostEqual(1.6, init_params['classifier:class_weight'][1]) def test_weighting_effect(self): for name, clf, acc_no_weighting, acc_weighting in \ @@ -85,7 +85,7 @@ def test_weighting_effect(self): # Fit X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cs = ParamSklearnClassifier.get_hyperparameter_search_space( - include_estimators=[name]) + include={'classifier': [name]}) default = cs.get_default_configuration() default._values['balancing:strategy'] = strategy classifier = ParamSklearnClassifier(default, random_state=1) @@ -98,7 +98,7 @@ def test_weighting_effect(self): # pre_transform and fit_estimator X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cs = ParamSklearnClassifier.get_hyperparameter_search_space( - include_estimators=[name]) + include={'classifier': [name]}) default = cs.get_default_configuration() default._values['balancing:strategy'] = strategy classifier = ParamSklearnClassifier(default, random_state=1) @@ -120,7 +120,7 @@ def test_weighting_effect(self): X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cs = ParamSklearnClassifier.get_hyperparameter_search_space( - include_estimators=['sgd'], include_preprocessors=[name]) + include={'classifier': ['sgd'], 'preprocessor': [name]}) default = cs.get_default_configuration() default._values['balancing:strategy'] = strategy classifier = ParamSklearnClassifier(default, random_state=1) @@ -134,7 +134,7 @@ def test_weighting_effect(self): # pre_transform and fit_estimator X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cs = ParamSklearnClassifier.get_hyperparameter_search_space( - include_estimators=['sgd'], include_preprocessors=[name]) + include={'classifier': ['sgd'], 'preprocessor': [name]}) default = cs.get_default_configuration() default._values['balancing:strategy'] = strategy classifier = ParamSklearnClassifier(default, random_state=1) diff --git a/tests/test_classification.py b/tests/test_classification.py index 8d49548f9d..fc17c74fba 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -18,8 +18,8 @@ from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from ParamSklearn.classification import ParamSklearnClassifier -from ParamSklearn.components.classification_base import ParamSklearnClassificationAlgorithm -from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm +from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm import ParamSklearn.components.classification as classification_components import ParamSklearn.components.preprocessing as preprocessing_components from ParamSklearn.util import get_dataset, DENSE, SPARSE, PREDICTIONS @@ -29,6 +29,8 @@ class TestParamSklearnClassifier(unittest.TestCase): def test_io_dict(self): classifiers = classification_components._classifiers for c in classifiers: + if classifiers[c] == classification_components.ClassifierChoice: + continue props = classifiers[c].get_properties() self.assertIn('input', props) self.assertIn('output', props) @@ -48,8 +50,10 @@ def test_io_dict(self): def test_find_classifiers(self): classifiers = classification_components._classifiers - self.assertGreaterEqual(len(classifiers), 1) + self.assertGreaterEqual(len(classifiers), 2) for key in classifiers: + if hasattr(classifiers[key], 'get_components'): + continue self.assertIn(ParamSklearnClassificationAlgorithm, classifiers[key].__bases__) @@ -57,6 +61,8 @@ def test_find_preprocessors(self): preprocessors = preprocessing_components._preprocessors self.assertGreaterEqual(len(preprocessors), 1) for key in preprocessors: + if hasattr(preprocessors[key], 'get_components'): + continue self.assertIn(ParamSklearnPreprocessingAlgorithm, preprocessors[key].__bases__) @@ -64,11 +70,12 @@ def test_default_configuration(self): for i in range(2): cs = ParamSklearnClassifier.get_hyperparameter_search_space() default = cs.get_default_configuration() + print cs X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris') auto = ParamSklearnClassifier(default) auto = auto.fit(X_train, Y_train) predictions = auto.predict(X_test) - self.assertAlmostEqual(0.95999999999999996, + self.assertAlmostEqual(0.9599999999999995, sklearn.metrics.accuracy_score(predictions, Y_test)) scores = auto.predict_proba(X_test) @@ -93,12 +100,14 @@ def test_configurations(self): continue else: print config + print traceback.format_exc() raise e except LinAlgError as e: if "not positive definite, even with jitter" in e.message: continue else: print config + print traceback.format_exc() raise e except AttributeError as e: # Some error in QDA @@ -106,6 +115,7 @@ def test_configurations(self): continue else: print config + print traceback.format_exc() raise e except RuntimeWarning as e: if "invalid value encountered in sqrt" in e.message: @@ -114,12 +124,14 @@ def test_configurations(self): continue else: print config + print traceback.format_exc() raise e except UserWarning as e: if "FastICA did not converge" in e.message: continue else: print config + print traceback.format_exc() raise e except MemoryError as e: continue @@ -127,15 +139,17 @@ def test_configurations(self): def test_configurations_sparse(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'sparse': True}) + print cs for i in range(10): config = cs.sample_configuration() + print config X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', make_sparse=True) cls = ParamSklearnClassifier(config, random_state=1) try: cls.fit(X_train, Y_train) predictions = cls.predict(X_test) - except ValueError as e: + except ValueError as e: if "Floating-point under-/overflow occurred at epoch" in \ e.message or \ "removed all features" in e.message: @@ -184,68 +198,84 @@ def test_get_hyperparameter_search_space(self): def test_get_hyperparameter_search_space_include_exclude_models(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space( - include_estimators=['libsvm_svc']) - self.assertEqual(cs.get_hyperparameter('classifier'), - CategoricalHyperparameter('classifier', ['libsvm_svc'])) + include={'classifier': ['libsvm_svc']}) + self.assertEqual(cs.get_hyperparameter('classifier:__choice__'), + CategoricalHyperparameter('classifier:__choice__', ['libsvm_svc'])) cs = ParamSklearnClassifier.get_hyperparameter_search_space( - exclude_estimators=['libsvm_svc']) + exclude={'classifier': ['libsvm_svc']}) self.assertNotIn('libsvm_svc', str(cs)) cs = ParamSklearnClassifier.get_hyperparameter_search_space( - include_preprocessors=['select_percentile_classification']) - self.assertEqual(cs.get_hyperparameter('preprocessor'), - CategoricalHyperparameter('preprocessor', + include={'preprocessor': ['select_percentile_classification']}) + self.assertEqual(cs.get_hyperparameter('preprocessor:__choice__'), + CategoricalHyperparameter('preprocessor:__choice__', ['select_percentile_classification'])) cs = ParamSklearnClassifier.get_hyperparameter_search_space( - exclude_preprocessors=['select_percentile_classification']) + exclude={'preprocessor': ['select_percentile_classification']}) self.assertNotIn('select_percentile_classification', str(cs)) def test_get_hyperparameter_search_space_preprocessor_contradicts_default_classifier(self): - cs = ParamSklearnClassifier.get_hyperparameter_search_space( - include_preprocessors=['nystroem_sampler']) - self.assertEqual(cs.get_hyperparameter('preprocessor').choices, - ['nystroem_sampler']) + self.assertRaisesRegexp(ValueError, "Configuration:\n" + " balancing:strategy, Value: none\n" + " classifier:__choice__, Value: random_forest\n" + " classifier:random_forest:bootstrap, Value: True\n" + " classifier:random_forest:criterion, Value: gini\n" + " classifier:random_forest:max_depth, Constant: None\n" + " classifier:random_forest:max_features, Value: 1.0\n" + " classifier:random_forest:max_leaf_nodes, Constant: None\n" + " classifier:random_forest:min_samples_leaf, Value: 1\n" + " classifier:random_forest:min_samples_split, Value: 2\n" + " classifier:random_forest:n_estimators, Constant: 100\n" + " imputation:strategy, Value: mean\n" + " preprocessor:__choice__, Value: nystroem_sampler\n" + " preprocessor:nystroem_sampler:gamma, Value: 0.1\n" + " preprocessor:nystroem_sampler:kernel, Value: rbf\n" + " preprocessor:nystroem_sampler:n_components, Value: 100\n" + " rescaling:strategy, Value: min/max\n" + "violates forbidden clause \(Forbidden: classifier:__choice__ == random_forest && Forbidden: preprocessor:__choice__ == nystroem_sampler\)", + ParamSklearnClassifier.get_hyperparameter_search_space, + include={'preprocessor': ['nystroem_sampler']}) def test_get_hyperparameter_search_space_only_forbidden_combinations(self): self.assertRaisesRegexp(ValueError, "Configuration:\n" " balancing:strategy, Value: none\n" - " classifier, Value: multinomial_nb\n" + " classifier:__choice__, Value: multinomial_nb\n" + " classifier:multinomial_nb:alpha, Value: 1.0\n" + " classifier:multinomial_nb:fit_prior, Value: True\n" " imputation:strategy, Value: mean\n" - " multinomial_nb:alpha, Value: 1.0\n" - " multinomial_nb:fit_prior, Value: True\n" - " preprocessor, Value: truncatedSVD\n" + " preprocessor:__choice__, Value: truncatedSVD\n" + " preprocessor:truncatedSVD:target_dim, Value: 128\n" " rescaling:strategy, Value: min/max\n" - " truncatedSVD:target_dim, Value: 128\n" - "violates forbidden clause \(Forbidden: preprocessor == " - "truncatedSVD && Forbidden: classifier == multinomial_nb\)", + "violates forbidden clause \(Forbidden: preprocessor:__choice__ == " + "truncatedSVD && Forbidden: classifier:__choice__ == multinomial_nb\)", ParamSklearnClassifier.get_hyperparameter_search_space, - include_estimators=['multinomial_nb'], - include_preprocessors=['truncatedSVD'], + include={'classifier': ['multinomial_nb'], + 'preprocessor': ['truncatedSVD']}, dataset_properties={'sparse':True}) # It must also be catched that no classifiers which can handle sparse # data are located behind the densifier self.assertRaisesRegexp(ValueError, "Configuration:\n" " balancing:strategy, Value: none\n" - " classifier, Value: liblinear_svc\n" + " classifier:__choice__, Value: liblinear_svc\n" + " classifier:liblinear_svc:C, Value: 1.0\n" + " classifier:liblinear_svc:dual, Constant: False\n" + " classifier:liblinear_svc:fit_intercept, Constant: True\n" + " classifier:liblinear_svc:intercept_scaling, Constant: 1\n" + " classifier:liblinear_svc:loss, Value: l2\n" + " classifier:liblinear_svc:multi_class, Constant: ovr\n" + " classifier:liblinear_svc:penalty, Value: l2\n" + " classifier:liblinear_svc:tol, Value: 0.0001\n" " imputation:strategy, Value: mean\n" - " liblinear_svc:C, Value: 1.0\n" - " liblinear_svc:dual, Constant: False\n" - " liblinear_svc:fit_intercept, Constant: True\n" - " liblinear_svc:intercept_scaling, Constant: 1\n" - " liblinear_svc:loss, Value: l2\n" - " liblinear_svc:multi_class, Constant: ovr\n" - " liblinear_svc:penalty, Value: l2\n" - " liblinear_svc:tol, Value: 0.0001\n" - " preprocessor, Value: densifier\n" + " preprocessor:__choice__, Value: densifier\n" " rescaling:strategy, Value: min/max\n" - "violates forbidden clause \(Forbidden: classifier == liblinear_svc &&" - " Forbidden: preprocessor == densifier\)", + "violates forbidden clause \(Forbidden: classifier:__choice__ == liblinear_svc &&" + " Forbidden: preprocessor:__choice__ == densifier\)", ParamSklearnClassifier.get_hyperparameter_search_space, - include_estimators=['liblinear_svc'], - include_preprocessors=['densifier'], + include={'classifier': ['liblinear_svc'], + 'preprocessor': ['densifier']}, dataset_properties={'sparse': True}) def test_get_hyperparameter_search_space_dataset_properties(self): @@ -312,21 +342,19 @@ def test_predict_batched(self): def test_predict_batched_sparse(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'sparse': True}) - # Densifier + RF is the only combination that easily tests sparse - # data with multilabel classification! config = Configuration(cs, values={"balancing:strategy": "none", - "classifier": "random_forest", + "classifier:__choice__": "random_forest", "imputation:strategy": "mean", - "preprocessor": "densifier", - 'random_forest:bootstrap': 'True', - 'random_forest:criterion': 'gini', - 'random_forest:max_depth': 'None', - 'random_forest:min_samples_split': 2, - 'random_forest:min_samples_leaf': 2, - 'random_forest:max_features': 0.5, - 'random_forest:max_leaf_nodes': 'None', - 'random_forest:n_estimators': 100, + "preprocessor:__choice__": "no_preprocessing", + 'classifier:random_forest:bootstrap': 'True', + 'classifier:random_forest:criterion': 'gini', + 'classifier:random_forest:max_depth': 'None', + 'classifier:random_forest:min_samples_split': 2, + 'classifier:random_forest:min_samples_leaf': 2, + 'classifier:random_forest:max_features': 0.5, + 'classifier:random_forest:max_leaf_nodes': 'None', + 'classifier:random_forest:n_estimators': 100, "rescaling:strategy": "min/max"}) cls = ParamSklearnClassifier(config) @@ -396,21 +424,19 @@ def test_predict_proba_batched_sparse(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'sparse': True}) - # Densifier + RF is the only combination that easily tests sparse - # data with multilabel classification! config = Configuration(cs, values={"balancing:strategy": "none", - "classifier": "random_forest", + "classifier:__choice__": "random_forest", "imputation:strategy": "mean", - "preprocessor": "densifier", - 'random_forest:bootstrap': 'True', - 'random_forest:criterion': 'gini', - 'random_forest:max_depth': 'None', - 'random_forest:min_samples_split': 2, - 'random_forest:min_samples_leaf': 2, - 'random_forest:max_features': 0.5, - 'random_forest:max_leaf_nodes': 'None', - 'random_forest:n_estimators': 100, + "preprocessor:__choice__": "no_preprocessing", + 'classifier:random_forest:bootstrap': 'True', + 'classifier:random_forest:criterion': 'gini', + 'classifier:random_forest:max_depth': 'None', + 'classifier:random_forest:min_samples_split': 2, + 'classifier:random_forest:min_samples_leaf': 2, + 'classifier:random_forest:max_features': 0.5, + 'classifier:random_forest:max_leaf_nodes': 'None', + 'classifier:random_forest:n_estimators': 100, "rescaling:strategy": "min/max"}) # Multiclass diff --git a/tests/test_create_searchspace_util_classification.py b/tests/test_create_searchspace_util_classification.py index 16eaea42c4..740ab16462 100644 --- a/tests/test_create_searchspace_util_classification.py +++ b/tests/test_create_searchspace_util_classification.py @@ -7,14 +7,12 @@ from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from ParamSklearn.components.classification.liblinear_svc import LibLinear_SVC -from ParamSklearn.components.classification.random_forest import RandomForest +from ParamSklearn.components.classification.lda import LDA from ParamSklearn.components.preprocessing.pca import PCA from ParamSklearn.components.preprocessing.truncatedSVD import TruncatedSVD from ParamSklearn.components.preprocessing.no_preprocessing import NoPreprocessing from ParamSklearn.components.preprocessing.random_trees_embedding import RandomTreesEmbedding - -from ParamSklearn.classification import ParamSklearnClassifier import ParamSklearn.create_searchspace_util class TestCreateClassificationSearchspace(unittest.TestCase): @@ -22,74 +20,85 @@ class TestCreateClassificationSearchspace(unittest.TestCase): def test_get_match_array(self): # preproc is empty preprocessors = OrderedDict() - preprocessors["pca"] = PCA # dense + preprocessors['pca'] = PCA classifiers = OrderedDict() - classifiers["random_forest"] = RandomForest + classifiers['rf'] = LDA + # Sparse + dense + class Preprocessors(object): + @classmethod + def get_available_components(self, *args, **kwargs): + return preprocessors + + class Classifiers(object): + @classmethod + def get_available_components(self, *args, **kwargs): + return classifiers + + # Dense m = ParamSklearn.create_searchspace_util.get_match_array( - preprocessors=preprocessors, estimators=classifiers, sparse=True, - pipeline=ParamSklearnClassifier._get_pipeline()) + node_0=PCA, node_1=LDA, dataset_properties={'sparse': True}) self.assertEqual(numpy.sum(m), 0) m = ParamSklearn.create_searchspace_util.get_match_array( - preprocessors=preprocessors, estimators=classifiers, sparse=False, - pipeline=ParamSklearnClassifier._get_pipeline()) + node_0=PCA, node_1=LDA, dataset_properties={'sparse': False}) self.assertEqual(m, [[1]]) - preprocessors['TSVD'] = TruncatedSVD # sparse + # Sparse + preprocessors['tSVD'] = TruncatedSVD m = ParamSklearn.create_searchspace_util.get_match_array( - preprocessors=preprocessors, estimators=classifiers, sparse=True, - pipeline=ParamSklearnClassifier._get_pipeline()) + node_0=Preprocessors, node_1=LDA, + dataset_properties={'sparse': True}) self.assertEqual(m[0], [0]) # pca self.assertEqual(m[1], [1]) # svd m = ParamSklearn.create_searchspace_util.get_match_array( - preprocessors=preprocessors, estimators=classifiers, sparse=False, - pipeline=ParamSklearnClassifier._get_pipeline()) + node_0=Preprocessors, node_1=LDA, + dataset_properties={'sparse': False}) self.assertEqual(m[0], [1]) # pca self.assertEqual(m[1], [0]) # svd - preprocessors['none'] = NoPreprocessing # sparse + dense + preprocessors['none'] = NoPreprocessing m = ParamSklearn.create_searchspace_util.get_match_array( - preprocessors=preprocessors, estimators=classifiers, sparse=True, - pipeline=ParamSklearnClassifier._get_pipeline()) + node_0=Preprocessors, node_1=LDA, + dataset_properties={'sparse': True}) self.assertEqual(m[0, :], [0]) # pca self.assertEqual(m[1, :], [1]) # tsvd self.assertEqual(m[2, :], [0]) # none m = ParamSklearn.create_searchspace_util.get_match_array( - preprocessors=preprocessors, estimators=classifiers, sparse=False, - pipeline=ParamSklearnClassifier._get_pipeline()) + node_0=Preprocessors, node_1=LDA, + dataset_properties={'sparse': False}) self.assertEqual(m[0, :], [1]) # pca self.assertEqual(m[1, :], [0]) # tsvd self.assertEqual(m[2, :], [1]) # none classifiers['libsvm'] = LibLinear_SVC m = ParamSklearn.create_searchspace_util.get_match_array( - preprocessors=preprocessors, estimators=classifiers, sparse=False, - pipeline=ParamSklearnClassifier._get_pipeline()) + node_0=Preprocessors, node_1=Classifiers, + dataset_properties={'sparse': False}) self.assertListEqual(list(m[0, :]), [1, 1]) # pca self.assertListEqual(list(m[1, :]), [0, 0]) # tsvd self.assertListEqual(list(m[2, :]), [1, 1]) # none m = ParamSklearn.create_searchspace_util.get_match_array( - preprocessors=preprocessors, estimators=classifiers, sparse=True, - pipeline=ParamSklearnClassifier._get_pipeline()) + node_0=Preprocessors, node_1=Classifiers, + dataset_properties={'sparse': True}) self.assertListEqual(list(m[0, :]), [0, 0]) # pca self.assertListEqual(list(m[1, :]), [1, 1]) # tsvd self.assertListEqual(list(m[2, :]), [0, 1]) # none preprocessors['rte'] = RandomTreesEmbedding m = ParamSklearn.create_searchspace_util.get_match_array( - preprocessors=preprocessors, estimators=classifiers, sparse=False, - pipeline=ParamSklearnClassifier._get_pipeline()) + node_0=Preprocessors, node_1=Classifiers, + dataset_properties={'sparse': False}) self.assertListEqual(list(m[0, :]), [1, 1]) # pca self.assertListEqual(list(m[1, :]), [0, 0]) # tsvd self.assertListEqual(list(m[2, :]), [1, 1]) # none self.assertListEqual(list(m[3, :]), [0, 1]) # random trees embedding m = ParamSklearn.create_searchspace_util.get_match_array( - preprocessors=preprocessors, estimators=classifiers, sparse=True, - pipeline=ParamSklearnClassifier._get_pipeline()) + node_0=Preprocessors, node_1=Classifiers, + dataset_properties={'sparse': True}) self.assertListEqual(list(m[0, :]), [0, 0]) # pca self.assertListEqual(list(m[1, :]), [1, 1]) # tsvd self.assertListEqual(list(m[2, :]), [0, 1]) # none @@ -117,48 +126,45 @@ def test_get_idx_to_keep(self): [self.assertTrue(r < m.shape[0]) for r in r_keep] def test_sanitize_arrays(self): + class Choices(list): + def get_available_components(self, *args, **kwargs): + return OrderedDict(((v, v) for i, v in enumerate(self[:]))) + m = numpy.zeros([2, 3]) - preprocessors_list = ['pa', 'pb'] - preprocessors = OrderedDict([['pa', 1], ['pb', 2]]) - classifier_list = ['ca', 'cb', 'cc'] - classifiers = OrderedDict([['ca', 1], ['cb', 2], ['cc', 3]]) + preprocessors = Choices(['pa', 'pb']) + classifiers = Choices(['ca', 'cb', 'cc']) # all zeros -> empty - new_m, new_preprocessors_list, new_classifier_list, new_preproc, new_class = \ + new_m, new_preproc_list, new_class_list = \ ParamSklearn.create_searchspace_util.sanitize_arrays( - m=m, preprocessors=preprocessors, - preprocessors_list=preprocessors_list, estimators=classifiers, - estimators_list=classifier_list) + matches=m, node_0=preprocessors, node_1=classifiers, + dataset_properties={}) self.assertEqual(len(new_m), 0) - self.assertTrue(len(new_classifier_list) == len(new_preprocessors_list) == 0) - self.assertTrue(len(new_preproc) == len(new_class) == 0) + self.assertTrue(len(new_preproc_list) == len(new_class_list) == 0) for i in range(20): m = numpy.zeros([2, 3]) class_idx = numpy.random.randint(low=0, high=m.shape[1], size=1)[0] pre_idx = numpy.random.randint(low=0, high=m.shape[0], size=1)[0] m[pre_idx, class_idx] = 1 - new_m, new_preprocessors_list, new_classifier_list, new_preproc, new_class = \ + new_m, new_preproc_list, new_class_list = \ ParamSklearn.create_searchspace_util.sanitize_arrays( - m=m, preprocessors=preprocessors, - preprocessors_list=preprocessors_list, - estimators=classifiers, estimators_list=classifier_list) - self.assertIn(preprocessors_list[pre_idx], new_preprocessors_list) - self.assertIn(preprocessors_list[pre_idx], preprocessors) - self.assertIn(classifier_list[class_idx], new_classifier_list) - self.assertIn(classifier_list[class_idx], classifiers) + matches=m, node_0=preprocessors, node_1=classifiers, + dataset_properties={}) + print preprocessors, pre_idx, new_preproc_list + self.assertIn(preprocessors[pre_idx], new_preproc_list) + self.assertIn(classifiers[class_idx], new_class_list) self.assertTrue(new_m.shape[0] == new_m.shape[1] == 1) m = numpy.array([[1, 0, 0], [0, 1, 0]]) - new_m, new_preprocessors_list, new_classifier_list, new_preproc, new_class = \ + new_m, new_preproc_list, new_class_list = \ ParamSklearn.create_searchspace_util.sanitize_arrays( - m=m, preprocessors=preprocessors, - preprocessors_list=preprocessors_list, estimators=classifiers, - estimators_list=classifier_list) - self.assertListEqual(preprocessors_list, new_preprocessors_list) - [self.assertIn(p, preprocessors) for p in preprocessors_list] - self.assertListEqual(classifier_list[:-1], new_classifier_list) - [self.assertIn(c, classifiers) for c in new_classifier_list] + matches=m, node_0=preprocessors, node_1=classifiers, + dataset_properties={}) + self.assertListEqual(preprocessors, new_preproc_list) + [self.assertIn(p, preprocessors) for p in preprocessors] + self.assertListEqual(classifiers[:-1], new_class_list) + [self.assertIn(c, classifiers) for c in new_class_list] self.assertTrue(m.shape[0], new_m.shape[0]) self.assertTrue(m.shape[1], new_m.shape[1]) @@ -174,16 +180,18 @@ def test_add_forbidden(self): cs.add_hyperparameter(preprocessor) cs.add_hyperparameter(classifier) new_cs = ParamSklearn.create_searchspace_util.add_forbidden( - conf_space=cs, preproc_list=preprocessors_list, - est_list=classifier_list, matches=m, est_type="classifier") + conf_space=cs, node_0_list=preprocessors_list, + node_1_list=classifier_list, matches=m, + node_0_name='preprocessor', node_1_name="classifier") self.assertEqual(len(new_cs.forbidden_clauses), 0) self.assertIsInstance(new_cs, ConfigurationSpace) - m[0, 0] = 0 + m[1, 1] = 0 new_cs = ParamSklearn.create_searchspace_util.add_forbidden( - conf_space=cs, preproc_list=preprocessors_list, - est_list=classifier_list, matches=m, est_type="classifier") + conf_space=cs, node_0_list=preprocessors_list, + node_1_list=classifier_list, matches=m, + node_0_name='preprocessor', node_1_name="classifier") self.assertEqual(len(new_cs.forbidden_clauses), 1) - self.assertEqual(new_cs.forbidden_clauses[0].components[0].value, 'ca') - self.assertEqual(new_cs.forbidden_clauses[0].components[1].value, 'pa') + self.assertEqual(new_cs.forbidden_clauses[0].components[0].value, 'cb') + self.assertEqual(new_cs.forbidden_clauses[0].components[1].value, 'pb') self.assertIsInstance(new_cs, ConfigurationSpace) \ No newline at end of file diff --git a/tests/test_regression.py b/tests/test_regression.py index e52f75a627..f97396f634 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -14,8 +14,8 @@ from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from ParamSklearn.regression import ParamSklearnRegressor -from ParamSklearn.components.regression_base import ParamSklearnRegressionAlgorithm -from ParamSklearn.components.preprocessor_base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm +from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm import ParamSklearn.components.regression as regression_components import ParamSklearn.components.preprocessing as preprocessing_components from ParamSklearn.util import get_dataset, SPARSE, DENSE, PREDICTIONS @@ -25,8 +25,10 @@ class TestParamSKlearnRegressor(unittest.TestCase): def test_io_dict(self): regressors = regression_components._regressors - for c in regressors: - props = regressors[c].get_properties() + for r in regressors: + if regressors[r] == regression_components.RegressorChoice: + continue + props = regressors[r].get_properties() self.assertIn('input', props) self.assertIn('output', props) inp = props['input'] @@ -50,6 +52,8 @@ def test_find_regressors(self): regressors = regression_components._regressors self.assertGreaterEqual(len(regressors), 1) for key in regressors: + if hasattr(regressors[key], 'get_components'): + continue self.assertIn(ParamSklearnRegressionAlgorithm, regressors[key].__bases__) @@ -57,6 +61,8 @@ def test_find_preprocessors(self): preprocessors = preprocessing_components._preprocessors self.assertGreaterEqual(len(preprocessors), 1) for key in preprocessors: + if hasattr(preprocessors[key], 'get_components'): + continue self.assertIn(ParamSklearnPreprocessingAlgorithm, preprocessors[key].__bases__) @@ -79,63 +85,63 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(51, len(hyperparameters)) + self.assertEqual(75, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): cs = ParamSklearnRegressor.get_hyperparameter_search_space( - include_estimators=['random_forest']) - self.assertEqual(cs.get_hyperparameter('regressor'), - CategoricalHyperparameter('regressor', ['random_forest'])) + include={'regressor': ['random_forest']}) + self.assertEqual(cs.get_hyperparameter('regressor:__choice__'), + CategoricalHyperparameter('regressor:__choice__', ['random_forest'])) # TODO add this test when more than one regressor is present cs = ParamSklearnRegressor.get_hyperparameter_search_space( - exclude_estimators=['random_forest']) + exclude={'regressor': ['random_forest']}) self.assertNotIn('random_forest', str(cs)) cs = ParamSklearnRegressor.get_hyperparameter_search_space( - include_preprocessors=['pca']) - self.assertEqual(cs.get_hyperparameter('preprocessor'), - CategoricalHyperparameter('preprocessor', ['pca', ])) + include={'preprocessor': ['pca']}) + self.assertEqual(cs.get_hyperparameter('preprocessor:__choice__'), + CategoricalHyperparameter('preprocessor:__choice__', ['pca'])) cs = ParamSklearnRegressor.get_hyperparameter_search_space( - exclude_preprocessors=['no_preprocessing']) + exclude={'preprocessor': ['no_preprocessing']}) self.assertNotIn('no_preprocessing', str(cs)) def test_get_hyperparameter_search_space_only_forbidden_combinations(self): self.assertRaisesRegexp(ValueError, "Configuration:\n" " imputation:strategy, Value: mean\n" - " kitchen_sinks:gamma, Value: 1.0\n" - " kitchen_sinks:n_components, Value: 100\n" - " preprocessor, Value: kitchen_sinks\n" - " random_forest:bootstrap, Value: True\n" - " random_forest:criterion, Constant: mse\n" - " random_forest:max_depth, Constant: None\n" - " random_forest:max_features, Value: 1.0\n" - " random_forest:min_samples_leaf, Value: 1\n" - " random_forest:min_samples_split, Value: 2\n" - " random_forest:n_estimators, Constant: 100\n" - " regressor, Value: random_forest\n" + " preprocessor:__choice__, Value: kitchen_sinks\n" + " preprocessor:kitchen_sinks:gamma, Value: 1.0\n" + " preprocessor:kitchen_sinks:n_components, Value: 100\n" + " regressor:__choice__, Value: random_forest\n" + " regressor:random_forest:bootstrap, Value: True\n" + " regressor:random_forest:criterion, Constant: mse\n" + " regressor:random_forest:max_depth, Constant: None\n" + " regressor:random_forest:max_features, Value: 1.0\n" + " regressor:random_forest:min_samples_leaf, Value: 1\n" + " regressor:random_forest:min_samples_split, Value: 2\n" + " regressor:random_forest:n_estimators, Constant: 100\n" " rescaling:strategy, Value: min/max\n" - "violates forbidden clause \(Forbidden: regressor == random_forest" - " && Forbidden: preprocessor == kitchen_sinks\)", + "violates forbidden clause \(Forbidden: regressor:__choice__ == random_forest" + " && Forbidden: preprocessor:__choice__ == kitchen_sinks\)", ParamSklearnRegressor.get_hyperparameter_search_space, - include_estimators=['random_forest'], - include_preprocessors=['kitchen_sinks']) + include={'regressor': ['random_forest'], + 'preprocessor': ['kitchen_sinks']}) # It must also be catched that no classifiers which can handle sparse # data are located behind the densifier self.assertRaisesRegexp(ValueError, "Configuration:\n" " imputation:strategy, Value: mean\n" - " preprocessor, Value: densifier\n" - " regressor, Value: ridge_regression\n" + " preprocessor:__choice__, Value: densifier\n" + " regressor:__choice__, Value: ridge_regression\n" + " regressor:ridge_regression:alpha, Value: 1.0\n" " rescaling:strategy, Value: min/max\n" - " ridge_regression:alpha, Value: 1.0\n" - "violates forbidden clause \(Forbidden: regressor == " - "ridge_regression && Forbidden: preprocessor == densifier\)", + "violates forbidden clause \(Forbidden: regressor:__choice__ == " + "ridge_regression && Forbidden: preprocessor:__choice__ == densifier\)", ParamSklearnRegressor.get_hyperparameter_search_space, - include_estimators=['ridge_regression'], - include_preprocessors=['densifier'], + include={'regressor': ['ridge_regression'], + 'preprocessor': ['densifier']}, dataset_properties={'sparse': True}) @unittest.skip("test_get_hyperparameter_search_space_dataset_properties" + diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index 8645da2d11..0ff4dc72bb 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -6,6 +6,7 @@ class TextClassificationTest(unittest.TestCase): + @unittest.skip("Not properly implemented yet!") def test_get_hyperparameter_search_space(self): cs = ParamSklearnTextClassifier.get_hyperparameter_search_space() self.assertIsInstance(cs, ConfigurationSpace) From 9caf652d8aa1fefb8531b5ccc339aceef520c305 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Jul 2015 10:15:22 +0200 Subject: [PATCH 269/352] Add ExtraTreesRegressor, update ExtraTreesClassifier to sklearn 0.16 --- .../components/classification/extra_trees.py | 74 +++---- .../components/regression/extra_trees.py | 181 ++++++++++++++++++ ParamSklearn/util.py | 19 +- misc/regressors.csv | 2 +- .../classification/test_extra_trees.py | 8 + .../components/regression/test_extra_trees.py | 33 ++++ tests/test_regression.py | 2 +- 7 files changed, 269 insertions(+), 50 deletions(-) create mode 100644 ParamSklearn/components/regression/extra_trees.py create mode 100644 tests/components/regression/test_extra_trees.py diff --git a/ParamSklearn/components/classification/extra_trees.py b/ParamSklearn/components/classification/extra_trees.py index 318895e91e..f6c7143f9f 100644 --- a/ParamSklearn/components/classification/extra_trees.py +++ b/ParamSklearn/components/classification/extra_trees.py @@ -6,17 +6,15 @@ UnParametrizedHyperparameter, Constant from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.util import DENSE, PREDICTIONS, SPARSE from sklearn.ensemble import ExtraTreesClassifier as ETC -# get our own forests to replace the sklearn ones -#from ParamSklearn.implementations import forest class ExtraTreesClassifier(ParamSklearnClassificationAlgorithm): def __init__(self, n_estimators, criterion, min_samples_leaf, - min_samples_split, max_features, max_leaf_nodes_or_max_depth="max_depth", #use_max_depth=False, + min_samples_split, max_features, max_leaf_nodes_or_max_depth="max_depth", bootstrap=False, max_leaf_nodes=None, max_depth="None", oob_score=False, n_jobs=1, random_state=None, verbose=0): @@ -125,8 +123,8 @@ def get_properties(): 'handles_multiclass': True, 'handles_multilabel': True, 'is_deterministic': True, - 'handles_sparse': False, - 'input': (DENSE, ), + 'handles_sparse': True, + 'input': (DENSE, SPARSE), 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? @@ -134,50 +132,34 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + n_estimators = cs.add_hyperparameter(Constant("n_estimators", 100)) + criterion = cs.add_hyperparameter(CategoricalHyperparameter( + "criterion", ["gini", "entropy"], default="gini")) + max_features = cs.add_hyperparameter(UniformFloatHyperparameter( + "max_features", 0.5, 5, default=1)) + + max_depth = cs.add_hyperparameter( + UnParametrizedHyperparameter(name="max_depth", value="None")) - #use_max_depth = CategoricalHyperparameter( - # name="use_max_depth", choices=("True", "False"), default="False") - bootstrap = CategoricalHyperparameter( - "bootstrap", ["True", "False"], default="False") - - # Copied from random_forest.py - #n_estimators = UniformIntegerHyperparameter( - # "n_estimators", 10, 100, default=10) - n_estimators = Constant("n_estimators", 100) - criterion = CategoricalHyperparameter( - "criterion", ["gini", "entropy"], default="gini") - #max_features = UniformFloatHyperparameter( - # "max_features", 0.01, 0.5, default=0.1) - max_features = UniformFloatHyperparameter( - "max_features", 0.5, 5, default=1) - min_samples_split = UniformIntegerHyperparameter( - "min_samples_split", 2, 20, default=2) - min_samples_leaf = UniformIntegerHyperparameter( - "min_samples_leaf", 1, 20, default=1) - - # Unparametrized - #max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter( + min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_split", 2, 20, default=2)) + min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_leaf", 1, 20, default=1)) + + # Unparametrized, we use min_samples as regularization + # max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter( # name="max_leaf_nodes_or_max_depth", value="max_depth") - # CategoricalHyperparameter("max_leaf_nodes_or_max_depth", - # choices=["max_leaf_nodes", "max_depth"], default="max_depth") - #max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", + # CategoricalHyperparameter("max_leaf_nodes_or_max_depth", + # choices=["max_leaf_nodes", "max_depth"], default="max_depth") + # min_weight_fraction_leaf = UniformFloatHyperparameter( + # "min_weight_fraction_leaf", 0.0, 0.1) + # max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", # value="None") - # UniformIntegerHyperparameter( - # name="max_leaf_nodes", lower=10, upper=1000, default=) - - max_depth = UnParametrizedHyperparameter(name="max_depth", value="None") - cs = ConfigurationSpace() - cs.add_hyperparameter(n_estimators) - cs.add_hyperparameter(criterion) - cs.add_hyperparameter(max_features) - #cs.add_hyperparameter(use_max_depth) - cs.add_hyperparameter(max_depth) - #cs.add_hyperparameter(max_leaf_nodes_or_max_depth) - cs.add_hyperparameter(min_samples_split) - cs.add_hyperparameter(min_samples_leaf) - #cs.add_hyperparameter(max_leaf_nodes) - cs.add_hyperparameter(bootstrap) + bootstrap = cs.add_hyperparameter(CategoricalHyperparameter( + "bootstrap", ["True", "False"], default="False")) # Conditions # Not applicable because max_leaf_nodes is no legal value of the parent diff --git a/ParamSklearn/components/regression/extra_trees.py b/ParamSklearn/components/regression/extra_trees.py new file mode 100644 index 0000000000..7007f47e0f --- /dev/null +++ b/ParamSklearn/components/regression/extra_trees.py @@ -0,0 +1,181 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS, SPARSE + +from sklearn.ensemble import ExtraTreesRegressor as ETR + + +class ExtraTreesRegressor(ParamSklearnRegressionAlgorithm): + def __init__(self, n_estimators, criterion, min_samples_leaf, + min_samples_split, max_features, + max_leaf_nodes_or_max_depth="max_depth", + bootstrap=False, max_leaf_nodes=None, max_depth="None", + oob_score=False, n_jobs=1, random_state=None, verbose=0): + + self.n_estimators = int(n_estimators) + self.estimator_increment = 10 + if criterion not in ("mse"): + raise ValueError("'criterion' is not in ('mse'): " + "%s" % criterion) + self.criterion = criterion + + if max_leaf_nodes_or_max_depth == "max_depth": + self.max_leaf_nodes = None + if max_depth == "None": + self.max_depth = None + else: + self.max_depth = int(max_depth) + #if use_max_depth == "True": + # self.max_depth = int(max_depth) + #elif use_max_depth == "False": + # self.max_depth = None + else: + if max_leaf_nodes == "None": + self.max_leaf_nodes = None + else: + self.max_leaf_nodes = int(max_leaf_nodes) + self.max_depth = None + + self.min_samples_leaf = int(min_samples_leaf) + self.min_samples_split = int(min_samples_split) + + self.max_features = float(max_features) + + if bootstrap == "True": + self.bootstrap = True + elif bootstrap == "False": + self.bootstrap = False + + self.oob_score = oob_score + self.n_jobs = int(n_jobs) + self.random_state = random_state + self.verbose = int(verbose) + self.estimator = None + + def fit(self, X, y, refit=False): + if self.estimator is None or refit: + self.iterative_fit(X, y, n_iter=1, refit=refit) + + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1) + return self + + def iterative_fit(self, X, y, n_iter=1, refit=False): + if refit: + self.estimator = None + + if self.estimator is None: + num_features = X.shape[1] + max_features = int( + float(self.max_features) * (np.log(num_features) + 1)) + # Use at most half of the features + max_features = max(1, min(int(X.shape[1] / 2), max_features)) + self.estimator = ETR( + n_estimators=0, criterion=self.criterion, + max_depth=self.max_depth, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + bootstrap=self.bootstrap, + max_features=max_features, max_leaf_nodes=self.max_leaf_nodes, + oob_score=self.oob_score, n_jobs=self.n_jobs, + verbose=self.verbose, + random_state=self.random_state, + warm_start=True + ) + tmp = self.estimator # TODO copy ? + tmp.n_estimators += n_iter + tmp.fit(X, y,) + self.estimator = tmp + return self + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + return not len(self.estimator.estimators_) < self.n_estimators + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(): + return {'shortname': 'ET', + 'name': 'Extra Trees Regressor', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': False, + 'handles_multiclass': False, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': True, + 'input': (DENSE, SPARSE), + 'output': PREDICTIONS, + # TODO find out what is best used here! + # But rather fortran or C-contiguous? + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + n_estimators = cs.add_hyperparameter(Constant("n_estimators", 100)) + criterion = cs.add_hyperparameter(Constant("criterion", "mse")) + max_features = cs.add_hyperparameter(UniformFloatHyperparameter( + "max_features", 0.5, 5, default=1)) + + max_depth = cs.add_hyperparameter( + UnParametrizedHyperparameter(name="max_depth", value="None")) + + min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_split", 2, 20, default=2)) + min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_leaf", 1, 20, default=1)) + + # Unparametrized, we use min_samples as regularization + # max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter( + # name="max_leaf_nodes_or_max_depth", value="max_depth") + # CategoricalHyperparameter("max_leaf_nodes_or_max_depth", + # choices=["max_leaf_nodes", "max_depth"], default="max_depth") + # min_weight_fraction_leaf = UniformFloatHyperparameter( + # "min_weight_fraction_leaf", 0.0, 0.1) + # max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", + # value="None") + + bootstrap = cs.add_hyperparameter(CategoricalHyperparameter( + "bootstrap", ["True", "False"], default="False")) + + # Conditions + # Not applicable because max_leaf_nodes is no legal value of the parent + #cond_max_leaf_nodes_or_max_depth = \ + # EqualsCondition(child=max_leaf_nodes, + # parent=max_leaf_nodes_or_max_depth, + # value="max_leaf_nodes") + #cond2_max_leaf_nodes_or_max_depth = \ + # EqualsCondition(child=use_max_depth, + # parent=max_leaf_nodes_or_max_depth, + # value="max_depth") + + #cond_max_depth = EqualsCondition(child=max_depth, parent=use_max_depth, + #value="True") + #cs.add_condition(cond_max_leaf_nodes_or_max_depth) + #cs.add_condition(cond2_max_leaf_nodes_or_max_depth) + #cs.add_condition(cond_max_depth) + + return cs diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index b287cca051..8e41f3bf0f 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -196,9 +196,9 @@ def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False, self.assertEqual(Xt.dtype, np.float64) -def _test_regressor(Regressor, dataset='diabetes'): +def _test_regressor(Regressor, dataset='diabetes', sparse=False): X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, - make_sparse=False) + make_sparse=sparse) configuration_space = Regressor.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() regressor = Regressor(random_state=1, @@ -217,6 +217,21 @@ def _test_regressor(Regressor, dataset='diabetes'): return predictions, Y_test +def _test_regressor_iterative_fit(Regressor, dataset='diabetes', sparse=False): + X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, + make_sparse=sparse) + configuration_space = Regressor.get_hyperparameter_search_space( + dataset_properties={'sparse': sparse}) + default = configuration_space.get_default_configuration() + regressor = Regressor(random_state=1, + **{hp_name: default[hp_name] for hp_name in + default}) + while not regressor.configuration_fully_fitted(): + regressor = regressor.iterative_fit(X_train, Y_train) + predictions = regressor.predict(X_test) + return predictions, Y_test + + if __name__ == "__main__": find_sklearn_classes(sklearn.base.ClassifierMixin) find_sklearn_classes(sklearn.base.RegressorMixin) diff --git a/misc/regressors.csv b/misc/regressors.csv index 7ee65254e3..e25c528686 100644 --- a/misc/regressors.csv +++ b/misc/regressors.csv @@ -6,7 +6,7 @@ class,added,comment ,False,Is a preprocessing method ,False,See module name ,False,This can blow up the configuration space; because we need to define a configured base object. Maybe consider later. -,, +,True, ,, ,True, ,True, diff --git a/tests/components/classification/test_extra_trees.py b/tests/components/classification/test_extra_trees.py index 5423851604..c58ef61eeb 100644 --- a/tests/components/classification/test_extra_trees.py +++ b/tests/components/classification/test_extra_trees.py @@ -15,6 +15,14 @@ def test_default_configuration(self): self.assertAlmostEqual(0.95999999999999996, sklearn.metrics.accuracy_score(predictions, targets)) + def test_default_configuration_sparse(self): + for i in range(10): + predictions, targets = \ + _test_classifier(ExtraTreesClassifier, sparse=True) + self.assertAlmostEqual(0.71999999999999997, + sklearn.metrics.accuracy_score(predictions, + targets)) + def test_default_configuration_iterative_fit(self): for i in range(10): predictions, targets = \ diff --git a/tests/components/regression/test_extra_trees.py b/tests/components/regression/test_extra_trees.py new file mode 100644 index 0000000000..d55de0a3b0 --- /dev/null +++ b/tests/components/regression/test_extra_trees.py @@ -0,0 +1,33 @@ +import unittest + +from ParamSklearn.components.regression.extra_trees import \ + ExtraTreesRegressor +from ParamSklearn.util import _test_regressor, _test_regressor_iterative_fit + +import sklearn.metrics + + +class ExtraTreesComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = \ + _test_regressor(ExtraTreesRegressor) + self.assertAlmostEqual(0.4269923975466271, + sklearn.metrics.r2_score(targets, + predictions)) + + def test_default_configuration_sparse(self): + for i in range(10): + predictions, targets = \ + _test_regressor(ExtraTreesRegressor, sparse=True) + self.assertAlmostEqual(0.26287621251507987, + sklearn.metrics.r2_score(targets, + predictions)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = \ + _test_regressor_iterative_fit(ExtraTreesRegressor) + self.assertAlmostEqual(0.4269923975466271, + sklearn.metrics.r2_score(targets, + predictions)) \ No newline at end of file diff --git a/tests/test_regression.py b/tests/test_regression.py index f97396f634..af6bdde84d 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -85,7 +85,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(75, len(hyperparameters)) + self.assertEqual(82, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From 3f0ac49bf76bf3ee690f6209248e65316fde9999 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Jul 2015 10:47:47 +0200 Subject: [PATCH 270/352] Update random forests to sklearn 0.16 --- .../classification/random_forest.py | 49 +++-- .../components/regression/random_forest.py | 167 +++++++++--------- .../classification/test_random_forest.py | 7 + .../regression/test_random_forests.py | 19 +- tests/test_classification.py | 5 +- tests/test_regression.py | 6 +- 6 files changed, 133 insertions(+), 120 deletions(-) diff --git a/ParamSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py index cea21a8864..a26e51c519 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -15,7 +15,8 @@ class RandomForest(ParamSklearnClassificationAlgorithm): def __init__(self, n_estimators, criterion, max_features, max_depth, min_samples_split, min_samples_leaf, - bootstrap, max_leaf_nodes, random_state=None, n_jobs=1): + min_weight_fraction_leaf, bootstrap, max_leaf_nodes, + random_state=None, n_jobs=1): self.n_estimators = n_estimators self.estimator_increment = 10 self.criterion = criterion @@ -23,6 +24,7 @@ def __init__(self, n_estimators, criterion, max_features, self.max_depth = max_depth self.min_samples_split = min_samples_split self.min_samples_leaf = min_samples_leaf + self.min_weight_fraction_leaf = min_weight_fraction_leaf self.bootstrap = bootstrap self.max_leaf_nodes = max_leaf_nodes self.random_state = random_state @@ -55,6 +57,8 @@ def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): max_features = int(float(self.max_features) * (np.log(num_features) + 1)) # Use at most half of the features max_features = max(1, min(int(X.shape[1] / 2), max_features)) + else: + max_features = self.max_features if self.bootstrap == "True": self.bootstrap = True else: @@ -70,13 +74,14 @@ def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): max_depth=self.max_depth, min_samples_split=self.min_samples_split, min_samples_leaf=self.min_samples_leaf, + min_weight_fraction_leaf=self.min_weight_fraction_leaf, bootstrap=self.bootstrap, max_leaf_nodes=self.max_leaf_nodes, random_state=self.random_state, n_jobs=self.n_jobs, warm_start=True) - tmp = self.estimator # TODO I think we need to copy here! + tmp = self.estimator tmp.n_estimators += n_iter tmp.fit(X, y, sample_weight=sample_weight) self.estimator = tmp @@ -106,7 +111,6 @@ def get_properties(): 'handles_nominal_values': False, 'handles_numerical_features': True, 'prefers_data_scaled': False, - # TODO find out if this is good because of sparcity... 'prefers_data_normalized': False, 'handles_regression': False, 'handles_classification': True, @@ -122,30 +126,19 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - #n_estimators = UniformIntegerHyperparameter( - # "n_estimators", 10, 100, default=10) - n_estimators = Constant("n_estimators", 100) - criterion = CategoricalHyperparameter( - "criterion", ["gini", "entropy"], default="gini") - #max_features = UniformFloatHyperparameter( - # "max_features", 0.01, 0.5, default=0.2) - max_features = UniformFloatHyperparameter( - "max_features", 0.5, 5, default=1) - max_depth = UnParametrizedHyperparameter("max_depth", "None") - min_samples_split = UniformIntegerHyperparameter( - "min_samples_split", 2, 20, default=2) - min_samples_leaf = UniformIntegerHyperparameter( - "min_samples_leaf", 1, 20, default=1) - max_leaf_nodes = UnParametrizedHyperparameter("max_leaf_nodes", "None") - bootstrap = CategoricalHyperparameter( - "bootstrap", ["True", "False"], default="True") cs = ConfigurationSpace() - cs.add_hyperparameter(n_estimators) - cs.add_hyperparameter(criterion) - cs.add_hyperparameter(max_features) - cs.add_hyperparameter(max_depth) - cs.add_hyperparameter(min_samples_split) - cs.add_hyperparameter(min_samples_leaf) - cs.add_hyperparameter(max_leaf_nodes) - cs.add_hyperparameter(bootstrap) + cs.add_hyperparameter(Constant("n_estimators", 100)) + cs.add_hyperparameter(CategoricalHyperparameter( + "criterion", ["gini", "entropy"], default="gini")) + cs.add_hyperparameter(UniformFloatHyperparameter( + "max_features", 0.5, 5, default=1)) + cs.add_hyperparameter(UnParametrizedHyperparameter("max_depth", "None")) + cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_split", 2, 20, default=2)) + cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_leaf", 1, 20, default=1)) + cs.add_hyperparameter(UnParametrizedHyperparameter("min_weight_fraction_leaf", 0.)) + cs.add_hyperparameter(UnParametrizedHyperparameter("max_leaf_nodes", "None")) + cs.add_hyperparameter(CategoricalHyperparameter( + "bootstrap", ["True", "False"], default="True")) return cs diff --git a/ParamSklearn/components/regression/random_forest.py b/ParamSklearn/components/regression/random_forest.py index f9328c0216..b117af732b 100644 --- a/ParamSklearn/components/regression/random_forest.py +++ b/ParamSklearn/components/regression/random_forest.py @@ -15,78 +15,83 @@ class RandomForest(ParamSklearnRegressionAlgorithm): def __init__(self, n_estimators, criterion, max_features, max_depth, min_samples_split, min_samples_leaf, - bootstrap, - max_leaf_nodes_or_max_depth="max_depth", - max_leaf_nodes=None, random_state=None, - n_jobs=1): - self.n_estimators = int(n_estimators) + min_weight_fraction_leaf, bootstrap, max_leaf_nodes, + random_state=None, n_jobs=1): + self.n_estimators = n_estimators self.estimator_increment = 10 - if criterion in ("mse",): - self.criterion = criterion - else: - raise ValueError("criterion should be in (mse,) but is: %s" % - str(criterion)) + self.criterion = criterion + self.max_features = max_features + self.max_depth = max_depth + self.min_samples_split = min_samples_split + self.min_samples_leaf = min_samples_leaf + self.min_weight_fraction_leaf = min_weight_fraction_leaf + self.bootstrap = bootstrap + self.max_leaf_nodes = max_leaf_nodes + self.random_state = random_state + self.n_jobs = n_jobs + self.estimator = None + + def fit(self, X, y, sample_weight=None, refit=False): + if self.estimator is None or refit: + self.iterative_fit(X, y, n_iter=1, refit=refit) + + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1) + return self - if max_features in ("sqrt", "log2", "auto"): - raise ValueError("'max_features' should be a float: %s" % - str(max_features)) - self.max_features = float(max_features) + def iterative_fit(self, X, y, n_iter=1, refit=False): + if refit: + self.estimator = None - self.max_leaf_nodes_or_max_depth = str(max_leaf_nodes_or_max_depth) - if self.max_leaf_nodes_or_max_depth == "max_depth": - if max_depth == 'None': + if self.estimator is None: + self.n_estimators = int(self.n_estimators) + if self.max_depth == "None": self.max_depth = None else: - self.max_depth = int(max_depth) - self.max_leaf_nodes = None - elif self.max_leaf_nodes_or_max_depth == "max_leaf_nodes": - self.max_depth = None - if max_leaf_nodes == 'None': - self.max_leaf_nodes = None + self.max_depth = int(self.max_depth) + self.min_samples_split = int(self.min_samples_split) + self.min_samples_leaf = int(self.min_samples_leaf) + if self.max_features not in ("sqrt", "log2", "auto"): + num_features = X.shape[1] + max_features = int( + float(self.max_features) * (np.log(num_features) + 1)) + # Use at most half of the features + max_features = max(1, min(int(X.shape[1] / 2), max_features)) else: - self.max_leaf_nodes = int(max_leaf_nodes) - else: - raise ValueError("max_leaf_nodes_or_max_depth sould be in " - "('max_leaf_nodes', 'max_depth'): %s" % - self.max_leaf_nodes_or_max_depth) - self.min_samples_split = int(min_samples_split) - self.min_samples_leaf = int(min_samples_leaf) - - if bootstrap == "True": - self.bootstrap = True - else: - self.bootstrap = False + max_features = self.max_features + if self.bootstrap == "True": + self.bootstrap = True + else: + self.bootstrap = False + if self.max_leaf_nodes == "None": + self.max_leaf_nodes = None - self.random_state = random_state - self.n_jobs = n_jobs - self.estimator = None + self.estimator = RandomForestRegressor( + n_estimators=0, + criterion=self.criterion, + max_features=max_features, + max_depth=self.max_depth, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + min_weight_fraction_leaf=self.min_weight_fraction_leaf, + bootstrap=self.bootstrap, + max_leaf_nodes=self.max_leaf_nodes, + random_state=self.random_state, + n_jobs=self.n_jobs, + warm_start=True) - def fit(self, X, Y): - num_features = X.shape[1] - max_features = int(float(self.max_features) * (np.log(num_features) + 1)) - # Use at most half of the features - max_features = max(1, min(int(X.shape[1] / 2), max_features)) - self.estimator = RandomForestRegressor( - n_estimators=0, - criterion=self.criterion, - max_features=max_features, - max_depth=self.max_depth, - min_samples_split=self.min_samples_split, - min_samples_leaf=self.min_samples_leaf, - bootstrap=self.bootstrap, - max_leaf_nodes=self.max_leaf_nodes, - random_state=self.random_state, - n_jobs=self.n_jobs, - warm_start=True) - # JTS TODO: I think we might have to copy here if we want self.estimator - # to always be consistent on sigabort - while len(self.estimator.estimators_) < self.n_estimators: - tmp = self.estimator # TODO copy ? - tmp.n_estimators += self.estimator_increment - tmp.fit(X, Y) - self.estimator = tmp + tmp = self.estimator + tmp.n_estimators += n_iter + tmp.fit(X, y) + self.estimator = tmp return self + def configuration_fully_fitted(self): + if self.estimator is None: + return False + + return not len(self.estimator.estimators_) < self.n_estimators + def predict(self, X): if self.estimator is None: raise NotImplementedError @@ -100,7 +105,6 @@ def get_properties(): 'handles_nominal_values': False, 'handles_numerical_features': True, 'prefers_data_scaled': False, - # TODO find out if this is good because of sparcity... 'handles_regression': True, 'handles_classification': False, 'handles_multiclass': False, @@ -116,28 +120,19 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - criterion = Constant(name="criterion", value="mse") - # Copied from classification/random_forest.py - #n_estimators = UniformIntegerHyperparameter( - # name="n_estimators", lower=10, upper=100, default=10, log=False) - n_estimators = Constant("n_estimators", 100) - max_features = UniformFloatHyperparameter( - "max_features", 0.5, 5, default=1) - max_depth = UnParametrizedHyperparameter("max_depth", "None") - min_samples_split = UniformIntegerHyperparameter( - name="min_samples_split", lower=2, upper=20, default=2, log=False) - min_samples_leaf = UniformIntegerHyperparameter( - name="min_samples_leaf", lower=1, upper=20, default=1, log=False) - bootstrap = CategoricalHyperparameter( - name="bootstrap", choices=["True", "False"], default="True") - cs = ConfigurationSpace() - cs.add_hyperparameter(n_estimators) - cs.add_hyperparameter(max_features) - cs.add_hyperparameter(max_depth) - cs.add_hyperparameter(min_samples_split) - cs.add_hyperparameter(min_samples_leaf) - cs.add_hyperparameter(bootstrap) - cs.add_hyperparameter(criterion) - + cs.add_hyperparameter(Constant("n_estimators", 100)) + cs.add_hyperparameter(Constant("criterion", "mse")) + cs.add_hyperparameter(UniformFloatHyperparameter( + "max_features", 0.5, 5, default=1)) + cs.add_hyperparameter(UnParametrizedHyperparameter("max_depth", "None")) + cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_split", 2, 20, default=2)) + cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_leaf", 1, 20, default=1)) + cs.add_hyperparameter( + UnParametrizedHyperparameter("min_weight_fraction_leaf", 0.)) + cs.add_hyperparameter(UnParametrizedHyperparameter("max_leaf_nodes", "None")) + cs.add_hyperparameter(CategoricalHyperparameter( + "bootstrap", ["True", "False"], default="True")) return cs diff --git a/tests/components/classification/test_random_forest.py b/tests/components/classification/test_random_forest.py index a947aae40a..ee53405031 100644 --- a/tests/components/classification/test_random_forest.py +++ b/tests/components/classification/test_random_forest.py @@ -13,6 +13,13 @@ def test_default_configuration(self): self.assertAlmostEqual(0.95999999999999996, sklearn.metrics.accuracy_score(predictions, targets)) + def test_default_configuration_sparse(self): + for i in range(10): + predictions, targets = _test_classifier(RandomForest, sparse=True) + self.assertAlmostEqual(0.85999999999999999, + sklearn.metrics.accuracy_score(predictions, + targets)) + def test_default_configuration_iterative_fit(self): for i in range(10): predictions, targets = \ diff --git a/tests/components/regression/test_random_forests.py b/tests/components/regression/test_random_forests.py index d49d7970f3..0909cbb0c6 100644 --- a/tests/components/regression/test_random_forests.py +++ b/tests/components/regression/test_random_forests.py @@ -1,7 +1,7 @@ import unittest from ParamSklearn.components.regression.random_forest import RandomForest -from ParamSklearn.util import _test_regressor +from ParamSklearn.util import _test_regressor, _test_regressor_iterative_fit import sklearn.metrics @@ -10,7 +10,20 @@ class RandomForestComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): - predictions, targets = _test_regressor(RandomForest, - dataset='diabetes') + predictions, targets = _test_regressor(RandomForest) + self.assertAlmostEqual(0.41224692924630502, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) + + + def test_default_configuration_sparse(self): + for i in range(10): + predictions, targets = _test_regressor(RandomForest, sparse=True) + self.assertAlmostEqual(0.24117530425422551, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = \ + _test_regressor_iterative_fit(RandomForest) self.assertAlmostEqual(0.41224692924630502, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) diff --git a/tests/test_classification.py b/tests/test_classification.py index fc17c74fba..62e6dfddbe 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -191,7 +191,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(136, len(hyperparameters)) + self.assertEqual(137, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 5, len(conditions)) @@ -227,6 +227,7 @@ def test_get_hyperparameter_search_space_preprocessor_contradicts_default_classi " classifier:random_forest:max_leaf_nodes, Constant: None\n" " classifier:random_forest:min_samples_leaf, Value: 1\n" " classifier:random_forest:min_samples_split, Value: 2\n" + " classifier:random_forest:min_weight_fraction_leaf, Constant: 0.0\n" " classifier:random_forest:n_estimators, Constant: 100\n" " imputation:strategy, Value: mean\n" " preprocessor:__choice__, Value: nystroem_sampler\n" @@ -355,6 +356,7 @@ def test_predict_batched_sparse(self): 'classifier:random_forest:max_features': 0.5, 'classifier:random_forest:max_leaf_nodes': 'None', 'classifier:random_forest:n_estimators': 100, + 'classifier:random_forest:min_weight_fraction_leaf': 0.0, "rescaling:strategy": "min/max"}) cls = ParamSklearnClassifier(config) @@ -434,6 +436,7 @@ def test_predict_proba_batched_sparse(self): 'classifier:random_forest:max_depth': 'None', 'classifier:random_forest:min_samples_split': 2, 'classifier:random_forest:min_samples_leaf': 2, + 'classifier:random_forest:min_weight_fraction_leaf': 0.0, 'classifier:random_forest:max_features': 0.5, 'classifier:random_forest:max_leaf_nodes': 'None', 'classifier:random_forest:n_estimators': 100, diff --git a/tests/test_regression.py b/tests/test_regression.py index af6bdde84d..e19809565c 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -76,7 +76,7 @@ def test_default_configuration(self): predictions = auto.predict(copy.deepcopy(X_test)) # The lower the worse r2_score = sklearn.metrics.r2_score(Y_test, predictions) - self.assertAlmostEqual(0.41211271098191482, r2_score) + self.assertAlmostEqual(0.41626416529791199, r2_score) model_score = auto.score(copy.deepcopy(X_test), Y_test) self.assertEqual(model_score, r2_score) @@ -85,7 +85,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(82, len(hyperparameters)) + self.assertEqual(84, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): @@ -119,8 +119,10 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): " regressor:random_forest:criterion, Constant: mse\n" " regressor:random_forest:max_depth, Constant: None\n" " regressor:random_forest:max_features, Value: 1.0\n" + " regressor:random_forest:max_leaf_nodes, Constant: None\n" " regressor:random_forest:min_samples_leaf, Value: 1\n" " regressor:random_forest:min_samples_split, Value: 2\n" + " regressor:random_forest:min_weight_fraction_leaf, Constant: 0.0\n" " regressor:random_forest:n_estimators, Constant: 100\n" " rescaling:strategy, Value: min/max\n" "violates forbidden clause \(Forbidden: regressor:__choice__ == random_forest" From 53da02f8d5923d32f3c11a28b0e11f64d905399d Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Jul 2015 11:28:41 +0200 Subject: [PATCH 271/352] Update gradient boosting to sklearn 0.16 --- .../classification/gradient_boosting.py | 155 +++++-------- .../classification/random_forest.py | 1 + .../regression/gradient_boosting.py | 212 +++++++++--------- .../classification/test_gradient_boosting.py | 1 - .../regression/test_gradient_boosting.py | 13 +- tests/test_classification.py | 2 +- tests/test_regression.py | 2 +- 7 files changed, 177 insertions(+), 209 deletions(-) diff --git a/ParamSklearn/components/classification/gradient_boosting.py b/ParamSklearn/components/classification/gradient_boosting.py index f0afff226c..c02cde2ebf 100644 --- a/ParamSklearn/components/classification/gradient_boosting.py +++ b/ParamSklearn/components/classification/gradient_boosting.py @@ -3,54 +3,31 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, UnParametrizedHyperparameter, Constant + UniformIntegerHyperparameter, UnParametrizedHyperparameter, Constant, \ + CategoricalHyperparameter from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS class GradientBoostingClassifier(ParamSklearnClassificationAlgorithm): - - def __init__(self, learning_rate, n_estimators, subsample, - min_samples_split, min_samples_leaf, max_features, max_depth, - max_leaf_nodes_or_max_depth="max_depth", - max_leaf_nodes=None, loss='deviance', - init=None, random_state=None, verbose=0, estimator_increment=10): - - self.max_leaf_nodes_or_max_depth = str(max_leaf_nodes_or_max_depth) - - if self.max_leaf_nodes_or_max_depth == "max_depth": - if max_depth == 'None': - self.max_depth = None - else: - self.max_depth = int(max_depth) - self.max_leaf_nodes = None - elif self.max_leaf_nodes_or_max_depth == "max_leaf_nodes": - self.max_depth = None - if max_leaf_nodes == 'None': - self.max_leaf_nodes = None - else: - self.max_leaf_nodes = int(max_leaf_nodes) - else: - raise ValueError("max_leaf_nodes_or_max_depth sould be in " - "('max_leaf_nodes', 'max_depth'): %s" % - self.max_leaf_nodes_or_max_depth) - - self.learning_rate = float(learning_rate) - self.n_estimators = int(n_estimators) - self.estimator_increment = int(estimator_increment) - self.subsample = float(subsample) - self.min_samples_split = int(min_samples_split) - self.min_samples_leaf = int(min_samples_leaf) - if max_features in ("sqrt", "log2", "auto"): - raise ValueError("'max_features' should be a float: %s" % - max_features) - self.max_features = float(max_features) - + def __init__(self, loss, learning_rate, n_estimators, subsample, + min_samples_split, min_samples_leaf, + min_weight_fraction_leaf, max_depth, max_features, + max_leaf_nodes, init=None, random_state=None, verbose=0): self.loss = loss + self.learning_rate = learning_rate + self.n_estimators = n_estimators + self.subsample = subsample + self.min_samples_split = min_samples_split + self.min_samples_leaf = min_samples_leaf + self.min_weight_fraction_leaf = min_weight_fraction_leaf + self.max_depth = max_depth + self.max_features = max_features + self.max_leaf_nodes = max_leaf_nodes self.init = init self.random_state = random_state - self.verbose = int(verbose) + self.verbose = verbose self.estimator = None def fit(self, X, y, sample_weight=None, refit=False): @@ -70,24 +47,42 @@ def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): self.estimator = None if self.estimator is None: + self.learning_rate = float(self.learning_rate) + self.n_estimators = int(self.n_estimators) + self.subsample = float(self.subsample) + self.min_samples_split = int(self.min_samples_split) + self.min_samples_leaf = int(self.min_samples_leaf) + self.min_weight_fraction_leaf = float(self.min_weight_fraction_leaf) + if self.max_depth == "None": + self.max_depth = None + else: + self.max_depth = int(self.max_depth) num_features = X.shape[1] - max_features = int(float(self.max_features) * (np.log(num_features) + 1)) + max_features = int( + float(self.max_features) * (np.log(num_features) + 1)) # Use at most half of the features max_features = max(1, min(int(X.shape[1] / 2), max_features)) + if self.max_leaf_nodes == "None": + self.max_leaf_nodes = None + else: + self.max_leaf_nodes = int(self.max_leaf_nodes) + self.verbose = int(self.verbose) + self.estimator = sklearn.ensemble.GradientBoostingClassifier( + loss=self.loss, learning_rate=self.learning_rate, n_estimators=0, subsample=self.subsample, min_samples_split=self.min_samples_split, min_samples_leaf=self.min_samples_leaf, + min_weight_fraction_leaf=self.min_weight_fraction_leaf, + max_depth=self.max_depth, max_features=max_features, max_leaf_nodes=self.max_leaf_nodes, - loss=self.loss, - max_depth=self.max_depth, - warm_start=True, init=self.init, random_state=self.random_state, - verbose=self.verbose + verbose=self.verbose, + warm_start=True, ) tmp = self.estimator # TODO copy ? @@ -137,59 +132,25 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - learning_rate = UniformFloatHyperparameter( - name="learning_rate", lower=0.0001, upper=1, default=0.1, log=True) - subsample = UniformFloatHyperparameter( - name="subsample", lower=0.01, upper=1.0, default=1.0, log=False) - - # Unparametrized - #max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter( - # name="max_leaf_nodes_or_max_depth", value="max_depth") - # CategoricalHyperparameter("max_leaf_nodes_or_max_depth", - # choices=["max_leaf_nodes", "max_depth"], default="max_depth") - - max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", - value="None") - - - # Copied from random_forest.py - #n_estimators = UniformIntegerHyperparameter( - # name="n_estimators", lower=10, upper=100, default=10, log=False) - n_estimators = Constant("n_estimators", 100) - #max_features = UniformFloatHyperparameter( - # name="max_features", lower=0.01, upper=0.5, default=0.1) - max_features = UniformFloatHyperparameter( - "max_features", 0.5, 5, default=1) - max_depth = UniformIntegerHyperparameter( - name="max_depth", lower=1, upper=10, default=3) - min_samples_split = UniformIntegerHyperparameter( - name="min_samples_split", lower=2, upper=20, default=2, log=False) - min_samples_leaf = UniformIntegerHyperparameter( - name="min_samples_leaf", lower=1, upper=20, default=1, log=False) - cs = ConfigurationSpace() - cs.add_hyperparameter(n_estimators) - cs.add_hyperparameter(learning_rate) - cs.add_hyperparameter(max_features) - #cs.add_hyperparameter(max_leaf_nodes_or_max_depth) - #cs.add_hyperparameter(max_leaf_nodes) - cs.add_hyperparameter(max_depth) - cs.add_hyperparameter(min_samples_split) - cs.add_hyperparameter(min_samples_leaf) - cs.add_hyperparameter(subsample) - - # Conditions - #cond_max_leaf_nodes_or_max_depth = \ - # EqualsCondition(child=max_leaf_nodes, - # parent=max_leaf_nodes_or_max_depth, - # value="max_leaf_nodes") - - #cond2_max_leaf_nodes_or_max_depth = \ - # EqualsCondition(child=max_depth, - # parent=max_leaf_nodes_or_max_depth, - # value="max_depth") - - #cs.add_condition(cond_max_leaf_nodes_or_max_depth) - #cs.add_condition(cond2_max_leaf_nodes_or_max_depth) + loss = cs.add_hyperparameter(Constant("loss", "deviance")) + learning_rate = cs.add_hyperparameter(UniformFloatHyperparameter( + name="learning_rate", lower=0.0001, upper=1, default=0.1, log=True)) + n_estimators = cs.add_hyperparameter(Constant("n_estimators", 100)) + max_depth = cs.add_hyperparameter(UniformIntegerHyperparameter( + name="max_depth", lower=1, upper=10, default=3)) + min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter( + name="min_samples_split", lower=2, upper=20, default=2, log=False)) + min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter( + name="min_samples_leaf", lower=1, upper=20, default=1, log=False)) + min_weight_fraction_leaf = cs.add_hyperparameter( + UnParametrizedHyperparameter("min_weight_fraction_leaf", 0.)) + subsample = cs.add_hyperparameter(UniformFloatHyperparameter( + name="subsample", lower=0.01, upper=1.0, default=1.0, log=False)) + max_features = cs.add_hyperparameter(UniformFloatHyperparameter( + "max_features", 0.5, 5, default=1)) + max_leaf_nodes = cs.add_hyperparameter(UnParametrizedHyperparameter( + name="max_leaf_nodes", value="None")) + return cs diff --git a/ParamSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py index a26e51c519..9b214f5268 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -52,6 +52,7 @@ def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): self.max_depth = int(self.max_depth) self.min_samples_split = int(self.min_samples_split) self.min_samples_leaf = int(self.min_samples_leaf) + self.min_weight_fraction_leaf = float(self.min_weight_fraction_leaf) if self.max_features not in ("sqrt", "log2", "auto"): num_features = X.shape[1] max_features = int(float(self.max_features) * (np.log(num_features) + 1)) diff --git a/ParamSklearn/components/regression/gradient_boosting.py b/ParamSklearn/components/regression/gradient_boosting.py index 41cee7ff5d..bbcbc87647 100644 --- a/ParamSklearn/components/regression/gradient_boosting.py +++ b/ParamSklearn/components/regression/gradient_boosting.py @@ -3,98 +3,105 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, CategoricalHyperparameter, Constant + UniformIntegerHyperparameter, CategoricalHyperparameter, Constant, \ + UnParametrizedHyperparameter +from HPOlibConfigSpace.conditions import InCondition from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm from ParamSklearn.util import DENSE, PREDICTIONS class GradientBoosting(ParamSklearnRegressionAlgorithm): - def __init__(self, - loss, learning_rate, subsample, min_samples_split, - min_samples_leaf, max_depth, max_features, alpha=0.9, - max_leaf_nodes=None, estimator_increment=10, - max_leaf_nodes_or_max_depth="max_depth", - n_estimators=100, init=None, random_state=None, verbose=0): + def __init__(self, loss, learning_rate, n_estimators, subsample, + min_samples_split, min_samples_leaf, + min_weight_fraction_leaf, max_depth, max_features, + max_leaf_nodes, alpha=None, init=None, random_state=None, + verbose=0): + self.loss = loss + self.learning_rate = learning_rate + self.n_estimators = n_estimators + self.subsample = subsample + self.min_samples_split = min_samples_split + self.min_samples_leaf = min_samples_leaf + self.min_weight_fraction_leaf = min_weight_fraction_leaf + self.max_depth = max_depth + self.max_features = max_features + self.max_leaf_nodes = max_leaf_nodes + self.alpha = alpha + self.init = init + self.random_state = random_state + self.verbose = verbose + self.estimator = None + + def fit(self, X, y, sample_weight=None, refit=False): + if self.estimator is None or refit: + self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight, + refit=refit) - self.max_leaf_nodes_or_max_depth = str(max_leaf_nodes_or_max_depth) + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight) + return self - if self.max_leaf_nodes_or_max_depth == "max_depth": - if max_depth == 'None': + def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): + # Special fix for gradient boosting! + if isinstance(X, np.ndarray): + X = np.ascontiguousarray(X, dtype=X.dtype) + if refit: + self.estimator = None + + if self.estimator is None: + self.learning_rate = float(self.learning_rate) + self.n_estimators = int(self.n_estimators) + self.subsample = float(self.subsample) + self.min_samples_split = int(self.min_samples_split) + self.min_samples_leaf = int(self.min_samples_leaf) + self.min_weight_fraction_leaf = float(self.min_weight_fraction_leaf) + if self.max_depth == "None": self.max_depth = None else: - self.max_depth = int(max_depth) - self.max_leaf_nodes = None - elif self.max_leaf_nodes_or_max_depth == "max_leaf_nodes": - self.max_depth = None - if max_leaf_nodes == 'None': + self.max_depth = int(self.max_depth) + num_features = X.shape[1] + max_features = int( + float(self.max_features) * (np.log(num_features) + 1)) + # Use at most half of the features + max_features = max(1, min(int(X.shape[1] / 2), max_features)) + if self.max_leaf_nodes == "None": self.max_leaf_nodes = None else: - self.max_leaf_nodes = int(max_leaf_nodes) - else: - raise ValueError("max_leaf_nodes_or_max_depth sould be in " - "('max_leaf_nodes', 'max_depth'): %s" % - self.max_leaf_nodes_or_max_depth) - - if loss in ("ls", "lad", "huber", "quantile"): - self.loss = loss - else: - raise ValueError("'loss' should be in ('ls', 'lad', 'huber', " - "'quantile'), but is %s" % str(loss)) - self.learning_rate = float(learning_rate) - self.subsample = float(subsample) - self.min_samples_split = int(float(min_samples_split)) - self.min_samples_leaf = int(float(min_samples_leaf)) - self.max_depth = int(float(max_depth)) - - if self.loss in ('huber', 'quantile'): - self.alpha = float(alpha) - else: - self.alpha = 0.9 # default value - - self.n_estimators = n_estimators - - self.estimator_increment = int(estimator_increment) - self.init = init + self.max_leaf_nodes = int(self.max_leaf_nodes) + if self.alpha is not None: + self.alpha = float(self.alpha) + self.verbose = int(self.verbose) + + self.estimator = sklearn.ensemble.GradientBoostingRegressor( + loss=self.loss, + learning_rate=self.learning_rate, + n_estimators=0, + subsample=self.subsample, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + min_weight_fraction_leaf=self.min_weight_fraction_leaf, + max_depth=self.max_depth, + max_features=max_features, + max_leaf_nodes=self.max_leaf_nodes, + init=self.init, + random_state=self.random_state, + verbose=self.verbose, + warm_start=True, + ) + + tmp = self.estimator # TODO copy ? + tmp.n_estimators += n_iter + tmp.fit(X, y, sample_weight=sample_weight) + self.estimator = tmp - # We handle this later - self.max_features = float(max_features) + return self - # Defaults - self.random_state = random_state - self.verbose = int(verbose) - self.estimator = None - def fit(self, X, Y): - num_features = X.shape[1] - max_features = int(float(self.max_features) * (np.log(num_features) + 1)) - # Use at most half of the features - max_features = max(1, min(int(X.shape[1] / 2), max_features)) - - self.estimator = sklearn.ensemble.GradientBoostingRegressor( - max_leaf_nodes=self.max_leaf_nodes, - loss=self.loss, - learning_rate=self.learning_rate, - n_estimators=0, - subsample=self.subsample, - min_samples_split=self.min_samples_split, - min_samples_leaf=self.min_samples_leaf, - max_depth=self.max_depth, - init=self.init, - max_features=max_features, - alpha=self.alpha, - warm_start=True, - random_state=self.random_state, - verbose=self.verbose - ) - # JTS TODO: I think we might have to copy here if we want self.estimator - # to always be consistent on sigabort - while len(self.estimator.estimators_) < self.n_estimators: - tmp = self.estimator # TODO I think we need to copy here! - tmp.n_estimators += self.estimator_increment - tmp.fit(X, Y) - self.estimator = tmp - return self + def configuration_fully_fitted(self): + if self.estimator is None: + return False + return not len(self.estimator.estimators_) < self.n_estimators def predict(self, X): if self.estimator is None: @@ -125,33 +132,28 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - - loss = CategoricalHyperparameter( - name="loss", choices=["ls", "lad"], default='ls') #, "huber", "quantile"], default='ls') - - learning_rate = UniformFloatHyperparameter( - name="learning_rate", lower=0.0001, upper=1, default=0.1, log=True) - subsample = UniformFloatHyperparameter( - name="subsample", lower=0.01, upper=1.0, default=1.0, log=False) - - n_estimators = Constant("n_estimators", 100) - - max_features = UniformFloatHyperparameter( - "max_features", 0.5, 5, default=1) - max_depth = UniformIntegerHyperparameter( - name="max_depth", lower=1, upper=10, default=3) - min_samples_split = UniformIntegerHyperparameter( - name="min_samples_split", lower=2, upper=20, default=2, log=False) - min_samples_leaf = UniformIntegerHyperparameter( - name="min_samples_leaf", lower=1, upper=20, default=1, log=False) - cs = ConfigurationSpace() - cs.add_hyperparameter(n_estimators) - cs.add_hyperparameter(loss) - cs.add_hyperparameter(learning_rate) - cs.add_hyperparameter(max_features) - cs.add_hyperparameter(max_depth) - cs.add_hyperparameter(min_samples_split) - cs.add_hyperparameter(min_samples_leaf) - cs.add_hyperparameter(subsample) + loss = cs.add_hyperparameter(CategoricalHyperparameter( + "loss", ["ls", "lad", "huber", "quantile"], default="ls")) + learning_rate = cs.add_hyperparameter(UniformFloatHyperparameter( + name="learning_rate", lower=0.0001, upper=1, default=0.1, log=True)) + n_estimators = cs.add_hyperparameter(Constant("n_estimators", 100)) + max_depth = cs.add_hyperparameter(UniformIntegerHyperparameter( + name="max_depth", lower=1, upper=10, default=3)) + min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter( + name="min_samples_split", lower=2, upper=20, default=2, log=False)) + min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter( + name="min_samples_leaf", lower=1, upper=20, default=1, log=False)) + min_weight_fraction_leaf = cs.add_hyperparameter( + UnParametrizedHyperparameter("min_weight_fraction_leaf", 0.)) + subsample = cs.add_hyperparameter(UniformFloatHyperparameter( + name="subsample", lower=0.01, upper=1.0, default=1.0, log=False)) + max_features = cs.add_hyperparameter(UniformFloatHyperparameter( + "max_features", 0.5, 5, default=1)) + max_leaf_nodes = cs.add_hyperparameter(UnParametrizedHyperparameter( + name="max_leaf_nodes", value="None")) + alpha = cs.add_hyperparameter(UniformFloatHyperparameter( + "alpha", lower=0.75, upper=0.99, default=0.9)) + + cs.add_condition(InCondition(alpha, loss, ['huber', 'quantile'])) return cs \ No newline at end of file diff --git a/tests/components/classification/test_gradient_boosting.py b/tests/components/classification/test_gradient_boosting.py index 06037459a4..b157d1197f 100644 --- a/tests/components/classification/test_gradient_boosting.py +++ b/tests/components/classification/test_gradient_boosting.py @@ -15,7 +15,6 @@ def test_default_configuration(self): self.assertAlmostEqual(0.95999999999999996, sklearn.metrics.accuracy_score(predictions, targets)) - def test_default_configuration_iterative_fit(self): for i in range(10): predictions, targets = \ diff --git a/tests/components/regression/test_gradient_boosting.py b/tests/components/regression/test_gradient_boosting.py index 2b4f67d124..5f7074c8ce 100644 --- a/tests/components/regression/test_gradient_boosting.py +++ b/tests/components/regression/test_gradient_boosting.py @@ -1,7 +1,7 @@ import unittest from ParamSklearn.components.regression.gradient_boosting import GradientBoosting -from ParamSklearn.util import _test_regressor +from ParamSklearn.util import _test_regressor, _test_regressor_iterative_fit import sklearn.metrics @@ -10,7 +10,12 @@ class GradientBoostingComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): - predictions, targets = _test_regressor(GradientBoosting, - dataset='diabetes') - self.assertAlmostEqual(0.38851325425603489, + predictions, targets = _test_regressor(GradientBoosting) + self.assertAlmostEqual(0.35273007696557712, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = _test_regressor(GradientBoosting) + self.assertAlmostEqual(0.35273007696557712, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) diff --git a/tests/test_classification.py b/tests/test_classification.py index 62e6dfddbe..53135d9690 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -191,7 +191,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(137, len(hyperparameters)) + self.assertEqual(140, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 5, len(conditions)) diff --git a/tests/test_regression.py b/tests/test_regression.py index e19809565c..dc15d8ee70 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -85,7 +85,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(84, len(hyperparameters)) + self.assertEqual(87, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From d14fc4c23121049ca6ea1b6be7633abd9cef181a Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Jul 2015 11:39:24 +0200 Subject: [PATCH 272/352] Update Ridge to sklearn 0.16 --- .../components/classification/ridge.py | 24 +++++----- .../components/regression/ridge_regression.py | 44 +++++++++---------- .../regression/test_ridge_regression.py | 7 ++- tests/test_regression.py | 4 +- 4 files changed, 37 insertions(+), 42 deletions(-) diff --git a/ParamSklearn/components/classification/ridge.py b/ParamSklearn/components/classification/ridge.py index 99f6b3ef6d..6b758b4635 100644 --- a/ParamSklearn/components/classification/ridge.py +++ b/ParamSklearn/components/classification/ridge.py @@ -14,7 +14,7 @@ class Ridge(ParamSklearnClassificationAlgorithm): def __init__(self, alpha, fit_intercept, tol, class_weight=None, - random_state=None): + random_state=None): self.alpha = float(alpha) self.fit_intercept = bool(fit_intercept) self.tol = float(tol) @@ -26,7 +26,9 @@ def fit(self, X, Y): self.estimator = RidgeClassifier(alpha=self.alpha, fit_intercept=self.fit_intercept, tol=self.tol, - class_weight=self.class_weight) + class_weight=self.class_weight, + copy_X=False, + normalize=False) self.estimator.fit(X, Y) return self @@ -44,7 +46,7 @@ def predict_proba(self, X): @staticmethod def get_properties(): - return {'shortname': 'Rigde Classifier', + return {'shortname': 'Rigde', 'name': 'Rigde Classifier', 'handles_missing_values': False, 'handles_nominal_values': False, @@ -64,17 +66,13 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - alpha = UniformFloatHyperparameter("alpha", 10 ** -5, 10., - log=True, default=1.) - fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True") - tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default=1e-4, - log=True) - cs = ConfigurationSpace() - cs.add_hyperparameter(alpha) - cs.add_hyperparameter(fit_intercept) - cs.add_hyperparameter(tol) - + alpha = cs.add_hyperparameter(UniformFloatHyperparameter( + "alpha", 10 ** -5, 10., log=True, default=1.)) + fit_intercept = cs.add_hyperparameter(UnParametrizedHyperparameter( + "fit_intercept", "True")) + tol = cs.add_hyperparameter(UniformFloatHyperparameter( + "tol", 1e-5, 1e-1, default=1e-4, log=True)) return cs def __str__(self): diff --git a/ParamSklearn/components/regression/ridge_regression.py b/ParamSklearn/components/regression/ridge_regression.py index 302848821e..f7e2d190a6 100644 --- a/ParamSklearn/components/regression/ridge_regression.py +++ b/ParamSklearn/components/regression/ridge_regression.py @@ -2,36 +2,27 @@ import sklearn.linear_model from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UnParametrizedHyperparameter from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS class RidgeRegression(ParamSklearnRegressionAlgorithm): - def __init__(self, alpha, fit_intercept=False, normalize=False, - copy_X=False, max_iter=None, tol=0.001, solver='auto', - random_state=None): + def __init__(self, alpha, fit_intercept, tol, random_state=None): self.alpha = float(alpha) - self.fit_intercept = fit_intercept - self.normalize = normalize - self.copy_X = copy_X - self.max_iter = max_iter - self.tol = tol - self.solver = solver - # We ignore it + self.fit_intercept = bool(fit_intercept) + self.tol = float(tol) self.random_state = random_state self.estimator = None def fit(self, X, Y): - self.estimator = sklearn.linear_model.Ridge( - alpha=self.alpha, - fit_intercept=self.fit_intercept, - normalize=self.normalize, - copy_X=self.copy_X, - max_iter=self.max_iter, - tol=self.tol, - solver=self.solver) + self.estimator = sklearn.linear_model.Ridge(alpha=self.alpha, + fit_intercept=self.fit_intercept, + tol=self.tol, + copy_X=False, + normalize=False) self.estimator.fit(X, Y) return self @@ -42,7 +33,7 @@ def predict(self, X): @staticmethod def get_properties(): - return {'shortname': 'RR', + return {'shortname': 'Rigde', 'name': 'Ridge Regression', 'handles_missing_values': False, 'handles_nominal_values': False, @@ -64,9 +55,14 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - alpha = UniformFloatHyperparameter( - name="alpha", lower=0.0001, upper=10, default=1.0, log=True) - cs = ConfigurationSpace() - cs.add_hyperparameter(alpha) + alpha = cs.add_hyperparameter(UniformFloatHyperparameter( + "alpha", 10 ** -5, 10., log=True, default=1.)) + fit_intercept = cs.add_hyperparameter(UnParametrizedHyperparameter( + "fit_intercept", "True")) + tol = cs.add_hyperparameter(UniformFloatHyperparameter( + "tol", 1e-5, 1e-1, default=1e-4, log=True)) return cs + + def __str__(self): + return "ParamSklearn Ridge Regression" diff --git a/tests/components/regression/test_ridge_regression.py b/tests/components/regression/test_ridge_regression.py index 908b55f3c8..e6e2ee5aad 100644 --- a/tests/components/regression/test_ridge_regression.py +++ b/tests/components/regression/test_ridge_regression.py @@ -16,9 +16,8 @@ def test_default_configuration(self): for i in range(10): # This should be a bad results - predictions, targets = _test_regressor(RidgeRegression, - dataset='diabetes') - self.assertAlmostEqual(-3.5118220034267313, + predictions, targets = _test_regressor(RidgeRegression,) + self.assertAlmostEqual(0.32614416980439365, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) # This should be much more better @@ -40,5 +39,5 @@ def test_default_configuration(self): predictor = regressor.fit(X_train_transformed, Y_train) predictions = predictor.predict(X_test_transformed) - self.assertAlmostEqual(0.37173953934750514, + self.assertAlmostEqual(0.37183512452087852, sklearn.metrics.r2_score(y_true=Y_test, y_pred=predictions)) \ No newline at end of file diff --git a/tests/test_regression.py b/tests/test_regression.py index dc15d8ee70..e651504580 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -85,7 +85,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(87, len(hyperparameters)) + self.assertEqual(89, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): @@ -138,6 +138,8 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): " preprocessor:__choice__, Value: densifier\n" " regressor:__choice__, Value: ridge_regression\n" " regressor:ridge_regression:alpha, Value: 1.0\n" + " regressor:ridge_regression:fit_intercept, Constant: True\n" + " regressor:ridge_regression:tol, Value: 0.0001\n" " rescaling:strategy, Value: min/max\n" "violates forbidden clause \(Forbidden: regressor:__choice__ == " "ridge_regression && Forbidden: preprocessor:__choice__ == densifier\)", From ec918bf08449450738f7916e738ba2e56f14bff9 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Jul 2015 12:57:33 +0200 Subject: [PATCH 273/352] Reactivate Support Vector Regression --- .../regression/support_vector_regression.py | 80 +++++++++---------- .../test_support_vector_regression.py | 14 ++-- tests/test_regression.py | 2 +- 3 files changed, 48 insertions(+), 48 deletions(-) diff --git a/ParamSklearn/components/regression/support_vector_regression.py b/ParamSklearn/components/regression/support_vector_regression.py index 69b910defe..e7bc954df9 100644 --- a/ParamSklearn/components/regression/support_vector_regression.py +++ b/ParamSklearn/components/regression/support_vector_regression.py @@ -12,67 +12,66 @@ from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS -# Something is wrong here... -""" class SupportVectorRegression(ParamSklearnRegressionAlgorithm): - def __init__(self, kernel, C, epsilon, degree, coef0, tol, shrinking, - gamma=0.0, probability=False, cache_size=2000, verbose=False, - max_iter=-1, random_state=None - ): - - if kernel in ('linear', 'poly', 'rbf', 'sigmoid'): - self.kernel = kernel - else: - raise ValueError("'kernel' must be in ('linear', 'poly', 'rbf', " - "'sigmoid'), but is %s" % str(kernel)) - self.gamma = float(gamma) - self.C = float(C) + def __init__(self, kernel, C, epsilon, tol, shrinking, gamma=0.0, + degree=3, coef0=0.0, cache_size=2000, verbose=False, + max_iter=-1, random_state=None): + self.kernel = kernel + self.C = C self.epsilon = epsilon - self.degree = int(float(degree)) - self.coef0 = float(coef0) - self.tol = float(tol) - - if shrinking == "True": - self.shrinking = True - elif shrinking == "False": - self.shrinking = False - else: - raise ValueError("'shrinking' must be in ('True', 'False'), " - "but is %s" % str(shrinking)) - - # We don't assume any hyperparameters here - self.probability = probability + self.tol = tol + self.shrinking = shrinking + self.degree = degree + self.gamma = gamma + self.coef0 = coef0 self.cache_size = cache_size self.verbose = verbose - self.max_iter = int(float(max_iter)) + self.max_iter = max_iter self.random_state = random_state self.estimator = None def fit(self, X, Y): + self.C = float(self.C) + self.epsilon = float(self.epsilon) + self.tol = float(self.tol) + self.shrinking = bool(self.shrinking) + self.degree = int(self.degree) + self.gamma = float(self.gamma) + if self.coef0 is None: + self.coef0 = 0.0 + else: + self.coef0 = float(self.coef0) + self.cache_size = int(self.cache_size) + self.verbose = int(self.verbose) + self.max_iter = int(self.max_iter) self.estimator = sklearn.svm.SVR( kernel=self.kernel, - degree=self.degree, - gamma=self.gamma, - coef0=self.coef0, - tol=self.tol, C=self.C, epsilon=self.epsilon, + tol=self.tol, shrinking=self.shrinking, - probability=self.probability, + degree=self.degree, + gamma=self.gamma, + coef0=self.coef0, cache_size=self.cache_size, verbose=self.verbose, - max_iter=self.max_iter, - random_state=self.random_state + max_iter=self.max_iter ) + self.scaler = sklearn.preprocessing.StandardScaler(copy=True) - self.estimator.fit(X, Y) + self.scaler.fit(Y) + Y_scaled = self.scaler.transform(Y) + self.estimator.fit(X, Y_scaled) return self def predict(self, X): if self.estimator is None: raise NotImplementedError - return self.estimator.predict(X) + if self.scaler is None: + raise NotImplementedError + Y_pred = self.estimator.predict(X) + return self.scaler.inverse_transform(Y_pred) @staticmethod def get_properties(): @@ -91,13 +90,13 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'input': (SPARSE, DENSE), - 'ouput': PREDICTIONS, + 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} @staticmethod - def get_hyperparameter_search_space(dataset_properties): + def get_hyperparameter_search_space(dataset_properties=None): # Copied from libsvm_c C = UniformFloatHyperparameter( name="C", lower=0.03125, upper=32768, log=True, default=1.0) @@ -146,4 +145,3 @@ def get_hyperparameter_search_space(dataset_properties): cs.add_condition(gamma_depends_on_kernel) cs.add_condition(coef0_depends_on_kernel) return cs -""" diff --git a/tests/components/regression/test_support_vector_regression.py b/tests/components/regression/test_support_vector_regression.py index 9d20282eb2..5dbb035255 100644 --- a/tests/components/regression/test_support_vector_regression.py +++ b/tests/components/regression/test_support_vector_regression.py @@ -1,5 +1,5 @@ import unittest -""" + from ParamSklearn.components.regression.support_vector_regression import SupportVectorRegression from ParamSklearn.util import _test_regressor @@ -10,12 +10,14 @@ class SupportVectorComponentTest(unittest.TestCase): def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_regressor(SupportVectorRegression) + self.assertAlmostEqual(0.12849591861430087, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) + def test_default_configuration_sparse(self): for i in range(10): predictions, targets = _test_regressor(SupportVectorRegression, - dataset='boston') - print predictions - print targets - self.assertAlmostEqual(-0.070779979927571235, + sparse=True) + self.assertAlmostEqual(0.0098877566961463881, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) -""" \ No newline at end of file diff --git a/tests/test_regression.py b/tests/test_regression.py index e651504580..55f45370c5 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -85,7 +85,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(89, len(hyperparameters)) + self.assertEqual(98, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From e23bcc6338a3407a163f38ad5b6e8a010286b089 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Jul 2015 13:32:55 +0200 Subject: [PATCH 274/352] Update AdaBoostClassifier, add AdaBoostRegressor --- .../components/classification/adaboost.py | 41 ++++------ .../components/regression/adaboost.py | 82 +++++++++++++++++++ .../classification/test_adaboost.py | 8 ++ tests/components/regression/test_adaboost.py | 25 ++++++ 4 files changed, 131 insertions(+), 25 deletions(-) create mode 100644 ParamSklearn/components/regression/adaboost.py create mode 100644 tests/components/regression/test_adaboost.py diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index aea98d2129..fccb7600e7 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -4,7 +4,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter, Constant + UniformIntegerHyperparameter, CategoricalHyperparameter from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS @@ -12,20 +12,19 @@ class AdaboostClassifier(ParamSklearnClassificationAlgorithm): - def __init__(self, n_estimators, learning_rate, algorithm='SAMME.R', - max_depth=1, random_state=None): + def __init__(self, n_estimators, learning_rate, algorithm, max_depth, + random_state=None): self.n_estimators = int(n_estimators) self.learning_rate = float(learning_rate) - - if algorithm not in ('SAMME.R', "SAMME"): - raise ValueError("Illegal 'algorithm': %s" % algorithm) self.algorithm = algorithm self.random_state = random_state self.max_depth = max_depth - self.estimator = None def fit(self, X, Y, sample_weight=None): + self.n_estimators = int(self.n_estimators) + self.learning_rate = float(self.learning_rate) + self.max_depth = int(self.max_depth) base_estimator = sklearn.tree.DecisionTreeClassifier(max_depth=self.max_depth) self.estimator = sklearn.ensemble.AdaBoostClassifier( @@ -64,7 +63,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE,), + 'input': (DENSE, SPARSE), 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? @@ -72,24 +71,16 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - learning_rate = UniformFloatHyperparameter( - name="learning_rate", lower=0.0001, upper=1, default=0.1, log=True) - algorithm = Constant(name="algorithm", value="SAMME.R") - #base_estimator = Constant(name="base_estimator", value="None") - - n_estimators = UniformIntegerHyperparameter( - name="n_estimators", lower=50, upper=500, default=50, log=False) - - max_depth = UniformIntegerHyperparameter( - name="max_depth", lower=1, upper=10, default=1, log=False) - - cs = ConfigurationSpace() - cs.add_hyperparameter(n_estimators) - cs.add_hyperparameter(learning_rate) - #cs.add_hyperparameter(base_estimator) - cs.add_hyperparameter(max_depth) - cs.add_hyperparameter(algorithm) + # base_estimator = Constant(name="base_estimator", value="None") + n_estimators = cs.add_hyperparameter(UniformIntegerHyperparameter( + name="n_estimators", lower=50, upper=500, default=50, log=False)) + learning_rate = cs.add_hyperparameter(UniformFloatHyperparameter( + name="learning_rate", lower=0.0001, upper=2, default=0.1, log=True)) + algorithm = cs.add_hyperparameter(CategoricalHyperparameter( + name="algorithm", choices=["SAMME.R", "SAMME"], default="SAMME.R")) + max_depth = cs.add_hyperparameter(UniformIntegerHyperparameter( + name="max_depth", lower=1, upper=10, default=1, log=False)) return cs diff --git a/ParamSklearn/components/regression/adaboost.py b/ParamSklearn/components/regression/adaboost.py new file mode 100644 index 0000000000..28926fdb90 --- /dev/null +++ b/ParamSklearn/components/regression/adaboost.py @@ -0,0 +1,82 @@ +import numpy as np +import sklearn.ensemble +import sklearn.tree + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter + +from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm +from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS + + +class AdaboostRegressor(ParamSklearnRegressionAlgorithm): + def __init__(self, n_estimators, learning_rate, loss, max_depth, + random_state=None): + self.n_estimators = int(n_estimators) + self.learning_rate = float(learning_rate) + self.loss = loss + self.random_state = random_state + self.max_depth = max_depth + self.estimator = None + + def fit(self, X, Y): + self.n_estimators = int(self.n_estimators) + self.learning_rate = float(self.learning_rate) + self.max_depth = int(self.max_depth) + base_estimator = sklearn.tree.DecisionTreeClassifier( + max_depth=self.max_depth) + + self.estimator = sklearn.ensemble.AdaBoostRegressor( + base_estimator=base_estimator, + n_estimators=self.n_estimators, + learning_rate=self.learning_rate, + loss=self.loss, + random_state=self.random_state + ) + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + @staticmethod + def get_properties(): + return {'shortname': 'AB', + 'name': 'AdaBoost Regressor', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': False, + 'handles_multiclass': False, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'input': (DENSE, SPARSE), + 'output': PREDICTIONS, + # TODO find out what is best used here! + # But rather fortran or C-contiguous? + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + # base_estimator = Constant(name="base_estimator", value="None") + n_estimators = cs.add_hyperparameter(UniformIntegerHyperparameter( + name="n_estimators", lower=50, upper=500, default=50, log=False)) + learning_rate = cs.add_hyperparameter(UniformFloatHyperparameter( + name="learning_rate", lower=0.0001, upper=2, default=0.1, log=True)) + loss = cs.add_hyperparameter(CategoricalHyperparameter( + name="loss", choices=["linear", "square", "exponential"], + default="linear")) + max_depth = cs.add_hyperparameter(UniformIntegerHyperparameter( + name="max_depth", lower=1, upper=10, default=1, log=False)) + return cs + diff --git a/tests/components/classification/test_adaboost.py b/tests/components/classification/test_adaboost.py index 459b92c4c4..18eb42807c 100644 --- a/tests/components/classification/test_adaboost.py +++ b/tests/components/classification/test_adaboost.py @@ -15,6 +15,14 @@ def test_default_configuration_iris(self): self.assertAlmostEqual(0.93999999999999995, sklearn.metrics.accuracy_score(predictions, targets)) + def test_default_configuration_iris_sparse(self): + for i in range(10): + predictions, targets = \ + _test_classifier(AdaboostClassifier, sparse=True) + self.assertAlmostEqual(0.88, + sklearn.metrics.accuracy_score(predictions, + targets)) + def test_default_configuration_digits(self): for i in range(10): predictions, targets = \ diff --git a/tests/components/regression/test_adaboost.py b/tests/components/regression/test_adaboost.py new file mode 100644 index 0000000000..eeb6448106 --- /dev/null +++ b/tests/components/regression/test_adaboost.py @@ -0,0 +1,25 @@ +import unittest + +from ParamSklearn.components.regression.adaboost import \ + AdaboostRegressor +from ParamSklearn.util import _test_regressor + +import sklearn.metrics + + +class AdaBoostComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = \ + _test_regressor(AdaboostRegressor, dataset='boston') + self.assertAlmostEqual(0.11053868761882502, + sklearn.metrics.r2_score(targets, + predictions)) + + def test_default_configuration_sparse(self): + for i in range(10): + predictions, targets = \ + _test_regressor(AdaboostRegressor, sparse=True, dataset='boston') + self.assertAlmostEqual(-0.077540100211211049, + sklearn.metrics.r2_score(targets, + predictions)) From 904bfae074d3f412d705d66b2c5708037c83cbbc Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Jul 2015 13:33:20 +0200 Subject: [PATCH 275/352] Update kNN, add kNN regressor --- .../classification/k_nearest_neighbors.py | 51 ++++----------- .../regression/k_nearest_neighbors.py | 64 +++++++++++++++++++ .../regression/test_k_nearest_neighbors.py | 25 ++++++++ tests/test_classification.py | 2 +- tests/test_regression.py | 2 +- 5 files changed, 104 insertions(+), 40 deletions(-) create mode 100644 ParamSklearn/components/regression/k_nearest_neighbors.py create mode 100644 tests/components/regression/test_k_nearest_neighbors.py diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index aab06ebe34..e475f69008 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -2,8 +2,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ - Constant, UnParametrizedHyperparameter, UniformIntegerHyperparameter -from HPOlibConfigSpace.conditions import EqualsCondition + Constant, UniformIntegerHyperparameter from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS @@ -11,30 +10,18 @@ class KNearestNeighborsClassifier(ParamSklearnClassificationAlgorithm): - def __init__(self, n_neighbors, weights, algorithm='auto', p=2, - leaf_size=30, random_state=None): - - self.n_neighbors = int(n_neighbors) - if weights not in ("uniform", "distance"): - raise ValueError("'weights' should be in ('uniform', 'distance'): " - "%s" % weights) + def __init__(self, n_neighbors, weights, p, random_state=None): + self.n_neighbors = n_neighbors self.weights = weights - #if metric not in ("euclidean", "manhattan", "chebyshev", "minkowski"): - # raise ValueError("'metric' should be in ('euclidean', - # 'chebyshev', " - # "'manhattan', 'minkowski'): %s" % metric) - #self.metric = metric - self.algorithm = algorithm - self.p = int(p) - self.leaf_size = int(leaf_size) + self.p = p self.random_state = random_state def fit(self, X, Y): self.estimator = \ sklearn.neighbors.KNeighborsClassifier( - n_neighbors=self.n_neighbors, weights=self.weights, - p=self.p, algorithm=self.algorithm, - leaf_size=self.leaf_size) + n_neighbors=self.n_neighbors, + weights=self.weights, + p=self.p) self.estimator.fit(X, Y) return self @@ -71,25 +58,13 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - n_neighbors = UniformIntegerHyperparameter( - name="n_neighbors", lower=1, upper=100, default=1) - weights = CategoricalHyperparameter( - name="weights", choices=["uniform", "distance"], default="uniform") - algorithm = Constant(name='algorithm', value="auto") - p = CategoricalHyperparameter( - name="p", choices=[1, 2], default=2) - leaf_size = Constant(name="leaf_size", value=30) - cs = ConfigurationSpace() - cs.add_hyperparameter(n_neighbors) - cs.add_hyperparameter(weights) - #cs.add_hyperparameter(metric) - cs.add_hyperparameter(algorithm) - cs.add_hyperparameter(p) - cs.add_hyperparameter(leaf_size) - # Conditions - #metric_p = EqualsCondition(parent=metric, child=p, value="minkowski") - #cs.add_condition(metric_p) + n_neighbors = cs.add_hyperparameter(UniformIntegerHyperparameter( + name="n_neighbors", lower=1, upper=100, default=1)) + weights = cs.add_hyperparameter(CategoricalHyperparameter( + name="weights", choices=["uniform", "distance"], default="uniform")) + p = cs.add_hyperparameter(CategoricalHyperparameter( + name="p", choices=[1, 2], default=2)) return cs diff --git a/ParamSklearn/components/regression/k_nearest_neighbors.py b/ParamSklearn/components/regression/k_nearest_neighbors.py new file mode 100644 index 0000000000..71ec0ccae1 --- /dev/null +++ b/ParamSklearn/components/regression/k_nearest_neighbors.py @@ -0,0 +1,64 @@ +import sklearn.neighbors + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ + Constant, UniformIntegerHyperparameter + +from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm +from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS + + +class KNearestNeighborsRegressor(ParamSklearnRegressionAlgorithm): + def __init__(self, n_neighbors, weights, p, random_state=None): + self.n_neighbors = n_neighbors + self.weights = weights + self.p = p + self.random_state = random_state + + def fit(self, X, Y): + self.estimator = \ + sklearn.neighbors.KNeighborsClassifier( + n_neighbors=self.n_neighbors, + weights=self.weights, + p=self.p) + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict(X) + + @staticmethod + def get_properties(): + return {'shortname': 'KNN', + 'name': 'K-Nearest Neighbor Classification', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # Find out if this is good because of sparsity + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': False, + 'handles_multiclass': False, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': True, + 'input': (DENSE, SPARSE), + 'output': PREDICTIONS, + # TODO find out what is best used here! + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + n_neighbors = cs.add_hyperparameter(UniformIntegerHyperparameter( + name="n_neighbors", lower=1, upper=100, default=1)) + weights = cs.add_hyperparameter(CategoricalHyperparameter( + name="weights", choices=["uniform", "distance"], default="uniform")) + p = cs.add_hyperparameter(CategoricalHyperparameter( + name="p", choices=[1, 2], default=2)) + + return cs diff --git a/tests/components/regression/test_k_nearest_neighbors.py b/tests/components/regression/test_k_nearest_neighbors.py new file mode 100644 index 0000000000..eb863176cd --- /dev/null +++ b/tests/components/regression/test_k_nearest_neighbors.py @@ -0,0 +1,25 @@ +import unittest + +from ParamSklearn.components.regression.k_nearest_neighbors import \ + KNearestNeighborsRegressor +from ParamSklearn.util import _test_regressor + +import sklearn.metrics + + +class KNearestNeighborsComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = \ + _test_regressor(KNearestNeighborsRegressor) + self.assertAlmostEqual(0.068600456340847438, + sklearn.metrics.r2_score(targets, + predictions)) + + def test_default_configuration_sparse_data(self): + for i in range(10): + predictions, targets = \ + _test_regressor(KNearestNeighborsRegressor, sparse=True) + self.assertAlmostEqual(-0.16321841460809972, + sklearn.metrics.r2_score(targets, + predictions)) diff --git a/tests/test_classification.py b/tests/test_classification.py index 53135d9690..42fa163fc3 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -191,7 +191,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(140, len(hyperparameters)) + self.assertEqual(138, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 5, len(conditions)) diff --git a/tests/test_regression.py b/tests/test_regression.py index 55f45370c5..fc6b956e6c 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -85,7 +85,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(98, len(hyperparameters)) + self.assertEqual(105, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From 0ce482d4100099609e00db77a2526e31b10fdf0d Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Jul 2015 13:48:08 +0200 Subject: [PATCH 276/352] Update DecisionTree, add DecisionTree for regression --- .../classification/decision_tree.py | 76 ++++++------- .../components/regression/decision_tree.py | 100 ++++++++++++++++++ .../classification/test_decision_tree.py | 7 ++ .../regression/test_decision_tree.py | 22 ++++ tests/test_classification.py | 2 +- tests/test_regression.py | 2 +- 6 files changed, 171 insertions(+), 38 deletions(-) create mode 100644 ParamSklearn/components/regression/decision_tree.py create mode 100644 tests/components/regression/test_decision_tree.py diff --git a/ParamSklearn/components/classification/decision_tree.py b/ParamSklearn/components/classification/decision_tree.py index e6ae936d25..660aecaab5 100644 --- a/ParamSklearn/components/classification/decision_tree.py +++ b/ParamSklearn/components/classification/decision_tree.py @@ -7,37 +7,40 @@ from ParamSklearn.components.base import \ ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.util import DENSE, PREDICTIONS, SPARSE # get our own forests to replace the sklearn ones from sklearn.tree import DecisionTreeClassifier class DecisionTree(ParamSklearnClassificationAlgorithm): - def __init__(self, criterion, max_features, max_depth, - min_samples_split, min_samples_leaf, + def __init__(self, criterion, splitter, max_features, max_depth, + min_samples_split, min_samples_leaf, min_weight_fraction_leaf, max_leaf_nodes, random_state=None): self.criterion = criterion - self.max_features = float(max_features) + self.splitter = splitter + self.max_features = max_features + self.max_depth = max_depth + self.min_samples_split = min_samples_split + self.min_samples_leaf = min_samples_leaf + self.max_leaf_nodes = max_leaf_nodes + self.min_weight_fraction_leaf = min_weight_fraction_leaf + self.random_state = random_state + self.estimator = None - if max_depth == "None": + def fit(self, X, y, sample_weight=None): + self.max_features = float(self.max_features) + if self.max_depth == "None": self.max_depth = None else: - self.max_depth = max_depth - - self.min_samples_split = int(min_samples_split) - self.min_samples_leaf = int(min_samples_leaf) - - if max_leaf_nodes == "None": + num_features = X.shape[1] + max_depth = max(1, int(np.round(self.max_depth * num_features, 0))) + self.min_samples_split = int(self.min_samples_split) + self.min_samples_leaf = int(self.min_samples_leaf) + if self.max_leaf_nodes == "None": self.max_leaf_nodes = None else: - self.max_leaf_nodes = int(max_leaf_nodes) - - self.random_state = random_state - self.estimator = None - - def fit(self, X, y, sample_weight=None): - num_features = X.shape[1] - max_depth = max(1, int(np.round(self.max_depth * num_features, 0))) + self.max_leaf_nodes = int(self.max_leaf_nodes) + self.min_weight_fraction_leaf = float(self.min_weight_fraction_leaf) self.estimator = DecisionTreeClassifier( criterion=self.criterion, @@ -74,8 +77,8 @@ def get_properties(): 'handles_multiclass': True, 'handles_multilabel': True, 'is_deterministic': True, - 'handles_sparse': False, - 'input': (DENSE, ), + 'handles_sparse': True, + 'input': (DENSE, SPARSE), 'output': PREDICTIONS, # TODO find out what is best used here! # But rather fortran or C-contiguous? @@ -83,20 +86,21 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - criterion = CategoricalHyperparameter( - "criterion", ["gini", "entropy"], default="gini") - max_features = Constant('max_features', 1.0) - max_depth = UniformFloatHyperparameter('max_depth', 0., 2., default=0.5) - min_samples_split = UniformIntegerHyperparameter( - "min_samples_split", 2, 20, default=2) - min_samples_leaf = UniformIntegerHyperparameter( - "min_samples_leaf", 1, 20, default=1) - max_leaf_nodes = UnParametrizedHyperparameter("max_leaf_nodes", "None") cs = ConfigurationSpace() - cs.add_hyperparameter(criterion) - cs.add_hyperparameter(max_features) - cs.add_hyperparameter(max_depth) - cs.add_hyperparameter(min_samples_split) - cs.add_hyperparameter(min_samples_leaf) - cs.add_hyperparameter(max_leaf_nodes) + + criterion = cs.add_hyperparameter(CategoricalHyperparameter( + "criterion", ["gini", "entropy"], default="gini")) + splitter = cs.add_hyperparameter(Constant("splitter", "best")) + max_features = cs.add_hyperparameter(Constant('max_features', 1.0)) + max_depth = cs.add_hyperparameter(UniformFloatHyperparameter( + 'max_depth', 0., 2., default=0.5)) + min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_split", 2, 20, default=2)) + min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_leaf", 1, 20, default=1)) + min_weight_fraction_leaf = cs.add_hyperparameter( + Constant("min_weight_fraction_leaf", 0.0)) + max_leaf_nodes = cs.add_hyperparameter( + UnParametrizedHyperparameter("max_leaf_nodes", "None")) + return cs diff --git a/ParamSklearn/components/regression/decision_tree.py b/ParamSklearn/components/regression/decision_tree.py new file mode 100644 index 0000000000..173171746b --- /dev/null +++ b/ParamSklearn/components/regression/decision_tree.py @@ -0,0 +1,100 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from ParamSklearn.components.base import \ + ParamSklearnRegressionAlgorithm +from ParamSklearn.util import DENSE, PREDICTIONS, SPARSE +# get our own forests to replace the sklearn ones +from sklearn.tree import DecisionTreeRegressor + + +class DecisionTree(ParamSklearnRegressionAlgorithm): + def __init__(self, criterion, splitter, max_features, max_depth, + min_samples_split, min_samples_leaf, min_weight_fraction_leaf, + max_leaf_nodes, random_state=None): + self.criterion = criterion + self.splitter = splitter + self.max_features = max_features + self.max_depth = max_depth + self.min_samples_split = min_samples_split + self.min_samples_leaf = min_samples_leaf + self.max_leaf_nodes = max_leaf_nodes + self.min_weight_fraction_leaf = min_weight_fraction_leaf + self.random_state = random_state + self.estimator = None + + def fit(self, X, y, sample_weight=None): + self.max_features = float(self.max_features) + if self.max_depth == "None": + self.max_depth = None + else: + num_features = X.shape[1] + max_depth = max(1, int(np.round(self.max_depth * num_features, 0))) + self.min_samples_split = int(self.min_samples_split) + self.min_samples_leaf = int(self.min_samples_leaf) + if self.max_leaf_nodes == "None": + self.max_leaf_nodes = None + else: + self.max_leaf_nodes = int(self.max_leaf_nodes) + self.min_weight_fraction_leaf = float(self.min_weight_fraction_leaf) + + self.estimator = DecisionTreeRegressor( + criterion=self.criterion, + max_depth=max_depth, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + max_leaf_nodes=self.max_leaf_nodes, + random_state=self.random_state) + self.estimator.fit(X, y, sample_weight=sample_weight) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + @staticmethod + def get_properties(): + return {'shortname': 'DT', + 'name': 'Decision Tree Classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': False, + 'handles_multiclass': False, + 'handles_multilabel': False, + 'is_deterministic': False, + 'handles_sparse': True, + 'input': (DENSE, SPARSE), + 'output': PREDICTIONS, + # TODO find out what is best used here! + # But rather fortran or C-contiguous? + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + criterion = cs.add_hyperparameter(Constant('criterion', 'mse')) + splitter = cs.add_hyperparameter(Constant("splitter", "best")) + max_features = cs.add_hyperparameter(Constant('max_features', 1.0)) + max_depth = cs.add_hyperparameter(UniformFloatHyperparameter( + 'max_depth', 0., 2., default=0.5)) + min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_split", 2, 20, default=2)) + min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_leaf", 1, 20, default=1)) + min_weight_fraction_leaf = cs.add_hyperparameter( + Constant("min_weight_fraction_leaf", 0.0)) + max_leaf_nodes = cs.add_hyperparameter( + UnParametrizedHyperparameter("max_leaf_nodes", "None")) + + return cs diff --git a/tests/components/classification/test_decision_tree.py b/tests/components/classification/test_decision_tree.py index 005b340906..4b521247b8 100644 --- a/tests/components/classification/test_decision_tree.py +++ b/tests/components/classification/test_decision_tree.py @@ -15,6 +15,13 @@ def test_default_configuration(self): sklearn.metrics.accuracy_score(predictions, targets)) + def test_default_configuration_sparse(self): + for i in range(10): + predictions, targets = _test_classifier(DecisionTree, sparse=True) + self.assertAlmostEqual(0.69999999999999996, + sklearn.metrics.accuracy_score(predictions, + targets)) + def test_default_configuration_predict_proba(self): for i in range(10): predictions, targets = _test_classifier_predict_proba( diff --git a/tests/components/regression/test_decision_tree.py b/tests/components/regression/test_decision_tree.py new file mode 100644 index 0000000000..36ba14a7f2 --- /dev/null +++ b/tests/components/regression/test_decision_tree.py @@ -0,0 +1,22 @@ +import unittest + +from ParamSklearn.components.regression.decision_tree import DecisionTree +from ParamSklearn.util import _test_regressor + +import sklearn.metrics + + +class DecisionTreetComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_regressor(DecisionTree,) + self.assertAlmostEqual(0.14886750572325669, + sklearn.metrics.r2_score(targets, + predictions)) + + def test_default_configuration_sparse(self): + for i in range(10): + predictions, targets = _test_regressor(DecisionTree, sparse=True) + self.assertAlmostEqual(0.021778487309118133, + sklearn.metrics.r2_score(targets, + predictions)) diff --git a/tests/test_classification.py b/tests/test_classification.py index 42fa163fc3..53135d9690 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -191,7 +191,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(138, len(hyperparameters)) + self.assertEqual(140, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 5, len(conditions)) diff --git a/tests/test_regression.py b/tests/test_regression.py index fc6b956e6c..94f0ecd61d 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -85,7 +85,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(105, len(hyperparameters)) + self.assertEqual(113, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From 264d1153130c544569fb460304c3d65a06b88aa7 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Jul 2015 14:25:57 +0200 Subject: [PATCH 277/352] Update the list of classifiers, regressors and transformers according to sklearn 0.16 --- ParamSklearn/util.py | 3 ++- misc/classifiers.csv | 2 ++ misc/regressors.csv | 28 ++++++++++++++++------------ misc/transformers.csv | 5 +++++ 4 files changed, 25 insertions(+), 13 deletions(-) diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index 8e41f3bf0f..dd4427f5b6 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -46,7 +46,8 @@ def find_sklearn_classes(class_): classifiers.add(classifier) print - print classifiers + for classifier in sorted([str(cls) for cls in classifiers]): + print classifier def get_dataset(dataset='iris', make_sparse=False, add_NaNs=False): diff --git a/misc/classifiers.csv b/misc/classifiers.csv index 75b08491c3..aad2610b00 100644 --- a/misc/classifiers.csv +++ b/misc/classifiers.csv @@ -1,5 +1,6 @@ class,added,comment ,False,Mixin class which adds no functionality except the score function +,, ,False,Outlier detection ,False,Please read the module name ,False,This can blow up the configuration space; because we need to define a configured base object. Maybe consider later. @@ -11,6 +12,7 @@ class,added,comment ,True, ,False,Mixin but no full model ,False,Is implemented using LibLinear +,, ,True,I don't know how similar to SGD this one is ,False,n fact, Perceptron() is equivalent to SGDClassifier(...) ,True, diff --git a/misc/regressors.csv b/misc/regressors.csv index e25c528686..f31502b56f 100644 --- a/misc/regressors.csv +++ b/misc/regressors.csv @@ -1,5 +1,6 @@ class,added,comment ,False,BaseClass +,, ,False,Is a preprocessing method ,False,Is a preprocessing method ,False,Is a preprocessing method @@ -7,12 +8,13 @@ class,added,comment ,False,See module name ,False,This can blow up the configuration space; because we need to define a configured base object. Maybe consider later. ,True, -,, +,False,Is a base class ,True, ,True, -,, -,,Crashes when getting two similar inputs -,, +,True, +,True,Crashes when getting two similar inputs +,False,Calibration instead of prediction method +,, ,, ,, ,, @@ -33,14 +35,16 @@ class,added,comment ,False,We want to perform CV ourselves ,, ,, -,True,Check range for alpha +,True, ,False,We want to perform CV ourselves -,, +,False, ,, ,FALSE,This regressor is inside a test module -,,Crashes when predicting a training input and weighted distances -,,Can crash when there is no neighbour within the radius -,, -,True,Check searchspace -,FALSE,Rfs are considered better (and are most likely faster to train) -,FALSE,ExtraTreeForests are considered better +,, +,True, +,False,Can crash when there is no neighbour within the radius +,True, +,False,Mathematical idental to SVR +,True, +,TRUE, +,FALSE,Not to be used as a base regressor diff --git a/misc/transformers.csv b/misc/transformers.csv index 8bd506b5bf..422bd67484 100644 --- a/misc/transformers.csv +++ b/misc/transformers.csv @@ -1,6 +1,7 @@ class,added,comment ,FALSE,BaseClass ,FALSE,Mixin class for feature agglomeration. +,, ,TRUE,try out ,FALSE,deprecated ,FALSE,We expect this to not work better than PCA @@ -10,12 +11,14 @@ class,added,comment ,FALSE,Regression only ,FALSE,Regression only ,FALSE,Regression only +,, ,TRUE,try out ,FALSE, ,FALSE,What is the difference to Tobis implementation? ,FALSE,Mixin class for sparse coding ,FALSE, ,TRUE,try out +,, ,TRUE,try out ,FALSE,Special case of sparse coding ,FALSE,see above @@ -59,6 +62,7 @@ class,added,comment ,FALSE,very specia case ,FALSE,Special case of GEM ,FALSE,same as LibLinear +,, ,FALSE,same as SGD ,FALSE,Base class ,FALSE,not right now @@ -69,6 +73,7 @@ class,added,comment ,FALSE,test class ,FALSE,only look at if clustering helps ,FALSE,only look at if clustering helps +,, ,FALSE,handles only binary input ,FALSE,"Right now, we do not have different feature sources." ,FALSE,"Right now, we have no need to binarize data" From b415da9acc4bf95960aaaa16c414be120e6c8ffc Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Jul 2015 14:26:39 +0200 Subject: [PATCH 278/352] Update LibLinear_SVC, add LibLinear_SVR --- .../classification/liblinear_svc.py | 48 +++++----- .../components/regression/liblinear_svr.py | 92 +++++++++++++++++++ ...ort_vector_regression.py => libsvm_svr.py} | 2 +- tests/components/regression/liblinear_svr.py | 17 ++++ .../test_support_vector_regression.py | 6 +- tests/test_classification.py | 2 +- tests/test_regression.py | 2 +- 7 files changed, 136 insertions(+), 33 deletions(-) create mode 100644 ParamSklearn/components/regression/liblinear_svr.py rename ParamSklearn/components/regression/{support_vector_regression.py => libsvm_svr.py} (98%) create mode 100644 tests/components/regression/liblinear_svr.py diff --git a/ParamSklearn/components/classification/liblinear_svc.py b/ParamSklearn/components/classification/liblinear_svc.py index e47fd255ec..458b9379b3 100644 --- a/ParamSklearn/components/classification/liblinear_svc.py +++ b/ParamSklearn/components/classification/liblinear_svc.py @@ -13,7 +13,6 @@ class LibLinear_SVC(ParamSklearnClassificationAlgorithm): # Liblinear is not deterministic as it uses a RNG inside - # TODO: maybe add dual and crammer-singer? def __init__(self, penalty, loss, dual, tol, C, multi_class, fit_intercept, intercept_scaling, class_weight=None, random_state=None): @@ -46,6 +45,9 @@ def fit(self, X, Y): tol=self.tol, C=self.C, class_weight=self.class_weight, + fit_intercept=self.fit_intercept, + intercept_scaling=self.intercept_scaling, + multi_class=self.multi_class, random_state=self.random_state) self.estimator.fit(X, Y) return self @@ -77,47 +79,39 @@ def get_properties(): 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': False, - # TODO find out of this is right! - # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use 'handles_sparse': True, 'input': (SPARSE, DENSE), 'output': PREDICTIONS, - # TODO find out what is best used here! 'preferred_dtype': None} @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - penalty = CategoricalHyperparameter("penalty", ["l1", "l2"], - default="l2") - loss = CategoricalHyperparameter("loss", ["l1", "l2"], default="l2") - dual = Constant("dual", "False") - # This is set ad-how - tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default=1e-4, - log=True) - C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True, - default=1.0) - multi_class = UnParametrizedHyperparameter("multi_class", "ovr") + cs = ConfigurationSpace() + + penalty = cs.add_hyperparameter(CategoricalHyperparameter( + "penalty", ["l1", "l2"], default="l2")) + loss = cs.add_hyperparameter(CategoricalHyperparameter( + "loss", ["hinge", "squared_hinge"], default="squared_hinge")) + dual = cs.add_hyperparameter(Constant("dual", "False")) + # This is set ad-hoc + tol = cs.add_hyperparameter(UniformFloatHyperparameter( + "tol", 1e-5, 1e-1, default=1e-4, log=True)) + C = cs.add_hyperparameter(UniformFloatHyperparameter( + "C", 0.03125, 32768, log=True, default=1.0)) + multi_class = cs.add_hyperparameter(Constant("multi_class", "ovr")) # These are set ad-hoc - fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True") - intercept_scaling = UnParametrizedHyperparameter("intercept_scaling", 1) + fit_intercept = cs.add_hyperparameter(Constant("fit_intercept", "True")) + intercept_scaling = cs.add_hyperparameter(Constant( + "intercept_scaling", 1)) - cs = ConfigurationSpace() - cs.add_hyperparameter(penalty) - cs.add_hyperparameter(loss) - cs.add_hyperparameter(dual) - cs.add_hyperparameter(tol) - cs.add_hyperparameter(C) - cs.add_hyperparameter(multi_class) - cs.add_hyperparameter(fit_intercept) - cs.add_hyperparameter(intercept_scaling) penalty_and_loss = ForbiddenAndConjunction( ForbiddenEqualsClause(penalty, "l1"), - ForbiddenEqualsClause(loss, "l1") + ForbiddenEqualsClause(loss, "hinge") ) constant_penalty_and_loss = ForbiddenAndConjunction( ForbiddenEqualsClause(dual, "False"), ForbiddenEqualsClause(penalty, "l2"), - ForbiddenEqualsClause(loss, "l1") + ForbiddenEqualsClause(loss, "hinge") ) penalty_and_dual = ForbiddenAndConjunction( ForbiddenEqualsClause(dual, "False"), diff --git a/ParamSklearn/components/regression/liblinear_svr.py b/ParamSklearn/components/regression/liblinear_svr.py new file mode 100644 index 0000000000..7f8c6aa8df --- /dev/null +++ b/ParamSklearn/components/regression/liblinear_svr.py @@ -0,0 +1,92 @@ +import sklearn.svm + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter, Constant, UnParametrizedHyperparameter +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ + ForbiddenAndConjunction + +from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm +from ParamSklearn.implementations.util import softmax +from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS + + +class LibLinear_SVR(ParamSklearnRegressionAlgorithm): + # Liblinear is not deterministic as it uses a RNG inside + def __init__(self, loss, epsilon, dual, tol, C, fit_intercept, + intercept_scaling, random_state=None): + self.epsilon = epsilon + self.loss = loss + self.dual = dual + self.tol = tol + self.C = C + self.fit_intercept = fit_intercept + self.intercept_scaling = intercept_scaling + self.random_state = random_state + self.estimator = None + + def fit(self, X, Y): + self.C = float(self.C) + self.tol = float(self.tol) + self.epsilon = float(self.epsilon) + + self.dual = bool(self.dual) + self.fit_intercept = bool(self.fit_intercept) + self.intercept_scaling = float(self.intercept_scaling) + + self.estimator = sklearn.svm.LinearSVR(epsilon=self.epsilon, + loss=self.loss, + dual=self.dual, + tol=self.tol, + C=self.C, + fit_intercept=self.fit_intercept, + intercept_scaling=self.intercept_scaling, + random_state=self.random_state) + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict(X) + + @staticmethod + def get_properties(): + return {'shortname': 'Liblinear-SVR', + 'name': 'Liblinear Support Vector Regression', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # Find out if this is good because of sparsity + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': False, + 'handles_multiclass': False, + 'handles_multilabel': False, + 'is_deterministic': False, + 'handles_sparse': True, + 'input': (SPARSE, DENSE), + 'output': PREDICTIONS, + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + C = cs.add_hyperparameter(UniformFloatHyperparameter( + "C", 0.03125, 32768, log=True, default=1.0)) + loss = cs.add_hyperparameter(CategoricalHyperparameter( + "loss", ["epsilon_insensitive", "squared_epsilon_insensitive"], + default="epsilon_insensitive")) + # Random Guess + epsilon = cs.add_hyperparameter(UniformFloatHyperparameter( + name="epsilon", lower=0.001, upper=1, default=0.1, log=True)) + dual = cs.add_hyperparameter(Constant("dual", "False")) + # These are set ad-hoc + tol = cs.add_hyperparameter(UniformFloatHyperparameter( + "tol", 1e-5, 1e-1, default=1e-4, log=True)) + fit_intercept = cs.add_hyperparameter(Constant("fit_intercept", "True")) + intercept_scaling = cs.add_hyperparameter(Constant( + "intercept_scaling", 1)) + + return cs diff --git a/ParamSklearn/components/regression/support_vector_regression.py b/ParamSklearn/components/regression/libsvm_svr.py similarity index 98% rename from ParamSklearn/components/regression/support_vector_regression.py rename to ParamSklearn/components/regression/libsvm_svr.py index e7bc954df9..c7deeeeb32 100644 --- a/ParamSklearn/components/regression/support_vector_regression.py +++ b/ParamSklearn/components/regression/libsvm_svr.py @@ -12,7 +12,7 @@ from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS -class SupportVectorRegression(ParamSklearnRegressionAlgorithm): +class LibSVM_SVR(ParamSklearnRegressionAlgorithm): def __init__(self, kernel, C, epsilon, tol, shrinking, gamma=0.0, degree=3, coef0=0.0, cache_size=2000, verbose=False, max_iter=-1, random_state=None): diff --git a/tests/components/regression/liblinear_svr.py b/tests/components/regression/liblinear_svr.py new file mode 100644 index 0000000000..d52a42aff8 --- /dev/null +++ b/tests/components/regression/liblinear_svr.py @@ -0,0 +1,17 @@ +import unittest + +from ParamSklearn.components.regression.liblinear_svr import \ + LibLinear_SVR +from ParamSklearn.util import _test_regressor + +import sklearn.metrics + + +class SupportVectorComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_regressor(LibLinear_SVR, + dataset='boston') + self.assertAlmostEqual(0.54372712745256768, + sklearn.metrics.r2_score(y_true=targets, + y_pred=predictions)) diff --git a/tests/components/regression/test_support_vector_regression.py b/tests/components/regression/test_support_vector_regression.py index 5dbb035255..52018824e3 100644 --- a/tests/components/regression/test_support_vector_regression.py +++ b/tests/components/regression/test_support_vector_regression.py @@ -1,6 +1,6 @@ import unittest -from ParamSklearn.components.regression.support_vector_regression import SupportVectorRegression +from ParamSklearn.components.regression.libsvm_svr import LibSVM_SVR from ParamSklearn.util import _test_regressor @@ -11,13 +11,13 @@ class SupportVectorComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): - predictions, targets = _test_regressor(SupportVectorRegression) + predictions, targets = _test_regressor(LibSVM_SVR) self.assertAlmostEqual(0.12849591861430087, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) def test_default_configuration_sparse(self): for i in range(10): - predictions, targets = _test_regressor(SupportVectorRegression, + predictions, targets = _test_regressor(LibSVM_SVR, sparse=True) self.assertAlmostEqual(0.0098877566961463881, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) diff --git a/tests/test_classification.py b/tests/test_classification.py index 53135d9690..e51b5a57a5 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -265,7 +265,7 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): " classifier:liblinear_svc:dual, Constant: False\n" " classifier:liblinear_svc:fit_intercept, Constant: True\n" " classifier:liblinear_svc:intercept_scaling, Constant: 1\n" - " classifier:liblinear_svc:loss, Value: l2\n" + " classifier:liblinear_svc:loss, Value: squared_hinge\n" " classifier:liblinear_svc:multi_class, Constant: ovr\n" " classifier:liblinear_svc:penalty, Value: l2\n" " classifier:liblinear_svc:tol, Value: 0.0001\n" diff --git a/tests/test_regression.py b/tests/test_regression.py index 94f0ecd61d..1e327bbf54 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -85,7 +85,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(113, len(hyperparameters)) + self.assertEqual(120, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From be9ba5910e4ef21b21650d9e961af5b22db4bafa Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Jul 2015 14:35:29 +0200 Subject: [PATCH 279/352] LibSVM_SVC: calibration no longer necessary for predict_proba with sparse data --- .../components/classification/libsvm_svc.py | 32 ++++++++++++------- .../classification/test_libsvm_svc.py | 10 +++++- 2 files changed, 29 insertions(+), 13 deletions(-) diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index ece46fa699..3f5adb8f00 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -8,6 +8,7 @@ from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.implementations.util import softmax class LibSVM_SVC(ParamSklearnClassificationAlgorithm): @@ -27,17 +28,21 @@ def __init__(self, C, kernel, gamma, shrinking, tol, max_iter, def fit(self, X, Y): self.C = float(self.C) - self.degree = int(self.degree) - self.gamma = float(self.gamma) - self.coef0 = float(self.coef0) + if self.degree is None: + self.degree = 3 + else: + self.degree = int(self.degree) + if self.gamma is None: + self.gamma = 0.0 + else: + self.gamma = float(self.gamma) + if self.coef0 is None: + self.coef0 = 0.0 + else: + self.coef0 = float(self.coef0) self.tol = float(self.tol) self.max_iter = float(self.max_iter) - - try: - self.shrinking = bool(self.shrinking) - except TypeError as e: - raise TypeError("Value %s not allowed for hyperparameter " - "shrinking" % str(self.shrinking)) + self.shrinking = bool(self.shrinking) if self.class_weight == "None": self.class_weight = None @@ -52,8 +57,8 @@ def fit(self, X, Y): class_weight=self.class_weight, max_iter=self.max_iter, random_state=self.random_state, - cache_size=2000, - probability=True) + cache_size=2000) + # probability=True) self.estimator.fit(X, Y) return self @@ -65,7 +70,10 @@ def predict(self, X): def predict_proba(self, X): if self.estimator is None: raise NotImplementedError() - return self.estimator.predict_proba(X) + # return self.estimator.predict_proba(X) + decision = self.estimator.decision_function(X) + return softmax(decision) + @staticmethod def get_properties(): diff --git a/tests/components/classification/test_libsvm_svc.py b/tests/components/classification/test_libsvm_svc.py index 0f936cddbe..247ab2166d 100644 --- a/tests/components/classification/test_libsvm_svc.py +++ b/tests/components/classification/test_libsvm_svc.py @@ -1,7 +1,7 @@ import unittest from ParamSklearn.components.classification.libsvm_svc import LibSVM_SVC -from ParamSklearn.util import _test_classifier +from ParamSklearn.util import _test_classifier, _test_classifier_predict_proba import sklearn.metrics @@ -12,3 +12,11 @@ def test_default_configuration(self): predictions, targets = _test_classifier(LibSVM_SVC, dataset='iris') self.assertAlmostEqual(0.96, sklearn.metrics.accuracy_score(predictions, targets)) + + def test_default_configuration_predict_proba(self): + for i in range(10): + predictions, targets = _test_classifier_predict_proba( + LibSVM_SVC, sparse=True) + self.assertAlmostEqual(1.3028778322629093, + sklearn.metrics.log_loss(targets, + predictions)) From 03cc78a8beb1f3e8b2bd29c9ba5063ba81955336 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Jul 2015 17:29:29 +0200 Subject: [PATCH 280/352] Update SGD and add SGD for regression --- ParamSklearn/components/classification/sgd.py | 65 +++---- ParamSklearn/components/regression/sgd.py | 161 ++++++++++++++++++ tests/components/classification/test_sgd.py | 6 +- tests/components/regression/test_sgd.py | 39 +++++ tests/test_classification.py | 2 +- tests/test_regression.py | 2 +- 6 files changed, 238 insertions(+), 37 deletions(-) create mode 100644 ParamSklearn/components/regression/sgd.py create mode 100644 tests/components/regression/test_sgd.py diff --git a/ParamSklearn/components/classification/sgd.py b/ParamSklearn/components/classification/sgd.py index b8f1ed0431..3677deaeaa 100644 --- a/ParamSklearn/components/classification/sgd.py +++ b/ParamSklearn/components/classification/sgd.py @@ -14,7 +14,7 @@ class SGD(ParamSklearnClassificationAlgorithm): def __init__(self, loss, penalty, alpha, fit_intercept, n_iter, learning_rate, class_weight=None, l1_ratio=0.15, epsilon=0.1, - eta0=0.01, power_t=0.5, random_state=None): + eta0=0.01, power_t=0.5, average=False, random_state=None): self.loss = loss self.penalty = penalty self.alpha = alpha @@ -27,6 +27,7 @@ def __init__(self, loss, penalty, alpha, fit_intercept, n_iter, self.eta0 = eta0 self.power_t = power_t self.random_state = random_state + self.average = average self.estimator = None def fit(self, X, y): @@ -41,15 +42,15 @@ def iterative_fit(self, X, y, n_iter=1, refit=False): if self.estimator is None: self.alpha = float(self.alpha) - self.fit_intercept = bool(self.fit_intercept) + self.fit_intercept = self.fit_intercept == 'True' self.n_iter = int(self.n_iter) if self.class_weight == "None": self.class_weight = None - self.l1_ratio = float(self.l1_ratio) - self.epsilon = float(self.epsilon) + self.l1_ratio = float(self.l1_ratio) if self.l1_ratio is not None else 0.15 + self.epsilon = float(self.epsilon) if self.epsilon is not None else 0.1 self.eta0 = float(self.eta0) - self.power_t = float(self.power_t) - + self.power_t = float(self.power_t) if self.power_t is not None else 0.25 + self.average = self.average == 'True' self.estimator = SGDClassifier(loss=self.loss, penalty=self.penalty, alpha=self.alpha, @@ -62,6 +63,7 @@ def iterative_fit(self, X, y, n_iter=1, refit=False): eta0=self.eta0, power_t=self.power_t, shuffle=True, + average=self.average, random_state=self.random_state) self.estimator.n_iter += n_iter @@ -110,33 +112,32 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - loss = CategoricalHyperparameter("loss", - ["hinge", "log", "modified_huber", "squared_hinge", "perceptron"], - default="hinge") - penalty = CategoricalHyperparameter("penalty", ["l1", "l2", "elasticnet"], - default="l2") - alpha = UniformFloatHyperparameter("alpha", 10**-7, 10**-1, - log=True, default=0.0001) - l1_ratio = UniformFloatHyperparameter("l1_ratio", 0, 1, default=0.15) - fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True") - n_iter = UniformIntegerHyperparameter("n_iter", 5, 1000, default=20) - epsilon = UniformFloatHyperparameter("epsilon", 1e-5, 1e-1, - default=1e-4, log=True) - learning_rate = CategoricalHyperparameter("learning_rate", - ["optimal", "invscaling", "constant"], default="optimal") - eta0 = UniformFloatHyperparameter("eta0", 10**-7, 0.1, default=0.01) - power_t = UniformFloatHyperparameter("power_t", 1e-5, 1, default=0.5) cs = ConfigurationSpace() - cs.add_hyperparameter(loss) - cs.add_hyperparameter(penalty) - cs.add_hyperparameter(alpha) - cs.add_hyperparameter(l1_ratio) - cs.add_hyperparameter(fit_intercept) - cs.add_hyperparameter(n_iter) - cs.add_hyperparameter(epsilon) - cs.add_hyperparameter(learning_rate) - cs.add_hyperparameter(eta0) - cs.add_hyperparameter(power_t) + + loss = cs.add_hyperparameter(CategoricalHyperparameter("loss", + ["hinge", "log", "modified_huber", "squared_hinge", "perceptron"], + default="hinge")) + penalty = cs.add_hyperparameter(CategoricalHyperparameter( + "penalty", ["l1", "l2", "elasticnet"], default="l2")) + alpha = cs.add_hyperparameter(UniformFloatHyperparameter( + "alpha", 10e-7, 1e-1, log=True, default=0.0001)) + l1_ratio = cs.add_hyperparameter(UniformFloatHyperparameter( + "l1_ratio", 0, 1, default=0.15)) + fit_intercept = cs.add_hyperparameter(UnParametrizedHyperparameter( + "fit_intercept", "True")) + n_iter = cs.add_hyperparameter(UniformIntegerHyperparameter( + "n_iter", 5, 1000, default=20)) + epsilon = cs.add_hyperparameter(UniformFloatHyperparameter( + "epsilon", 1e-5, 1e-1, default=1e-4, log=True)) + learning_rate = cs.add_hyperparameter(CategoricalHyperparameter( + "learning_rate", ["optimal", "invscaling", "constant"], + default="optimal")) + eta0 = cs.add_hyperparameter(UniformFloatHyperparameter( + "eta0", 10**-7, 0.1, default=0.01)) + power_t = cs.add_hyperparameter(UniformFloatHyperparameter( + "power_t", 1e-5, 1, default=0.25)) + average = cs.add_hyperparameter(CategoricalHyperparameter( + "average", ["False", "True"], default="False")) # TODO add passive/aggressive here, although not properly documented? elasticnet = EqualsCondition(l1_ratio, penalty, "elasticnet") diff --git a/ParamSklearn/components/regression/sgd.py b/ParamSklearn/components/regression/sgd.py new file mode 100644 index 0000000000..525b24d23f --- /dev/null +++ b/ParamSklearn/components/regression/sgd.py @@ -0,0 +1,161 @@ +from sklearn.linear_model.stochastic_gradient import SGDRegressor +import sklearn.preprocessing + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter, UnParametrizedHyperparameter, \ + UniformIntegerHyperparameter +from HPOlibConfigSpace.conditions import InCondition, EqualsCondition + +from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm +from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.implementations.util import softmax + + +class SGD(ParamSklearnRegressionAlgorithm): + def __init__(self, loss, penalty, alpha, fit_intercept, n_iter, + learning_rate, l1_ratio=0.15, epsilon=0.1, + eta0=0.01, power_t=0.5, average=False, random_state=None): + self.loss = loss + self.penalty = penalty + self.alpha = alpha + self.fit_intercept = fit_intercept + self.n_iter = n_iter + self.learning_rate = learning_rate + self.l1_ratio = l1_ratio + self.epsilon = epsilon + self.eta0 = eta0 + self.power_t = power_t + self.random_state = random_state + self.average = average + + self.estimator = None + self.scaler = None + + def fit(self, X, y): + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1) + + return self + + def iterative_fit(self, X, y, n_iter=1, refit=False): + if refit: + self.estimator = None + self.scaler = None + + if self.estimator is None: + self.alpha = float(self.alpha) + self.fit_intercept = self.fit_intercept == 'True' + self.n_iter = int(self.n_iter) + self.l1_ratio = float( + self.l1_ratio) if self.l1_ratio is not None else 0.15 + self.epsilon = float( + self.epsilon) if self.epsilon is not None else 0.1 + self.eta0 = float(self.eta0) + self.power_t = float( + self.power_t) if self.power_t is not None else 0.25 + self.average = self.average == 'True' + self.estimator = SGDRegressor(loss=self.loss, + penalty=self.penalty, + alpha=self.alpha, + fit_intercept=self.fit_intercept, + n_iter=self.n_iter, + learning_rate=self.learning_rate, + l1_ratio=self.l1_ratio, + epsilon=self.epsilon, + eta0=self.eta0, + power_t=self.power_t, + shuffle=True, + average=self.average, + random_state=self.random_state) + + self.scaler = sklearn.preprocessing.StandardScaler(copy=True) + self.scaler.fit(y) + + Y_scaled = self.scaler.transform(y) + + self.estimator.n_iter += n_iter + self.estimator.fit(X, Y_scaled) + return self + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + return not self.estimator.n_iter < self.n_iter + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError() + Y_pred = self.estimator.predict(X) + return self.scaler.inverse_transform(Y_pred) + + @staticmethod + def get_properties(): + return {'shortname': 'SGD Regressor', + 'name': 'Stochastic Gradient Descent Regressor', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': False, + 'handles_multiclass': False, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': True, + 'input': (DENSE, SPARSE), + 'output': PREDICTIONS, + # TODO find out what is best used here! + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + loss = cs.add_hyperparameter(CategoricalHyperparameter("loss", + ["squared_loss", "huber", "epsilon_insensitive", "squared_epsilon_insensitive"], + default="squared_loss")) + penalty = cs.add_hyperparameter(CategoricalHyperparameter( + "penalty", ["l1", "l2", "elasticnet"], default="l2")) + alpha = cs.add_hyperparameter(UniformFloatHyperparameter( + "alpha", 10e-7, 1e-1, log=True, default=0.01)) + l1_ratio = cs.add_hyperparameter(UniformFloatHyperparameter( + "l1_ratio", 0., 1., default=0.15)) + fit_intercept = cs.add_hyperparameter(UnParametrizedHyperparameter( + "fit_intercept", "True")) + n_iter = cs.add_hyperparameter(UniformIntegerHyperparameter( + "n_iter", 5, 1000, default=20)) + epsilon = cs.add_hyperparameter(UniformFloatHyperparameter( + "epsilon", 1e-5, 1e-1, default=1e-4, log=True)) + learning_rate = cs.add_hyperparameter(CategoricalHyperparameter( + "learning_rate", ["optimal", "invscaling", "constant"], + default="optimal")) + eta0 = cs.add_hyperparameter(UniformFloatHyperparameter( + "eta0", 10 ** -7, 0.1, default=0.01)) + power_t = cs.add_hyperparameter(UniformFloatHyperparameter( + "power_t", 1e-5, 1, default=0.5)) + average = cs.add_hyperparameter(CategoricalHyperparameter( + "average", ["False", "True"], default="False")) + + # TODO add passive/aggressive here, although not properly documented? + elasticnet = EqualsCondition(l1_ratio, penalty, "elasticnet") + epsilon_condition = InCondition(epsilon, loss, + ["huber", "epsilon_insensitive", "squared_epsilon_insensitive"]) + # eta0 seems to be always active according to the source code; when + # learning_rate is set to optimial, eta0 is the starting value: + # https://github.com/scikit-learn/scikit-learn/blob/0.15.X/sklearn/linear_model/sgd_fast.pyx + # eta0_and_inv = EqualsCondition(eta0, learning_rate, "invscaling") + #eta0_and_constant = EqualsCondition(eta0, learning_rate, "constant") + #eta0_condition = OrConjunction(eta0_and_inv, eta0_and_constant) + power_t_condition = EqualsCondition(power_t, learning_rate, + "invscaling") + + cs.add_condition(elasticnet) + cs.add_condition(epsilon_condition) + cs.add_condition(power_t_condition) + + return cs + + def __str__(self): + return "ParamSklearn StochasticGradientClassifier" diff --git a/tests/components/classification/test_sgd.py b/tests/components/classification/test_sgd.py index 43c0acc00f..8310b742ff 100644 --- a/tests/components/classification/test_sgd.py +++ b/tests/components/classification/test_sgd.py @@ -9,7 +9,7 @@ class SGDComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): - predictions, targets = _test_classifier(SGD, dataset='iris') + predictions, targets = _test_classifier(SGD) self.assertAlmostEqual(1.0, sklearn.metrics.accuracy_score(predictions, targets)) @@ -25,7 +25,7 @@ def test_default_configuration_iterative_fit(self): def test_default_configuration_digits(self): for i in range(10): predictions, targets = \ - _test_classifier(classifier=SGD, dataset='digits') + _test_classifier(SGD, dataset='digits') self.assertAlmostEqual(0.89313904068002425, sklearn.metrics.accuracy_score(predictions, targets)) @@ -33,7 +33,7 @@ def test_default_configuration_digits(self): def test_default_configuration_digits_iterative_fit(self): for i in range(10): predictions, targets = _test_classifier_iterative_fit( - classifier=SGD, + SGD, dataset='digits') self.assertAlmostEqual(0.89313904068002425, sklearn.metrics.accuracy_score( diff --git a/tests/components/regression/test_sgd.py b/tests/components/regression/test_sgd.py new file mode 100644 index 0000000000..4bbe70dacd --- /dev/null +++ b/tests/components/regression/test_sgd.py @@ -0,0 +1,39 @@ +import unittest + +from ParamSklearn.components.regression.sgd import SGD +from ParamSklearn.util import _test_regressor, _test_regressor_iterative_fit + +import sklearn.metrics + + +class SGDComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_regressor(SGD) + print predictions + print targets + self.assertAlmostEqual(0.092460881802630235, + sklearn.metrics.r2_score(y_true=targets, + y_pred=predictions)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = _test_regressor_iterative_fit(SGD) + self.assertAlmostEqual(0.092460881802630235, + sklearn.metrics.r2_score(y_true=targets, + y_pred=predictions)) + + def test_default_configuration_digits(self): + for i in range(10): + predictions, targets = _test_regressor(SGD, dataset='boston') + self.assertAlmostEqual(-2.9165866511775519e+31, + sklearn.metrics.r2_score(y_true=targets, + y_pred=predictions)) + + def test_default_configuration_digits_iterative_fit(self): + for i in range(10): + predictions, targets = _test_regressor_iterative_fit(SGD, + dataset='boston') + self.assertAlmostEqual(-2.9165866511775519e+31, + sklearn.metrics.r2_score(y_true=targets, + y_pred=predictions)) \ No newline at end of file diff --git a/tests/test_classification.py b/tests/test_classification.py index e51b5a57a5..0a0c87457c 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -191,7 +191,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(140, len(hyperparameters)) + self.assertEqual(141, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 5, len(conditions)) diff --git a/tests/test_regression.py b/tests/test_regression.py index 1e327bbf54..cc8d49f432 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -85,7 +85,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(120, len(hyperparameters)) + self.assertEqual(131, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From fc80312e4fc0032326b6dff799a0df738b354a3f Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 23 Jul 2015 17:30:07 +0200 Subject: [PATCH 281/352] FIX wrong casting to bool --- .../classification/liblinear_svc.py | 4 +-- .../components/classification/libsvm_svc.py | 2 +- .../classification/passive_aggresive.py | 2 +- .../components/classification/ridge.py | 2 +- .../components/preprocessing/fast_ica.py | 35 +++++++++---------- .../liblinear_svc_preprocessor.py | 4 +-- .../components/preprocessing/polynomial.py | 4 +-- .../components/regression/liblinear_svr.py | 4 +-- .../components/regression/libsvm_svr.py | 2 +- .../components/regression/ridge_regression.py | 2 +- .../components/preprocessing/test_fast_ica.py | 16 ++++----- 11 files changed, 38 insertions(+), 39 deletions(-) diff --git a/ParamSklearn/components/classification/liblinear_svc.py b/ParamSklearn/components/classification/liblinear_svc.py index 458b9379b3..57c90fe2e4 100644 --- a/ParamSklearn/components/classification/liblinear_svc.py +++ b/ParamSklearn/components/classification/liblinear_svc.py @@ -32,8 +32,8 @@ def fit(self, X, Y): self.C = float(self.C) self.tol = float(self.tol) - self.dual = bool(self.dual) - self.fit_intercept = bool(self.fit_intercept) + self.dual = self.dual == 'True' + self.fit_intercept = self.fit_intercept == 'True' self.intercept_scaling = float(self.intercept_scaling) if self.class_weight == "None": diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index 3f5adb8f00..27eb14aa2c 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -42,7 +42,7 @@ def fit(self, X, Y): self.coef0 = float(self.coef0) self.tol = float(self.tol) self.max_iter = float(self.max_iter) - self.shrinking = bool(self.shrinking) + self.shrinking = self.shrinking == 'True' if self.class_weight == "None": self.class_weight = None diff --git a/ParamSklearn/components/classification/passive_aggresive.py b/ParamSklearn/components/classification/passive_aggresive.py index 9f91ffdcdb..a010de8047 100644 --- a/ParamSklearn/components/classification/passive_aggresive.py +++ b/ParamSklearn/components/classification/passive_aggresive.py @@ -15,7 +15,7 @@ class PassiveAggressive(ParamSklearnClassificationAlgorithm): def __init__(self, C, fit_intercept, n_iter, loss, random_state=None): self.C = float(C) - self.fit_intercept = bool(fit_intercept) + self.fit_intercept = fit_intercept == 'True' self.n_iter = int(n_iter) self.loss = loss self.random_state = random_state diff --git a/ParamSklearn/components/classification/ridge.py b/ParamSklearn/components/classification/ridge.py index 6b758b4635..b37ddf7401 100644 --- a/ParamSklearn/components/classification/ridge.py +++ b/ParamSklearn/components/classification/ridge.py @@ -16,7 +16,7 @@ class Ridge(ParamSklearnClassificationAlgorithm): def __init__(self, alpha, fit_intercept, tol, class_weight=None, random_state=None): self.alpha = float(alpha) - self.fit_intercept = bool(fit_intercept) + self.fit_intercept = fit_intercept == 'True' self.tol = float(tol) self.class_weight = class_weight self.random_state = random_state diff --git a/ParamSklearn/components/preprocessing/fast_ica.py b/ParamSklearn/components/preprocessing/fast_ica.py index fe9b317bb9..c0077edde2 100644 --- a/ParamSklearn/components/preprocessing/fast_ica.py +++ b/ParamSklearn/components/preprocessing/fast_ica.py @@ -4,9 +4,8 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ - UniformIntegerHyperparameter, UniformFloatHyperparameter -from HPOlibConfigSpace.forbidden import ForbiddenInClause, \ - ForbiddenAndConjunction, ForbiddenEqualsClause + UniformIntegerHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm @@ -16,11 +15,11 @@ class FastICA(ParamSklearnPreprocessingAlgorithm): - def __init__(self, n_components, algorithm, whiten, fun, + def __init__(self, algorithm, whiten, fun, n_components=None, random_state=None): - self.n_components = int(n_components) + self.n_components = None if n_components is None else int(n_components) self.algorithm = algorithm - self.whiten = bool(whiten) + self.whiten = whiten == 'True' self.fun = fun self.random_state = random_state @@ -62,19 +61,19 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - n_components = UniformIntegerHyperparameter( - "n_components", 10, 2000, default=100) - algorithm = CategoricalHyperparameter('algorithm', - ['parallel', 'deflation'], 'parallel') - whiten = CategoricalHyperparameter('whiten', - ['False', 'True'], 'False') - fun = CategoricalHyperparameter('fun', ['logcosh', 'exp', 'cube'], - 'logcosh') cs = ConfigurationSpace() - cs.add_hyperparameter(n_components) - cs.add_hyperparameter(algorithm) - cs.add_hyperparameter(whiten) - cs.add_hyperparameter(fun) + + n_components = cs.add_hyperparameter(UniformIntegerHyperparameter( + "n_components", 10, 2000, default=100)) + algorithm = cs.add_hyperparameter(CategoricalHyperparameter('algorithm', + ['parallel', 'deflation'], 'parallel')) + whiten = cs.add_hyperparameter(CategoricalHyperparameter('whiten', + ['False', 'True'], 'False')) + fun = cs.add_hyperparameter(CategoricalHyperparameter( + 'fun', ['logcosh', 'exp', 'cube'], 'logcosh')) + + cs.add_condition(EqualsCondition(n_components, whiten, "True")) + return cs diff --git a/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py b/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py index d1a0512acb..3165d770cb 100644 --- a/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py +++ b/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py @@ -31,8 +31,8 @@ def fit(self, X, Y): self.C = float(self.C) self.tol = float(self.tol) - self.dual = bool(self.dual) - self.fit_intercept = bool(self.fit_intercept) + self.dual = self.dual == 'True' + self.fit_intercept = self.fit_intercept == 'True' self.intercept_scaling = float(self.intercept_scaling) if self.class_weight == "None": diff --git a/ParamSklearn/components/preprocessing/polynomial.py b/ParamSklearn/components/preprocessing/polynomial.py index e60e6ba8f9..a40a942a0c 100644 --- a/ParamSklearn/components/preprocessing/polynomial.py +++ b/ParamSklearn/components/preprocessing/polynomial.py @@ -16,8 +16,8 @@ class PolynomialFeatures(ParamSklearnPreprocessingAlgorithm): def __init__(self, degree, interaction_only, include_bias, random_state=None): self.degree = int(degree) - self.interaction_only = bool(interaction_only) - self.include_bias = bool(include_bias) + self.interaction_only = interaction_only == 'True' + self.include_bias = include_bias == 'True' self.random_state = random_state self.preprocessor = None diff --git a/ParamSklearn/components/regression/liblinear_svr.py b/ParamSklearn/components/regression/liblinear_svr.py index 7f8c6aa8df..10ef780936 100644 --- a/ParamSklearn/components/regression/liblinear_svr.py +++ b/ParamSklearn/components/regression/liblinear_svr.py @@ -30,8 +30,8 @@ def fit(self, X, Y): self.tol = float(self.tol) self.epsilon = float(self.epsilon) - self.dual = bool(self.dual) - self.fit_intercept = bool(self.fit_intercept) + self.dual = self.dual == 'True' + self.fit_intercept = self.fit_intercept == 'True' self.intercept_scaling = float(self.intercept_scaling) self.estimator = sklearn.svm.LinearSVR(epsilon=self.epsilon, diff --git a/ParamSklearn/components/regression/libsvm_svr.py b/ParamSklearn/components/regression/libsvm_svr.py index c7deeeeb32..02df3c1575 100644 --- a/ParamSklearn/components/regression/libsvm_svr.py +++ b/ParamSklearn/components/regression/libsvm_svr.py @@ -34,7 +34,7 @@ def fit(self, X, Y): self.C = float(self.C) self.epsilon = float(self.epsilon) self.tol = float(self.tol) - self.shrinking = bool(self.shrinking) + self.shrinking = self.shrinking == 'True' self.degree = int(self.degree) self.gamma = float(self.gamma) if self.coef0 is None: diff --git a/ParamSklearn/components/regression/ridge_regression.py b/ParamSklearn/components/regression/ridge_regression.py index f7e2d190a6..8f2b91723c 100644 --- a/ParamSklearn/components/regression/ridge_regression.py +++ b/ParamSklearn/components/regression/ridge_regression.py @@ -12,7 +12,7 @@ class RidgeRegression(ParamSklearnRegressionAlgorithm): def __init__(self, alpha, fit_intercept, tol, random_state=None): self.alpha = float(alpha) - self.fit_intercept = bool(fit_intercept) + self.fit_intercept = fit_intercept == 'True' self.tol = float(tol) self.random_state = random_state self.estimator = None diff --git a/tests/components/preprocessing/test_fast_ica.py b/tests/components/preprocessing/test_fast_ica.py index bf15bb0d31..c71bf5967b 100644 --- a/tests/components/preprocessing/test_fast_ica.py +++ b/tests/components/preprocessing/test_fast_ica.py @@ -1,6 +1,6 @@ import unittest -from sklearn.linear_model import RidgeClassifier +from sklearn.linear_model import Ridge from ParamSklearn.components.preprocessing.fast_ica import \ FastICA from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ @@ -10,14 +10,14 @@ class FastICAComponentTest(PreprocessingTestCase): def test_default_configuration(self): - transformation, original = _test_preprocessing(FastICA) + transformation, original = _test_preprocessing(FastICA, + dataset="diabetes") self.assertEqual(transformation.shape[0], original.shape[0]) self.assertFalse((transformation == 0).all()) - def test_default_configuration_classify(self): + def test_default_configuration_regression(self): for i in range(5): - X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris', - make_sparse=False) + X_train, Y_train, X_test, Y_test = get_dataset(dataset='diabetes') configuration_space = FastICA.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() preprocessor = FastICA(random_state=1, @@ -28,11 +28,11 @@ def test_default_configuration_classify(self): X_test_trans = preprocessor.transform(X_test) # fit a classifier on top - classifier = RidgeClassifier() + classifier = Ridge() predictor = classifier.fit(X_train_trans, Y_train) predictions = predictor.predict(X_test_trans) - accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) - self.assertAlmostEqual(accuracy, 0.90000000000000002) + accuracy = sklearn.metrics.r2_score(Y_test, predictions) + self.assertAlmostEqual(accuracy, 0.32614416980439365) @unittest.skip("Always returns float64") def test_preprocessing_dtype(self): From d751eafdbc175d8009fd0dea5d4fab3f722e366c Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 28 Jul 2015 11:30:49 +0200 Subject: [PATCH 282/352] Update all classifiers to sklearn 0.16 --- .../classification/decision_tree.py | 4 +- .../components/classification/extra_trees.py | 5 +- ParamSklearn/components/classification/lda.py | 46 ++++++++++++++----- .../classification/multinomial_nb.py | 6 +-- ParamSklearn/components/classification/qda.py | 4 +- .../classification/random_forest.py | 4 +- .../components/classification/ridge.py | 11 +++-- .../components/preprocessing/balancing.py | 5 +- misc/regressors.csv | 18 ++++---- .../preprocessing/test_balancing.py | 6 ++- tests/test_classification.py | 2 +- 11 files changed, 73 insertions(+), 38 deletions(-) diff --git a/ParamSklearn/components/classification/decision_tree.py b/ParamSklearn/components/classification/decision_tree.py index 660aecaab5..a1d7b33ce1 100644 --- a/ParamSklearn/components/classification/decision_tree.py +++ b/ParamSklearn/components/classification/decision_tree.py @@ -15,7 +15,7 @@ class DecisionTree(ParamSklearnClassificationAlgorithm): def __init__(self, criterion, splitter, max_features, max_depth, min_samples_split, min_samples_leaf, min_weight_fraction_leaf, - max_leaf_nodes, random_state=None): + max_leaf_nodes, class_weight=None, random_state=None): self.criterion = criterion self.splitter = splitter self.max_features = max_features @@ -25,6 +25,7 @@ def __init__(self, criterion, splitter, max_features, max_depth, self.max_leaf_nodes = max_leaf_nodes self.min_weight_fraction_leaf = min_weight_fraction_leaf self.random_state = random_state + self.class_weight = class_weight self.estimator = None def fit(self, X, y, sample_weight=None): @@ -48,6 +49,7 @@ def fit(self, X, y, sample_weight=None): min_samples_split=self.min_samples_split, min_samples_leaf=self.min_samples_leaf, max_leaf_nodes=self.max_leaf_nodes, + class_weight=self.class_weight, random_state=self.random_state) self.estimator.fit(X, y, sample_weight=sample_weight) return self diff --git a/ParamSklearn/components/classification/extra_trees.py b/ParamSklearn/components/classification/extra_trees.py index f6c7143f9f..1f8b6c2cbc 100644 --- a/ParamSklearn/components/classification/extra_trees.py +++ b/ParamSklearn/components/classification/extra_trees.py @@ -16,7 +16,8 @@ class ExtraTreesClassifier(ParamSklearnClassificationAlgorithm): def __init__(self, n_estimators, criterion, min_samples_leaf, min_samples_split, max_features, max_leaf_nodes_or_max_depth="max_depth", bootstrap=False, max_leaf_nodes=None, max_depth="None", - oob_score=False, n_jobs=1, random_state=None, verbose=0): + oob_score=False, n_jobs=1, random_state=None, verbose=0, + class_weight=None): self.n_estimators = int(n_estimators) self.estimator_increment = 10 @@ -56,6 +57,7 @@ def __init__(self, n_estimators, criterion, min_samples_leaf, self.n_jobs = int(n_jobs) self.random_state = random_state self.verbose = int(verbose) + self.class_weight = class_weight self.estimator = None def fit(self, X, y, sample_weight=None, refit=False): @@ -84,6 +86,7 @@ def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): max_features=max_features, max_leaf_nodes=self.max_leaf_nodes, oob_score=self.oob_score, n_jobs=self.n_jobs, verbose=self.verbose, random_state=self.random_state, + class_weight=self.class_weight, warm_start=True ) diff --git a/ParamSklearn/components/classification/lda.py b/ParamSklearn/components/classification/lda.py index a8dbab4b21..e15d501817 100644 --- a/ParamSklearn/components/classification/lda.py +++ b/ParamSklearn/components/classification/lda.py @@ -2,7 +2,8 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - UniformIntegerHyperparameter + UniformIntegerHyperparameter, CategoricalHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition from ParamSklearn.components.base import \ ParamSklearnClassificationAlgorithm @@ -12,14 +13,33 @@ class LDA(ParamSklearnClassificationAlgorithm): - def __init__(self, n_components, tol, random_state=None): - self.n_components = int(n_components) - self.tol = float(tol) + def __init__(self, shrinkage, n_components, tol, shrinkage_factor=0.5, + random_state=None): + self.shrinkage = shrinkage + self.n_components = n_components + self.tol = tol + self.shrinkage_factor = shrinkage_factor self.estimator = None def fit(self, X, Y): + if self.shrinkage == "None": + self.shrinkage = None + solver = 'svd' + elif self.shrinkage == "auto": + solver = 'lsqr' + elif self.shrinkage == "manual": + self.shrinkage = float(self.shrinkage_factor) + solver = 'lsqr' + else: + raise ValueError(self.shrinkage) - self.estimator = sklearn.lda.LDA(n_components=self.n_components) + self.n_components = int(self.n_components) + self.tol = float(self.tol) + + self.estimator = sklearn.lda.LDA(n_components=self.n_components, + shrinkage=self.shrinkage, + tol=self.tol, + solver=solver) self.estimator.fit(X, Y, tol=self.tol) return self @@ -58,11 +78,15 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - n_components = UniformIntegerHyperparameter('n_components', 1, 250, - default=10) - tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default=1e-4, - log=True) cs = ConfigurationSpace() - cs.add_hyperparameter(n_components) - cs.add_hyperparameter(tol) + shrinkage = cs.add_hyperparameter(CategoricalHyperparameter( + "shrinkage", ["None", "auto", "manual"], default="None")) + shrinkage_factor = cs.add_hyperparameter(UniformFloatHyperparameter( + "shrinkage_factor", 0., 1., 0.5)) + n_components = cs.add_hyperparameter(UniformIntegerHyperparameter( + 'n_components', 1, 250, default=10)) + tol = cs.add_hyperparameter(UniformFloatHyperparameter( + "tol", 1e-5, 1e-1, default=1e-4, log=True)) + + cs.add_condition(EqualsCondition(shrinkage_factor, shrinkage, "manual")) return cs diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py index 243668ae45..58a42f6698 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -103,9 +103,9 @@ def get_hyperparameter_search_space(dataset_properties=None): alpha = UniformFloatHyperparameter(name="alpha", lower=1e-2, upper=100, default=1, log=True) - fit_prior = CategoricalHyperparameter( name="fit_prior", - choices=["True", "False"], - default="True") + fit_prior = CategoricalHyperparameter(name="fit_prior", + choices=["True", "False"], + default="True") cs.add_hyperparameter(alpha) cs.add_hyperparameter(fit_prior) diff --git a/ParamSklearn/components/classification/qda.py b/ParamSklearn/components/classification/qda.py index 8bbeb9472f..1faa372463 100644 --- a/ParamSklearn/components/classification/qda.py +++ b/ParamSklearn/components/classification/qda.py @@ -56,11 +56,11 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - n_components = UniformFloatHyperparameter('reg_param', 0.0, 10.0, + reg_param = UniformFloatHyperparameter('reg_param', 0.0, 10.0, default=0.5) tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default=1e-4, log=True) cs = ConfigurationSpace() - cs.add_hyperparameter(n_components) + cs.add_hyperparameter(reg_param) cs.add_hyperparameter(tol) return cs diff --git a/ParamSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py index 9b214f5268..21f893d446 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -16,7 +16,7 @@ class RandomForest(ParamSklearnClassificationAlgorithm): def __init__(self, n_estimators, criterion, max_features, max_depth, min_samples_split, min_samples_leaf, min_weight_fraction_leaf, bootstrap, max_leaf_nodes, - random_state=None, n_jobs=1): + random_state=None, n_jobs=1, class_weight=None): self.n_estimators = n_estimators self.estimator_increment = 10 self.criterion = criterion @@ -29,6 +29,7 @@ def __init__(self, n_estimators, criterion, max_features, self.max_leaf_nodes = max_leaf_nodes self.random_state = random_state self.n_jobs = n_jobs + self.class_weight = class_weight self.estimator = None def fit(self, X, y, sample_weight=None, refit=False): @@ -80,6 +81,7 @@ def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): max_leaf_nodes=self.max_leaf_nodes, random_state=self.random_state, n_jobs=self.n_jobs, + class_weight=self.class_weight, warm_start=True) tmp = self.estimator diff --git a/ParamSklearn/components/classification/ridge.py b/ParamSklearn/components/classification/ridge.py index b37ddf7401..aad69d7e47 100644 --- a/ParamSklearn/components/classification/ridge.py +++ b/ParamSklearn/components/classification/ridge.py @@ -24,11 +24,12 @@ def __init__(self, alpha, fit_intercept, tol, class_weight=None, def fit(self, X, Y): self.estimator = RidgeClassifier(alpha=self.alpha, - fit_intercept=self.fit_intercept, - tol=self.tol, - class_weight=self.class_weight, - copy_X=False, - normalize=False) + fit_intercept=self.fit_intercept, + tol=self.tol, + class_weight=self.class_weight, + copy_X=False, + normalize=False, + solver='auto') self.estimator.fit(X, Y) return self diff --git a/ParamSklearn/components/preprocessing/balancing.py b/ParamSklearn/components/preprocessing/balancing.py index f9434e7a5b..ee0d087d4d 100644 --- a/ParamSklearn/components/preprocessing/balancing.py +++ b/ParamSklearn/components/preprocessing/balancing.py @@ -29,7 +29,7 @@ def get_weights(self, Y, classifier, preprocessor, init_params, fit_params): # We can have adaboost in here, because in the fit method, # the sample weights are normalized: # https://github.com/scikit-learn/scikit-learn/blob/0.15.X/sklearn/ensemble/weight_boosting.py#L121 - clf_ = ['adaboost', 'decision_tree', 'extra_trees', 'random_forest'] + clf_ = ['adaboost', 'gradient_boosting'] pre_ = ['extra_trees_preproc_for_classification'] if classifier in clf_ or preprocessor in pre_: if len(Y.shape) > 1: @@ -55,7 +55,8 @@ def get_weights(self, Y, classifier, preprocessor, init_params, fit_params): # Classifiers which can adjust sample weights themselves via the # argument `class_weight` - clf_ = ['liblinear_svc', 'libsvm_svc', 'sgd'] + clf_ = ['decision_tree', 'extra_trees', 'liblinear_svc', + 'libsvm_svc', 'random_forest', 'sgd'] pre_ = ['liblinear_svc_preprocessor'] if classifier in clf_: init_params['classifier:class_weight'] = 'auto' diff --git a/misc/regressors.csv b/misc/regressors.csv index f31502b56f..d8616e8814 100644 --- a/misc/regressors.csv +++ b/misc/regressors.csv @@ -14,27 +14,27 @@ class,added,comment ,True, ,True,Crashes when getting two similar inputs ,False,Calibration instead of prediction method -,, -,, -,, -,, -,, +,False,Add +,False,No +,False,Wait for Tobias' feedback +,False,Wait for Tobias' feedback +,False,Wait for Tobias' feedback ,False, -,, +,False,Wait for Tobias' feedback ,False,We want to perform CV ourselves ,False,MultiTask ,False,We want to perform CV ourselves ,False,MultiTask ,False,MultiTask -,, +,,No ,False,We want to perform CV ourselves -,, +,,No ,False,We want to perform CV ourselves ,False,We want to perform CV ourselves ,, ,False,We want to perform CV ourselves ,, -,, +,,no ,True, ,False,We want to perform CV ourselves ,False, diff --git a/tests/components/preprocessing/test_balancing.py b/tests/components/preprocessing/test_balancing.py index 15c912c0a2..e86fbba811 100644 --- a/tests/components/preprocessing/test_balancing.py +++ b/tests/components/preprocessing/test_balancing.py @@ -28,7 +28,7 @@ def test_balancing_get_weights_treed_single_label(self): Y = np.array([0] * 80 + [1] * 20) balancing = Balancing(strategy='weighting') init_params, fit_params = balancing.get_weights( - Y, 'random_forest', None, None, None) + Y, 'adaboost', None, None, None) self.assertTrue(np.allclose(fit_params['classifier:sample_weight'], np.array([0.4] * 80 + [1.6] * 20))) init_params, fit_params = balancing.get_weights( @@ -41,7 +41,7 @@ def test_balancing_get_weights_treed_multilabel(self): [[1, 1, 0]] * 100 + [[0, 0, 1]] * 100 + [[1, 0, 1]] * 10) balancing = Balancing(strategy='weighting') init_params, fit_params = balancing.get_weights( - Y, 'random_forest', None, None, None) + Y, 'adaboost', None, None, None) self.assertTrue(np.allclose(fit_params['classifier:sample_weight'], np.array([0.4] * 500 + [4.0] * 10))) init_params, fit_params = balancing.get_weights( @@ -74,6 +74,7 @@ def test_weighting_effect(self): [('adaboost', AdaboostClassifier, 0.692, 0.719), ('decision_tree', DecisionTree, 0.712, 0.668), ('extra_trees', ExtraTreesClassifier, 0.901, 0.919), + ('gradient_boosting', GradientBoostingClassifier, 0.879, 0.883), ('random_forest', RandomForest, 0.886, 0.885), ('libsvm_svc', LibSVM_SVC, 0.915, 0.937), ('liblinear_svc', LibLinear_SVC, 0.920, 0.923), @@ -89,6 +90,7 @@ def test_weighting_effect(self): default = cs.get_default_configuration() default._values['balancing:strategy'] = strategy classifier = ParamSklearnClassifier(default, random_state=1) + print classifier predictor = classifier.fit(X_train, Y_train) predictions = predictor.predict(X_test) self.assertAlmostEqual(acc, diff --git a/tests/test_classification.py b/tests/test_classification.py index 0a0c87457c..6b5bd8d2ce 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -191,7 +191,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(141, len(hyperparameters)) + self.assertEqual(143, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 5, len(conditions)) From 8fd35f58ddb570ce1cf628a48358e4239836cf97 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 28 Jul 2015 13:28:57 +0200 Subject: [PATCH 283/352] Update preprocessors to sklearn 0.16 --- .../components/classification/extra_trees.py | 30 +-------- .../classification/liblinear_svc.py | 2 +- .../components/preprocessing/balancing.py | 5 +- .../extra_trees_preproc_for_classification.py | 55 ++++++++------- .../preprocessing/feature_agglomeration.py | 30 ++++++--- .../components/preprocessing/kernel_pca.py | 2 +- .../liblinear_svc_preprocessor.py | 67 +++++++++---------- ParamSklearn/components/preprocessing/pca.py | 23 ++----- .../components/preprocessing/polynomial.py | 10 +-- .../preprocessing/random_trees_embedding.py | 30 +++++---- .../components/preprocessing/truncatedSVD.py | 2 +- ParamSklearn/util.py | 15 +++-- .../preprocessing/test_balancing.py | 17 +++-- .../preprocessing/test_extra_trees.py | 3 +- .../components/preprocessing/test_fast_ica.py | 2 +- .../preprocessing/test_kernel_pca.py | 7 ++ .../preprocessing/test_kitchen_sinks.py | 64 ++---------------- tests/components/preprocessing/test_pca.py | 31 ++------- .../test_random_trees_embedding.py | 2 +- .../preprocessing/test_truncatedSVD.py | 43 ++++++++++++ tests/test_classification.py | 2 +- ..._create_searchspace_util_classification.py | 7 +- tests/test_regression.py | 2 +- 23 files changed, 195 insertions(+), 256 deletions(-) create mode 100644 tests/components/preprocessing/test_truncatedSVD.py diff --git a/ParamSklearn/components/classification/extra_trees.py b/ParamSklearn/components/classification/extra_trees.py index 1f8b6c2cbc..38671a5c68 100644 --- a/ParamSklearn/components/classification/extra_trees.py +++ b/ParamSklearn/components/classification/extra_trees.py @@ -16,6 +16,7 @@ class ExtraTreesClassifier(ParamSklearnClassificationAlgorithm): def __init__(self, n_estimators, criterion, min_samples_leaf, min_samples_split, max_features, max_leaf_nodes_or_max_depth="max_depth", bootstrap=False, max_leaf_nodes=None, max_depth="None", + min_weight_fraction_leaf=0.0, oob_score=False, n_jobs=1, random_state=None, verbose=0, class_weight=None): @@ -150,35 +151,10 @@ def get_hyperparameter_search_space(dataset_properties=None): "min_samples_split", 2, 20, default=2)) min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter( "min_samples_leaf", 1, 20, default=1)) - - # Unparametrized, we use min_samples as regularization - # max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter( - # name="max_leaf_nodes_or_max_depth", value="max_depth") - # CategoricalHyperparameter("max_leaf_nodes_or_max_depth", - # choices=["max_leaf_nodes", "max_depth"], default="max_depth") - # min_weight_fraction_leaf = UniformFloatHyperparameter( - # "min_weight_fraction_leaf", 0.0, 0.1) - # max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", - # value="None") + min_weight_fraction_leaf = cs.add_hyperparameter(Constant( + 'min_weight_fraction_leaf', 0.)) bootstrap = cs.add_hyperparameter(CategoricalHyperparameter( "bootstrap", ["True", "False"], default="False")) - # Conditions - # Not applicable because max_leaf_nodes is no legal value of the parent - #cond_max_leaf_nodes_or_max_depth = \ - # EqualsCondition(child=max_leaf_nodes, - # parent=max_leaf_nodes_or_max_depth, - # value="max_leaf_nodes") - #cond2_max_leaf_nodes_or_max_depth = \ - # EqualsCondition(child=use_max_depth, - # parent=max_leaf_nodes_or_max_depth, - # value="max_depth") - - #cond_max_depth = EqualsCondition(child=max_depth, parent=use_max_depth, - #value="True") - #cs.add_condition(cond_max_leaf_nodes_or_max_depth) - #cs.add_condition(cond2_max_leaf_nodes_or_max_depth) - #cs.add_condition(cond_max_depth) - return cs diff --git a/ParamSklearn/components/classification/liblinear_svc.py b/ParamSklearn/components/classification/liblinear_svc.py index 57c90fe2e4..c9236b8156 100644 --- a/ParamSklearn/components/classification/liblinear_svc.py +++ b/ParamSklearn/components/classification/liblinear_svc.py @@ -2,7 +2,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - CategoricalHyperparameter, Constant, UnParametrizedHyperparameter + CategoricalHyperparameter, Constant from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ ForbiddenAndConjunction diff --git a/ParamSklearn/components/preprocessing/balancing.py b/ParamSklearn/components/preprocessing/balancing.py index ee0d087d4d..26a72d4545 100644 --- a/ParamSklearn/components/preprocessing/balancing.py +++ b/ParamSklearn/components/preprocessing/balancing.py @@ -30,7 +30,7 @@ def get_weights(self, Y, classifier, preprocessor, init_params, fit_params): # the sample weights are normalized: # https://github.com/scikit-learn/scikit-learn/blob/0.15.X/sklearn/ensemble/weight_boosting.py#L121 clf_ = ['adaboost', 'gradient_boosting'] - pre_ = ['extra_trees_preproc_for_classification'] + pre_ = [] if classifier in clf_ or preprocessor in pre_: if len(Y.shape) > 1: offsets = [2 ** i for i in range(Y.shape[1])] @@ -57,7 +57,8 @@ def get_weights(self, Y, classifier, preprocessor, init_params, fit_params): # argument `class_weight` clf_ = ['decision_tree', 'extra_trees', 'liblinear_svc', 'libsvm_svc', 'random_forest', 'sgd'] - pre_ = ['liblinear_svc_preprocessor'] + pre_ = ['liblinear_svc_preprocessor', + 'extra_trees_preproc_for_classification'] if classifier in clf_: init_params['classifier:class_weight'] = 'auto' if preprocessor in pre_: diff --git a/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py b/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py index db9729a97a..63d43533fc 100644 --- a/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py +++ b/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py @@ -8,10 +8,7 @@ from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import DENSE, INPUT - -# get our own forests to replace the sklearn ones -#from ParamSklearn.implementations import forest +from ParamSklearn.util import DENSE, INPUT, SPARSE class ExtraTreesPreprocessor(ParamSklearnPreprocessingAlgorithm): @@ -19,7 +16,9 @@ def __init__(self, n_estimators, criterion, min_samples_leaf, min_samples_split, max_features, max_leaf_nodes_or_max_depth="max_depth", bootstrap=False, max_leaf_nodes=None, max_depth="None", - oob_score=False, n_jobs=1, random_state=None, verbose=0): + min_weight_fraction_leaf=0.0, + oob_score=False, n_jobs=1, random_state=None, verbose=0, + class_weight=None): self.n_estimators = int(n_estimators) self.estimator_increment = 10 @@ -59,6 +58,7 @@ def __init__(self, n_estimators, criterion, min_samples_leaf, self.n_jobs = int(n_jobs) self.random_state = random_state self.verbose = int(verbose) + self.class_weight = class_weight self.preprocessor = None def fit(self, X, Y, sample_weight=None): @@ -73,7 +73,7 @@ def fit(self, X, Y, sample_weight=None): min_samples_leaf=self.min_samples_leaf, bootstrap=self.bootstrap, max_features=max_features, max_leaf_nodes=self.max_leaf_nodes, oob_score=self.oob_score, n_jobs=self.n_jobs, verbose=self.verbose, - random_state=self.random_state, + random_state=self.random_state, class_weight=self.class_weight, warm_start=True ) # JTS TODO: I think we might have to copy here if we want self.estimator @@ -106,7 +106,7 @@ def get_properties(): 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, ), + 'input': (DENSE, SPARSE), 'output': INPUT, # TODO find out what is best used here! # But rather fortran or C-contiguous? @@ -114,26 +114,25 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - bootstrap = CategoricalHyperparameter( - "bootstrap", ["True", "False"], default="False") - n_estimators = Constant("n_estimators", 100) - criterion = CategoricalHyperparameter( - "criterion", ["gini", "entropy"], default="gini") - max_features = UniformFloatHyperparameter( - "max_features", 0.5, 5, default=1) - min_samples_split = UniformIntegerHyperparameter( - "min_samples_split", 2, 20, default=2) - min_samples_leaf = UniformIntegerHyperparameter( - "min_samples_leaf", 1, 20, default=1) - - max_depth = UnParametrizedHyperparameter(name="max_depth", value="None") - cs = ConfigurationSpace() - cs.add_hyperparameter(n_estimators) - cs.add_hyperparameter(criterion) - cs.add_hyperparameter(max_features) - cs.add_hyperparameter(max_depth) - cs.add_hyperparameter(min_samples_split) - cs.add_hyperparameter(min_samples_leaf) - cs.add_hyperparameter(bootstrap) + + n_estimators = cs.add_hyperparameter(Constant("n_estimators", 100)) + criterion = cs.add_hyperparameter(CategoricalHyperparameter( + "criterion", ["gini", "entropy"], default="gini")) + max_features = cs.add_hyperparameter(UniformFloatHyperparameter( + "max_features", 0.5, 5, default=1)) + + max_depth = cs.add_hyperparameter( + UnParametrizedHyperparameter(name="max_depth", value="None")) + + min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_split", 2, 20, default=2)) + min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_leaf", 1, 20, default=1)) + min_weight_fraction_leaf = cs.add_hyperparameter(Constant( + 'min_weight_fraction_leaf', 0.)) + + bootstrap = cs.add_hyperparameter(CategoricalHyperparameter( + "bootstrap", ["True", "False"], default="False")) + return cs diff --git a/ParamSklearn/components/preprocessing/feature_agglomeration.py b/ParamSklearn/components/preprocessing/feature_agglomeration.py index 7a88852fd1..aa77f4f366 100644 --- a/ParamSklearn/components/preprocessing/feature_agglomeration.py +++ b/ParamSklearn/components/preprocessing/feature_agglomeration.py @@ -1,3 +1,4 @@ +import numpy as np import sklearn.cluster from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -12,18 +13,26 @@ class FeatureAgglomeration(ParamSklearnPreprocessingAlgorithm): - def __init__(self, n_clusters, affinity, linkage, random_state=None): + def __init__(self, n_clusters, affinity, linkage, pooling_func, + random_state=None): self.n_clusters = int(n_clusters) self.affinity = affinity self.linkage = linkage + self.pooling_func = pooling_func self.random_state = random_state + self.pooling_func_mapping = dict(mean=np.mean, + median=np.median, + max=np.max) + def fit(self, X, Y=None): n_clusters = min(self.n_clusters, X.shape[1]) + if not callable(self.pooling_func): + self.pooling_func = self.pooling_func_mapping[self.pooling_func] self.preprocessor = sklearn.cluster.FeatureAgglomeration( n_clusters=n_clusters, affinity=self.affinity, - linkage=self.linkage) + linkage=self.linkage, pooling_func=self.pooling_func) self.preprocessor.fit(X) return self @@ -54,15 +63,16 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - n_clusters = UniformIntegerHyperparameter("n_clusters", 2, 400, 25) - affinity = CategoricalHyperparameter("affinity", - ["euclidean", "manhattan", "cosine"], "euclidean") - linkage = CategoricalHyperparameter("linkage", - ["ward", "complete", "average"], "ward") cs = ConfigurationSpace() - cs.add_hyperparameter(n_clusters) - cs.add_hyperparameter(affinity) - cs.add_hyperparameter(linkage) + n_clusters = cs.add_hyperparameter(UniformIntegerHyperparameter( + "n_clusters", 2, 400, 25)) + affinity = cs.add_hyperparameter(CategoricalHyperparameter( + "affinity", ["euclidean", "manhattan", "cosine"], "euclidean")) + linkage = cs.add_hyperparameter(CategoricalHyperparameter( + "linkage", ["ward", "complete", "average"], "ward")) + pooling_func = cs.add_hyperparameter(CategoricalHyperparameter( + "pooling_func", ["mean", "median", "max"])) + affinity_and_linkage = ForbiddenAndConjunction( ForbiddenInClause(affinity, ["manhattan", "cosine"]), ForbiddenEqualsClause(linkage, "ward")) diff --git a/ParamSklearn/components/preprocessing/kernel_pca.py b/ParamSklearn/components/preprocessing/kernel_pca.py index 9fe0506f1f..fe731f4bb3 100644 --- a/ParamSklearn/components/preprocessing/kernel_pca.py +++ b/ParamSklearn/components/preprocessing/kernel_pca.py @@ -56,7 +56,7 @@ def get_properties(): 'is_deterministic': False, 'handles_sparse': True, 'handles_dense': True, - 'input': (DENSE, ), + 'input': (DENSE, SPARSE), 'output': INPUT, 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py b/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py index 3165d770cb..1c35c0d80a 100644 --- a/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py +++ b/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py @@ -12,8 +12,9 @@ class LibLinear_Preprocessor(ParamSklearnPreprocessingAlgorithm): + # Liblinear is not deterministic as it uses a RNG inside def __init__(self, penalty, loss, dual, tol, C, multi_class, - fit_intercept, intercept_scaling, class_weight, + fit_intercept, intercept_scaling, class_weight=None, random_state=None): self.penalty = penalty self.loss = loss @@ -39,12 +40,15 @@ def fit(self, X, Y): self.class_weight = None self.preprocessor = sklearn.svm.LinearSVC(penalty=self.penalty, - loss=self.loss, - dual=self.dual, - tol=self.tol, - C=self.C, - class_weight=self.class_weight, - random_state=self.random_state) + loss=self.loss, + dual=self.dual, + tol=self.tol, + C=self.C, + class_weight=self.class_weight, + fit_intercept=self.fit_intercept, + intercept_scaling=self.intercept_scaling, + multi_class=self.multi_class, + random_state=self.random_state) self.preprocessor.fit(X, Y) return self @@ -68,8 +72,6 @@ def get_properties(): 'handles_multiclass': True, 'handles_multilabel': False, 'is_deterministic': False, - # TODO find out of this is right! - # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use 'handles_sparse': True, 'input': (SPARSE, DENSE), 'output': INPUT, @@ -78,41 +80,32 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - penalty = CategoricalHyperparameter("penalty", ["l1", "l2"], - default="l2") - loss = CategoricalHyperparameter("loss", ["l1", "l2"], default="l2") - dual = Constant("dual", "False") - # This is set ad-how - tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default=1e-4, - log=True) - C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True, - default=1.0) - multi_class = UnParametrizedHyperparameter("multi_class", "ovr") - # These are set ad-hoc - fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True") - intercept_scaling = UnParametrizedHyperparameter("intercept_scaling", 1) - # This does not allow for other resampling methods! - class_weight = CategoricalHyperparameter("class_weight", - ["None", "auto"], - default="None") cs = ConfigurationSpace() - cs.add_hyperparameter(penalty) - cs.add_hyperparameter(loss) - cs.add_hyperparameter(dual) - cs.add_hyperparameter(tol) - cs.add_hyperparameter(C) - cs.add_hyperparameter(multi_class) - cs.add_hyperparameter(fit_intercept) - cs.add_hyperparameter(intercept_scaling) - cs.add_hyperparameter(class_weight) + + penalty = cs.add_hyperparameter(CategoricalHyperparameter( + "penalty", ["l1", "l2"], default="l2")) + loss = cs.add_hyperparameter(CategoricalHyperparameter( + "loss", ["hinge", "squared_hinge"], default="squared_hinge")) + dual = cs.add_hyperparameter(Constant("dual", "False")) + # This is set ad-hoc + tol = cs.add_hyperparameter(UniformFloatHyperparameter( + "tol", 1e-5, 1e-1, default=1e-4, log=True)) + C = cs.add_hyperparameter(UniformFloatHyperparameter( + "C", 0.03125, 32768, log=True, default=1.0)) + multi_class = cs.add_hyperparameter(Constant("multi_class", "ovr")) + # These are set ad-hoc + fit_intercept = cs.add_hyperparameter(Constant("fit_intercept", "True")) + intercept_scaling = cs.add_hyperparameter(Constant( + "intercept_scaling", 1)) + penalty_and_loss = ForbiddenAndConjunction( ForbiddenEqualsClause(penalty, "l1"), - ForbiddenEqualsClause(loss, "l1") + ForbiddenEqualsClause(loss, "hinge") ) constant_penalty_and_loss = ForbiddenAndConjunction( ForbiddenEqualsClause(dual, "False"), ForbiddenEqualsClause(penalty, "l2"), - ForbiddenEqualsClause(loss, "l1") + ForbiddenEqualsClause(loss, "hinge") ) penalty_and_dual = ForbiddenAndConjunction( ForbiddenEqualsClause(dual, "False"), diff --git a/ParamSklearn/components/preprocessing/pca.py b/ParamSklearn/components/preprocessing/pca.py index a52d2a8bdb..74b11a490f 100644 --- a/ParamSklearn/components/preprocessing/pca.py +++ b/ParamSklearn/components/preprocessing/pca.py @@ -11,32 +11,17 @@ class PCA(ParamSklearnPreprocessingAlgorithm): def __init__(self, keep_variance, whiten, random_state=None): - # TODO document that this implementation does not allow the number of - # components to be specified, but rather the amount of variance to - # be kept! - # TODO it would also be possible to use a heuristic for the number of - # PCA components! self.keep_variance = keep_variance self.whiten = whiten self.random_state = random_state def fit(self, X, Y=None): - self.preprocessor = sklearn.decomposition.PCA(whiten=self.whiten, + n_components = float(self.keep_variance) + self.preprocessor = sklearn.decomposition.PCA(n_components=n_components, + whiten=self.whiten, copy=True) self.preprocessor.fit(X) - sum_ = 0. - idx = 0 - while idx < len(self.preprocessor.explained_variance_ratio_) and \ - sum_ < self.keep_variance: - sum_ += self.preprocessor.explained_variance_ratio_[idx] - idx += 1 - - components = self.preprocessor.components_ - self.preprocessor.components_ = components[:idx] - self.preprocessor.explained_variance_ = \ - self.preprocessor.explained_variance_[:idx] - if not np.isfinite(self.preprocessor.components_).all(): raise ValueError("PCA found non-finite components.") @@ -74,7 +59,7 @@ def get_properties(): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): keep_variance = UniformFloatHyperparameter( - "keep_variance", 0.5, 1.0, default=1.0) + "keep_variance", 0.5, 0.9999, default=0.9999) whiten = CategoricalHyperparameter( "whiten", ["False", "True"], default="False") cs = ConfigurationSpace() diff --git a/ParamSklearn/components/preprocessing/polynomial.py b/ParamSklearn/components/preprocessing/polynomial.py index a40a942a0c..37bad65f97 100644 --- a/ParamSklearn/components/preprocessing/polynomial.py +++ b/ParamSklearn/components/preprocessing/polynomial.py @@ -1,23 +1,19 @@ import sklearn.preprocessing from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - CategoricalHyperparameter, Constant, UnParametrizedHyperparameter, \ +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ UniformIntegerHyperparameter -from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ - ForbiddenAndConjunction from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm -from ParamSklearn.implementations.util import softmax from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS class PolynomialFeatures(ParamSklearnPreprocessingAlgorithm): def __init__(self, degree, interaction_only, include_bias, random_state=None): self.degree = int(degree) - self.interaction_only = interaction_only == 'True' - self.include_bias = include_bias == 'True' + self.interaction_only = interaction_only.lower() == 'true' + self.include_bias = include_bias.lower() == 'true' self.random_state = random_state self.preprocessor = None diff --git a/ParamSklearn/components/preprocessing/random_trees_embedding.py b/ParamSklearn/components/preprocessing/random_trees_embedding.py index 71f93bc51f..5b4e222a37 100644 --- a/ParamSklearn/components/preprocessing/random_trees_embedding.py +++ b/ParamSklearn/components/preprocessing/random_trees_embedding.py @@ -2,7 +2,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter, \ - UnParametrizedHyperparameter + UnParametrizedHyperparameter, Constant from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.util import SPARSE, DENSE @@ -11,24 +11,28 @@ class RandomTreesEmbedding(ParamSklearnPreprocessingAlgorithm): def __init__(self, n_estimators, max_depth, min_samples_split, - min_samples_leaf, max_leaf_nodes, sparse_output=True, - n_jobs=1, random_state=None): + min_samples_leaf, min_weight_fraction_leaf, max_leaf_nodes, + sparse_output=True, n_jobs=1, random_state=None): self.n_estimators = n_estimators - if max_depth == "None": - self.max_depth = None - else: - self.max_depth = int(max_depth) + self.max_depth = max_depth self.min_samples_split = min_samples_split self.min_samples_leaf = min_samples_leaf - if max_leaf_nodes == "None": - self.max_leaf_nodes = None - else: - self.max_leaf_nodes = max_leaf_nodes + self.max_leaf_nodes = max_leaf_nodes + self.min_weight_fraction_leaf = min_weight_fraction_leaf self.sparse_output = sparse_output self.n_jobs = n_jobs self.random_state = random_state def fit(self, X, Y=None): + if self.max_depth == "None": + self.max_depth = None + else: + self.max_depth = int(self.max_depth) + if self.max_leaf_nodes == "None": + self.max_leaf_nodes = None + else: + self.max_leaf_nodes = int(self.max_leaf_nodes) + self.preprocessor = sklearn.ensemble.RandomTreesEmbedding( n_estimators=self.n_estimators, max_depth=self.max_depth, @@ -63,7 +67,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'handles_dense': True, - 'input': (DENSE, ), + 'input': (DENSE, SPARSE), 'output': SPARSE, 'preferred_dtype': None} @@ -81,6 +85,7 @@ def get_hyperparameter_search_space(dataset_properties=None): min_samples_leaf = UniformIntegerHyperparameter(name="min_samples_leaf", lower=1, upper=20, default=1) + min_weight_fraction_leaf = Constant('min_weight_fraction_leaf', 1.0) max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes", value="None") cs = ConfigurationSpace() @@ -88,6 +93,7 @@ def get_hyperparameter_search_space(dataset_properties=None): cs.add_hyperparameter(max_depth) cs.add_hyperparameter(min_samples_split) cs.add_hyperparameter(min_samples_leaf) + cs.add_hyperparameter(min_weight_fraction_leaf) cs.add_hyperparameter(max_leaf_nodes) return cs diff --git a/ParamSklearn/components/preprocessing/truncatedSVD.py b/ParamSklearn/components/preprocessing/truncatedSVD.py index f6c876fbc9..aec900d484 100644 --- a/ParamSklearn/components/preprocessing/truncatedSVD.py +++ b/ParamSklearn/components/preprocessing/truncatedSVD.py @@ -22,7 +22,7 @@ def fit(self, X, Y): # TODO: remove when migrating to sklearn 0.16 # Circumvents a bug in sklearn # https://github.com/scikit-learn/scikit-learn/commit/f08b8c8e52663167819f242f605db39f3b5a6d0c - X = X.astype(np.float64) + # X = X.astype(np.float64) self.preprocessor.fit(X, Y) return self diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index dd4427f5b6..0e8f7633a5 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -130,7 +130,7 @@ def _test_preprocessing(Preprocessor, dataset='iris', make_sparse=False): original_X_train = X_train.copy() configuration_space = Preprocessor.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() - print default + preprocessor = Preprocessor(random_state=1, **{hp_name: default[hp_name] for hp_name in default if default[hp_name] is not None}) @@ -141,10 +141,10 @@ def _test_preprocessing(Preprocessor, dataset='iris', make_sparse=False): class PreprocessingTestCase(unittest.TestCase): def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False, - test_sparse=True): + test_sparse=True, dataset='iris'): # Dense # np.float32 - X_train, Y_train, X_test, Y_test = get_dataset("iris", add_NaNs=add_NaNs) + X_train, Y_train, X_test, Y_test = get_dataset(dataset, add_NaNs=add_NaNs) self.assertEqual(X_train.dtype, np.float32) configuration_space = Preprocessor.get_hyperparameter_search_space() @@ -157,7 +157,7 @@ def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False, self.assertEqual(Xt.dtype, np.float32) # np.float64 - X_train, Y_train, X_test, Y_test = get_dataset("iris", add_NaNs=add_NaNs) + X_train, Y_train, X_test, Y_test = get_dataset(dataset, add_NaNs=add_NaNs) X_train = X_train.astype(np.float64) configuration_space = Preprocessor.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() @@ -171,7 +171,7 @@ def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False, if test_sparse is True: # Sparse # np.float32 - X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True, + X_train, Y_train, X_test, Y_test = get_dataset(dataset, make_sparse=True, add_NaNs=add_NaNs) self.assertEqual(X_train.dtype, np.float32) configuration_space = Preprocessor.get_hyperparameter_search_space() @@ -184,7 +184,8 @@ def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False, self.assertEqual(Xt.dtype, np.float32) # np.float64 - X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True, + X_train, Y_train, X_test, Y_test = get_dataset(dataset, + make_sparse=True, add_NaNs=add_NaNs) X_train = X_train.astype(np.float64) configuration_space = Preprocessor.get_hyperparameter_search_space() @@ -192,7 +193,7 @@ def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False, preprocessor = Preprocessor(random_state=1, **{hp_name: default[hp_name] for hp_name in default}) - preprocessor.fit(X_train) + preprocessor.fit(X_train, Y_train) Xt = preprocessor.transform(X_train) self.assertEqual(Xt.dtype, np.float64) diff --git a/tests/components/preprocessing/test_balancing.py b/tests/components/preprocessing/test_balancing.py index e86fbba811..40dc4aca7e 100644 --- a/tests/components/preprocessing/test_balancing.py +++ b/tests/components/preprocessing/test_balancing.py @@ -31,10 +31,10 @@ def test_balancing_get_weights_treed_single_label(self): Y, 'adaboost', None, None, None) self.assertTrue(np.allclose(fit_params['classifier:sample_weight'], np.array([0.4] * 80 + [1.6] * 20))) - init_params, fit_params = balancing.get_weights( - Y, None, 'extra_trees_preproc_for_classification', None, None) - self.assertTrue(np.allclose(fit_params['preprocessor:sample_weight'], - np.array([0.4] * 80 + [1.6] * 20))) + #init_params, fit_params = balancing.get_weights( + # Y, None, 'extra_trees_preproc_for_classification', None, None) + #self.assertTrue(np.allclose(fit_params['preprocessor:sample_weight'], + # np.array([0.4] * 80 + [1.6] * 20))) def test_balancing_get_weights_treed_multilabel(self): Y = np.array([[0, 0, 0]] * 100 + [[1, 0, 0]] * 100 + [[0, 1, 0]] * 100 + @@ -44,10 +44,10 @@ def test_balancing_get_weights_treed_multilabel(self): Y, 'adaboost', None, None, None) self.assertTrue(np.allclose(fit_params['classifier:sample_weight'], np.array([0.4] * 500 + [4.0] * 10))) - init_params, fit_params = balancing.get_weights( - Y, None, 'extra_trees_preproc_for_classification', None, None) - self.assertTrue(np.allclose(fit_params['preprocessor:sample_weight'], - np.array([0.4] * 500 + [4.0] * 10))) + #init_params, fit_params = balancing.get_weights( + # Y, None, 'extra_trees_preproc_for_classification', None, None) + #self.assertTrue(np.allclose(fit_params['preprocessor:sample_weight'], + # np.array([0.4] * 500 + [4.0] * 10))) def test_balancing_get_weights_svm_sgd(self): Y = np.array([0] * 80 + [1] * 20) @@ -90,7 +90,6 @@ def test_weighting_effect(self): default = cs.get_default_configuration() default._values['balancing:strategy'] = strategy classifier = ParamSklearnClassifier(default, random_state=1) - print classifier predictor = classifier.fit(X_train, Y_train) predictions = predictor.predict(X_test) self.assertAlmostEqual(acc, diff --git a/tests/components/preprocessing/test_extra_trees.py b/tests/components/preprocessing/test_extra_trees.py index 457c8be9e8..fdae495445 100644 --- a/tests/components/preprocessing/test_extra_trees.py +++ b/tests/components/preprocessing/test_extra_trees.py @@ -36,5 +36,4 @@ def test_default_configuration_classify(self): def test_preprocessing_dtype(self): super(ExtreTreesComponentTest, - self)._test_preprocessing_dtype(ExtraTreesPreprocessor, - test_sparse=False) + self)._test_preprocessing_dtype(ExtraTreesPreprocessor) diff --git a/tests/components/preprocessing/test_fast_ica.py b/tests/components/preprocessing/test_fast_ica.py index c71bf5967b..db9d6ba15c 100644 --- a/tests/components/preprocessing/test_fast_ica.py +++ b/tests/components/preprocessing/test_fast_ica.py @@ -37,5 +37,5 @@ def test_default_configuration_regression(self): @unittest.skip("Always returns float64") def test_preprocessing_dtype(self): super(FastICAComponentTest, - self)._test_preprocessing_dtype(FastICA) + self)._test_preprocessing_dtype(FastICA, dataset='diabetes') diff --git a/tests/components/preprocessing/test_kernel_pca.py b/tests/components/preprocessing/test_kernel_pca.py index 5d4c7825c5..6be82de927 100644 --- a/tests/components/preprocessing/test_kernel_pca.py +++ b/tests/components/preprocessing/test_kernel_pca.py @@ -15,6 +15,13 @@ def test_default_configuration(self): self.assertEqual(transformation.shape[0], original.shape[0]) self.assertFalse((transformation == 0).all()) + def test_default_configuration_sparse(self): + transformation, original = _test_preprocessing(KernelPCA, + make_sparse=True, + dataset='digits') + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + def test_default_configuration_classify(self): for i in range(5): X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', diff --git a/tests/components/preprocessing/test_kitchen_sinks.py b/tests/components/preprocessing/test_kitchen_sinks.py index 3e0552984d..3994feccb6 100644 --- a/tests/components/preprocessing/test_kitchen_sinks.py +++ b/tests/components/preprocessing/test_kitchen_sinks.py @@ -3,10 +3,10 @@ import numpy as np from ParamSklearn.components.preprocessing.kitchen_sinks import RandomKitchenSinks -from ParamSklearn.util import _test_preprocessing, get_dataset +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase -class KitchenSinkComponent(unittest.TestCase): +class KitchenSinkComponent(PreprocessingTestCase): def test_default_configuration(self): transformation, original = _test_preprocessing(RandomKitchenSinks) self.assertEqual(transformation.shape[0], original.shape[0]) @@ -14,60 +14,6 @@ def test_default_configuration(self): self.assertFalse((transformation == 0).all()) @unittest.skip("Right now, the RBFSampler returns a float64 array!") - def _test_preprocessing_dtype(self): - # Dense - # np.float32 - X_train, Y_train, X_test, Y_test = get_dataset("iris") - self.assertEqual(X_train.dtype, np.float32) - - configuration_space = RandomKitchenSinks.get_hyperparameter_search_space() - default = configuration_space.get_default_configuration() - preprocessor = RandomKitchenSinks(random_state=1, - **{hp.hyperparameter.name: hp.value for hp - in - default.values.values()}) - preprocessor.fit(X_train) - print id(X_train) - Xt = preprocessor.transform(X_train) - print id(Xt) - self.assertEqual(Xt.dtype, np.float32) - - # np.float64 - X_train, Y_train, X_test, Y_test = get_dataset("iris") - X_train = X_train.astype(np.float64) - configuration_space = RandomKitchenSinks.get_hyperparameter_search_space() - default = configuration_space.get_default_configuration() - preprocessor = RandomKitchenSinks(random_state=1, - **{hp.hyperparameter.name: hp.value for hp - in - default.values.values()}) - preprocessor.fit(X_train, Y_train) - Xt = preprocessor.transform(X_train) - self.assertEqual(Xt.dtype, np.float64) - - # Sparse - # np.float32 - X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) - self.assertEqual(X_train.dtype, np.float32) - configuration_space = RandomKitchenSinks.get_hyperparameter_search_space() - default = configuration_space.get_default_configuration() - preprocessor = RandomKitchenSinks(random_state=1, - **{hp.hyperparameter.name: hp.value for hp - in - default.values.values()}) - preprocessor.fit(X_train) - Xt = preprocessor.transform(X_train) - self.assertEqual(Xt.dtype, np.float32) - - # np.float64 - X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) - X_train = X_train.astype(np.float64) - configuration_space = RandomKitchenSinks.get_hyperparameter_search_space() - default = configuration_space.get_default_configuration() - preprocessor = RandomKitchenSinks(random_state=1, - **{hp.hyperparameter.name: hp.value for hp - in - default.values.values()}) - preprocessor.fit(X_train) - Xt = preprocessor.transform(X_train) - self.assertEqual(Xt.dtype, np.float64) + def test_preprocessing_dtype(self): + super(KitchenSinkComponent, + self)._test_preprocessing_dtype(RandomKitchenSinks) diff --git a/tests/components/preprocessing/test_pca.py b/tests/components/preprocessing/test_pca.py index 88d91bf861..b56c3d61aa 100644 --- a/tests/components/preprocessing/test_pca.py +++ b/tests/components/preprocessing/test_pca.py @@ -3,10 +3,10 @@ import numpy as np from ParamSklearn.components.preprocessing.pca import PCA -from ParamSklearn.util import _test_preprocessing, get_dataset +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase -class PCAComponentTest(unittest.TestCase): +class PCAComponentTest(PreprocessingTestCase): def test_default_configuration(self): transformations = [] for i in range(10): @@ -18,28 +18,5 @@ def test_default_configuration(self): self.assertTrue((transformations[-1] == transformations[-2]).all()) def test_preprocessing_dtype(self): - # Dense - # np.float32 - X_train, Y_train, X_test, Y_test = get_dataset("iris") - self.assertEqual(X_train.dtype, np.float32) - - configuration_space = PCA.get_hyperparameter_search_space() - default = configuration_space.get_default_configuration() - preprocessor = PCA(random_state=1, - **{hp_name: default[hp_name] for hp_name in - default}) - preprocessor.fit(X_train) - Xt = preprocessor.transform(X_train) - self.assertEqual(Xt.dtype, np.float32) - - # np.float64 - X_train, Y_train, X_test, Y_test = get_dataset("iris") - X_train = X_train.astype(np.float64) - configuration_space = PCA.get_hyperparameter_search_space() - default = configuration_space.get_default_configuration() - preprocessor = PCA(random_state=1, - **{hp_name: default[hp_name] for hp_name in - default}) - preprocessor.fit(X_train, Y_train) - Xt = preprocessor.transform(X_train) - self.assertEqual(Xt.dtype, np.float64) \ No newline at end of file + super(PCAComponentTest, self)._test_preprocessing_dtype(PCA, + test_sparse=False) \ No newline at end of file diff --git a/tests/components/preprocessing/test_random_trees_embedding.py b/tests/components/preprocessing/test_random_trees_embedding.py index ce1c19a492..8844d13fba 100644 --- a/tests/components/preprocessing/test_random_trees_embedding.py +++ b/tests/components/preprocessing/test_random_trees_embedding.py @@ -32,7 +32,7 @@ def test_preprocessing_dtype(self): default}) preprocessor.fit(X_train) Xt = preprocessor.transform(X_train) - print Xt + self.assertEqual(Xt.dtype, np.float32) # np.float64 diff --git a/tests/components/preprocessing/test_truncatedSVD.py b/tests/components/preprocessing/test_truncatedSVD.py new file mode 100644 index 0000000000..9bffc7226d --- /dev/null +++ b/tests/components/preprocessing/test_truncatedSVD.py @@ -0,0 +1,43 @@ +import unittest + +from sklearn.linear_model import RidgeClassifier +from ParamSklearn.components.preprocessing.truncatedSVD import \ + TruncatedSVD +from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ + get_dataset +import sklearn.metrics + + +class TruncatedSVDComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(TruncatedSVD) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_classify(self): + for i in range(2): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=True) + configuration_space = TruncatedSVD.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = TruncatedSVD(random_state=1, + **{hp_name: default[hp_name] + for hp_name in + default if default[ + hp_name] is not None}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + classifier = RidgeClassifier() + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(accuracy, 0.44201578627808136, places=2) + + @unittest.skip("Truncated SVD returns np.float64.") + def test_preprocessing_dtype(self): + super(TruncatedSVDComponentTest, + self)._test_preprocessing_dtype(TruncatedSVD, + test_sparse=False) diff --git a/tests/test_classification.py b/tests/test_classification.py index 6b5bd8d2ce..1eb438c335 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -191,7 +191,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(143, len(hyperparameters)) + self.assertEqual(146, len(hyperparameters)) # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 5, len(conditions)) diff --git a/tests/test_create_searchspace_util_classification.py b/tests/test_create_searchspace_util_classification.py index 740ab16462..bfddd8ebb9 100644 --- a/tests/test_create_searchspace_util_classification.py +++ b/tests/test_create_searchspace_util_classification.py @@ -12,6 +12,7 @@ from ParamSklearn.components.preprocessing.pca import PCA from ParamSklearn.components.preprocessing.truncatedSVD import TruncatedSVD from ParamSklearn.components.preprocessing.no_preprocessing import NoPreprocessing +from ParamSklearn.components.preprocessing.fast_ica import FastICA from ParamSklearn.components.preprocessing.random_trees_embedding import RandomTreesEmbedding import ParamSklearn.create_searchspace_util @@ -87,14 +88,14 @@ def get_available_components(self, *args, **kwargs): self.assertListEqual(list(m[1, :]), [1, 1]) # tsvd self.assertListEqual(list(m[2, :]), [0, 1]) # none - preprocessors['rte'] = RandomTreesEmbedding + preprocessors['fast_ica'] = FastICA m = ParamSklearn.create_searchspace_util.get_match_array( node_0=Preprocessors, node_1=Classifiers, dataset_properties={'sparse': False}) self.assertListEqual(list(m[0, :]), [1, 1]) # pca self.assertListEqual(list(m[1, :]), [0, 0]) # tsvd self.assertListEqual(list(m[2, :]), [1, 1]) # none - self.assertListEqual(list(m[3, :]), [0, 1]) # random trees embedding + self.assertListEqual(list(m[3, :]), [1, 1]) # fast_ica m = ParamSklearn.create_searchspace_util.get_match_array( node_0=Preprocessors, node_1=Classifiers, @@ -102,7 +103,7 @@ def get_available_components(self, *args, **kwargs): self.assertListEqual(list(m[0, :]), [0, 0]) # pca self.assertListEqual(list(m[1, :]), [1, 1]) # tsvd self.assertListEqual(list(m[2, :]), [0, 1]) # none - self.assertListEqual(list(m[3, :]), [0, 0]) # random trees embedding + self.assertListEqual(list(m[3, :]), [0, 0]) # fast_ica def test_get_idx_to_keep(self): m = numpy.zeros([3, 4]) diff --git a/tests/test_regression.py b/tests/test_regression.py index cc8d49f432..a59d0ce27a 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -85,7 +85,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(131, len(hyperparameters)) + self.assertEqual(133, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 4, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From f7153f668b8724ef95bcaa7f1d14f8ebe5b0b38b Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 28 Jul 2015 13:43:09 +0200 Subject: [PATCH 284/352] LibLinear4preproc: fix penalty to L1 penalty --- .../preprocessing/liblinear_svc_preprocessor.py | 14 +------------- tests/components/preprocessing/test_liblinear.py | 2 +- 2 files changed, 2 insertions(+), 14 deletions(-) diff --git a/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py b/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py index 1c35c0d80a..43767114bc 100644 --- a/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py +++ b/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py @@ -82,8 +82,7 @@ def get_properties(): def get_hyperparameter_search_space(dataset_properties=None): cs = ConfigurationSpace() - penalty = cs.add_hyperparameter(CategoricalHyperparameter( - "penalty", ["l1", "l2"], default="l2")) + penalty = cs.add_hyperparameter(Constant("penalty", "l1")) loss = cs.add_hyperparameter(CategoricalHyperparameter( "loss", ["hinge", "squared_hinge"], default="squared_hinge")) dual = cs.add_hyperparameter(Constant("dual", "False")) @@ -102,16 +101,5 @@ def get_hyperparameter_search_space(dataset_properties=None): ForbiddenEqualsClause(penalty, "l1"), ForbiddenEqualsClause(loss, "hinge") ) - constant_penalty_and_loss = ForbiddenAndConjunction( - ForbiddenEqualsClause(dual, "False"), - ForbiddenEqualsClause(penalty, "l2"), - ForbiddenEqualsClause(loss, "hinge") - ) - penalty_and_dual = ForbiddenAndConjunction( - ForbiddenEqualsClause(dual, "False"), - ForbiddenEqualsClause(penalty, "l1") - ) cs.add_forbidden_clause(penalty_and_loss) - cs.add_forbidden_clause(constant_penalty_and_loss) - cs.add_forbidden_clause(penalty_and_dual) return cs diff --git a/tests/components/preprocessing/test_liblinear.py b/tests/components/preprocessing/test_liblinear.py index 5bf40a1107..668abe440c 100644 --- a/tests/components/preprocessing/test_liblinear.py +++ b/tests/components/preprocessing/test_liblinear.py @@ -34,7 +34,7 @@ def test_default_configuration_classify(self): predictor = classifier.fit(X_train_trans, Y_train) predictions = predictor.predict(X_test_trans) accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) - self.assertAlmostEqual(accuracy, 0.87310261080752882, places=2) + self.assertAlmostEqual(accuracy, 0.87917425622343659, places=2) def test_preprocessing_dtype(self): super(LiblinearComponentTest, From c50f5fceafeb6054563e90f9a0f9620a9179bf1b Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 28 Jul 2015 13:51:41 +0200 Subject: [PATCH 285/352] Reduce cache size of libsvm --- ParamSklearn/components/classification/libsvm_svc.py | 2 +- ParamSklearn/components/regression/libsvm_svr.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index 27eb14aa2c..7c41d91cf6 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -57,7 +57,7 @@ def fit(self, X, Y): class_weight=self.class_weight, max_iter=self.max_iter, random_state=self.random_state, - cache_size=2000) + cache_size=1000) # probability=True) self.estimator.fit(X, Y) return self diff --git a/ParamSklearn/components/regression/libsvm_svr.py b/ParamSklearn/components/regression/libsvm_svr.py index 02df3c1575..d8aa4a4a8a 100644 --- a/ParamSklearn/components/regression/libsvm_svr.py +++ b/ParamSklearn/components/regression/libsvm_svr.py @@ -56,7 +56,8 @@ def fit(self, X, Y): coef0=self.coef0, cache_size=self.cache_size, verbose=self.verbose, - max_iter=self.max_iter + max_iter=self.max_iter, + cache_size=1000 ) self.scaler = sklearn.preprocessing.StandardScaler(copy=True) From 20b882ce4c68fced50aae66d2101e9892d00291d Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 28 Jul 2015 13:52:41 +0200 Subject: [PATCH 286/352] Fix previous commit --- ParamSklearn/components/regression/libsvm_svr.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/ParamSklearn/components/regression/libsvm_svr.py b/ParamSklearn/components/regression/libsvm_svr.py index d8aa4a4a8a..96eb62436c 100644 --- a/ParamSklearn/components/regression/libsvm_svr.py +++ b/ParamSklearn/components/regression/libsvm_svr.py @@ -14,7 +14,7 @@ class LibSVM_SVR(ParamSklearnRegressionAlgorithm): def __init__(self, kernel, C, epsilon, tol, shrinking, gamma=0.0, - degree=3, coef0=0.0, cache_size=2000, verbose=False, + degree=3, coef0=0.0, cache_size=1000, verbose=False, max_iter=-1, random_state=None): self.kernel = kernel self.C = C @@ -56,8 +56,7 @@ def fit(self, X, Y): coef0=self.coef0, cache_size=self.cache_size, verbose=self.verbose, - max_iter=self.max_iter, - cache_size=1000 + max_iter=self.max_iter ) self.scaler = sklearn.preprocessing.StandardScaler(copy=True) From ff2038962c8da488745ae9e4d13325cfad8f1504 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 30 Jul 2015 15:35:31 +0200 Subject: [PATCH 287/352] Make test lest resource hungry --- tests/test_classification.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tests/test_classification.py b/tests/test_classification.py index 1eb438c335..ca6c79d322 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -1,5 +1,6 @@ __author__ = 'feurerm' +import resource import sys import traceback import unittest @@ -80,8 +81,12 @@ def test_default_configuration(self): scores = auto.predict_proba(X_test) def test_configurations(self): + # Use a limit of ~4GiB + limit = 4000 * 1024 * 2014 + resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) + cs = ParamSklearnClassifier.get_hyperparameter_search_space() - for i in range(10): + for i in range(1000): config = cs.sample_configuration() X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cls = ParamSklearnClassifier(config, random_state=1) @@ -122,6 +127,8 @@ def test_configurations(self): continue elif "divide by zero encountered in divide" in e.message: continue + elif "invalid value encountered in divide" in e.message: + continue else: print config print traceback.format_exc() From 05ccf0c480ffda58da08cbf687e0c5d58ded858a Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 30 Jul 2015 15:35:41 +0200 Subject: [PATCH 288/352] Fix test --- tests/components/preprocessing/test_balancing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/components/preprocessing/test_balancing.py b/tests/components/preprocessing/test_balancing.py index 40dc4aca7e..35169d9253 100644 --- a/tests/components/preprocessing/test_balancing.py +++ b/tests/components/preprocessing/test_balancing.py @@ -115,7 +115,7 @@ def test_weighting_effect(self): [('extra_trees_preproc_for_classification', ExtraTreesPreprocessor, 0.892, 0.910), ('liblinear_svc_preprocessor', LibLinear_Preprocessor, - 0.889, 0.885)]: + 0.906, 0.887)]: for strategy, acc in [('none', acc_no_weighting), ('weighting', acc_weighting)]: From b3bc62634e895892d1e516b4cde2207d14b397d3 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 30 Jul 2015 15:36:02 +0200 Subject: [PATCH 289/352] Fix naive bayes algorithms iterative_fit --- ParamSklearn/components/classification/bernoulli_nb.py | 4 ++-- ParamSklearn/components/classification/gaussian_nb.py | 4 ++-- ParamSklearn/components/classification/multinomial_nb.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/ParamSklearn/components/classification/bernoulli_nb.py index 41d0e82637..fc42576c2e 100644 --- a/ParamSklearn/components/classification/bernoulli_nb.py +++ b/ParamSklearn/components/classification/bernoulli_nb.py @@ -41,8 +41,8 @@ def iterative_fit(self, X, y, n_iter=1, refit=False): self.classes_ = np.unique(y.astype(int)) for iter in range(n_iter): - start = self.n_iter * 1000 - stop = (self.n_iter + 1) * 1000 + start = min(self.n_iter * 1000, y.shape[0]) + stop = min((self.n_iter + 1) * 1000, y.shape[0]) # Upper limit, scipy.sparse doesn't seem to handle max > len(matrix) stop = min(stop, y.shape[0]) self.estimator.partial_fit(X[start:stop], y[start:stop], self.classes_) diff --git a/ParamSklearn/components/classification/gaussian_nb.py b/ParamSklearn/components/classification/gaussian_nb.py index caf5d6d717..8e82f660a9 100644 --- a/ParamSklearn/components/classification/gaussian_nb.py +++ b/ParamSklearn/components/classification/gaussian_nb.py @@ -31,8 +31,8 @@ def iterative_fit(self, X, y, n_iter=1, refit=False): self.classes_ = np.unique(y.astype(int)) for iter in range(n_iter): - start = self.n_iter * 1000 - stop = (self.n_iter + 1) * 1000 + start = min(self.n_iter * 1000, y.shape[0]) + stop = min((self.n_iter + 1) * 1000, y.shape[0]) self.estimator.partial_fit(X[start:stop], y[start:stop], self.classes_) self.n_iter += 1 diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py index 58a42f6698..a32fe3290f 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -41,8 +41,8 @@ def iterative_fit(self, X, y, n_iter=1, refit=False): self.classes_ = np.unique(y.astype(int)) for iter in range(n_iter): - start = self.n_iter * 1000 - stop = (self.n_iter + 1) * 1000 + start = min(self.n_iter * 1000, y.shape[0]) + stop = min((self.n_iter + 1) * 1000, y.shape[0]) self.estimator.partial_fit(X[start:stop], y[start:stop], self.classes_) self.n_iter += 1 From 2e09e923c750c13284e75b2ddff243d5b7c8cce6 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 31 Jul 2015 16:07:11 +0200 Subject: [PATCH 290/352] Fix output metadata of KernelPCA --- ParamSklearn/components/preprocessing/kernel_pca.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ParamSklearn/components/preprocessing/kernel_pca.py b/ParamSklearn/components/preprocessing/kernel_pca.py index fe731f4bb3..d1652721f4 100644 --- a/ParamSklearn/components/preprocessing/kernel_pca.py +++ b/ParamSklearn/components/preprocessing/kernel_pca.py @@ -57,7 +57,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': True, 'input': (DENSE, SPARSE), - 'output': INPUT, + 'output': DENSE, 'preferred_dtype': None} @staticmethod From 902319114a325515266c64e3617afa22ad937993 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 31 Jul 2015 16:15:23 +0200 Subject: [PATCH 291/352] Make search space generation more general --- ParamSklearn/base.py | 145 +++----- ParamSklearn/create_searchspace_util.py | 311 +++++++++++------- tests/test_classification.py | 10 +- ..._create_searchspace_util_classification.py | 111 ++----- 4 files changed, 263 insertions(+), 314 deletions(-) diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index ac8485ab92..6d5964a0d2 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -251,108 +251,53 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, @classmethod def _get_hyperparameter_search_space(cls, cs, dataset_properties, exclude, include, pipeline): - for node_0_idx, node_1_idx in zip(range(len(pipeline) - 1), - range(1, len(pipeline))): - node_0_name = pipeline[node_0_idx][0] - node_1_name = pipeline[node_1_idx][0] - node_0 = pipeline[node_0_idx][1] - node_1 = pipeline[node_1_idx][1] - - node_0_include = include.get( - node_0_name) if include is not None else None - node_0_exclude = exclude.get( - node_0_name) if exclude is not None else None - node_1_include = include.get( - node_1_name) if include is not None else None - node_1_exclude = exclude.get( - node_1_name) if exclude is not None else None - - matches = ParamSklearn.create_searchspace_util.get_match_array( - node_0=node_0, node_1=node_1, node_0_include=node_0_include, - node_0_exclude=node_0_exclude, node_1_include=node_1_include, - node_1_exclude=node_1_exclude, - dataset_properties=dataset_properties, ) - - # Now we have only legal combinations at this step of the pipeline - # Simple sanity checks - assert np.sum(matches) != 0, "No valid %s/%s combination found, " \ - "probably a bug." % (node_0_name, - node_1_name) - - assert np.sum(matches) <= (matches.shape[0] * matches.shape[1]), \ - "'matches' is not binary; %s <= %d, [%d*%d]" % \ - (str(np.sum(matches)), matches.shape[0] * matches.shape[1], - matches.shape[0], matches.shape[1]) - - if np.sum(matches) < (matches.shape[0] * matches.shape[1]): - matches, node_0_list, node_1_list = \ - ParamSklearn.create_searchspace_util.sanitize_arrays( - matches=matches, node_0=node_0, node_1=node_1, - dataset_properties=dataset_properties, - node_0_include=node_0_include, - node_0_exclude=node_0_exclude, - node_1_include=node_1_include, - node_1_exclude=node_1_exclude) - - # Check if we reached a dead end - assert len(node_0_list) > 0, "No valid node 0 found" - assert len(node_1_list) > 0, "No valid node 1 found" - - # Check for inconsistencies - assert len(node_0_list) == matches.shape[0], \ - "Node 0 deleting went wrong" - assert len(node_1_list) == matches.shape[1], \ - "Node 1 deleting went wrong" + if include is None: + include = {} + if exclude is None: + exclude = {} + + matches = ParamSklearn.create_searchspace_util.get_match_array( + pipeline, dataset_properties, include=include, exclude=exclude) + + # Now we have only legal combinations at this step of the pipeline + # Simple sanity checks + assert np.sum(matches) != 0, "No valid pipeline found." + + assert np.sum(matches) <= np.size(matches), \ + "'matches' is not binary; %s <= %d, %s" % \ + (str(np.sum(matches)), np.size(matches), str(matches.shape)) + + # Iterate each dimension of the matches array (each step of the + # pipeline) to see if we can add a hyperparameter for that step + for node_idx, n_ in enumerate(pipeline): + node_name, node = n_ + is_choice = hasattr(node, "get_available_components") + + # if the node isn't a choice we can add it immediately because it + # must be active (if it wouldn't, np.sum(matches) would be zero + if not is_choice: + cs.add_configuration_space(node_name, + node.get_hyperparameter_search_space(dataset_properties)) + # If the node isn't a choice, we have to figure out which of it's + # choices are actually legal choices else: - if hasattr(node_0, "get_components"): - node_0_list = node_0.get_available_components( - data_prop=dataset_properties, - include=node_0_include, - exclude=node_0_exclude - ) - else: - node_0_list = None - if hasattr(node_1, "get_components"): - node_1_list = node_1.get_available_components( - data_prop=dataset_properties, - include=node_1_include, - exclude=node_1_exclude - ) - else: - node_1_list = None - - if hasattr(node_0, "get_components"): - node_0_name += ":__choice__" + choices_list = ParamSklearn.create_searchspace_util.\ + find_active_choices(matches, node, node_idx, + dataset_properties, + include.get(node_name), + exclude.get(node_name)) + cs.add_configuration_space(node_name, + node.get_hyperparameter_search_space( + dataset_properties, include=choices_list)) + + # And now add forbidden parameter configurations + # According to matches + if np.sum(matches) < np.size(matches): + cs = ParamSklearn.create_searchspace_util.add_forbidden( + conf_space=cs, pipeline=pipeline, matches=matches, + dataset_properties=dataset_properties, include=include, + exclude=exclude) - if node_0_idx == 0: - if hasattr(node_0, "get_components"): - cs.add_configuration_space(node_0_name, - node_0.get_hyperparameter_search_space( - dataset_properties, - include=node_0_list)) - else: - cs.add_configuration_space(node_0_name, - node_0.get_hyperparameter_search_space( - dataset_properties)) - - if hasattr(node_1, "get_components"): - cs.add_configuration_space(node_1_name, - node_1.get_hyperparameter_search_space( - dataset_properties, - include=node_1_list)) - node_1_name += ":__choice__" - else: - cs.add_configuration_space(node_1_name, - node_1.get_hyperparameter_search_space( - dataset_properties)) - - # And now add forbidden parameter configurations - # According to matches - if np.sum(matches) < (matches.shape[0] * matches.shape[1]): - cs = ParamSklearn.create_searchspace_util.add_forbidden( - conf_space=cs, node_0_list=node_0_list, - node_1_list=node_1_list, matches=matches, - node_0_name=node_0_name, node_1_name=node_1_name) return cs @staticmethod diff --git a/ParamSklearn/create_searchspace_util.py b/ParamSklearn/create_searchspace_util.py index 3c757017d0..3f2022ee49 100644 --- a/ParamSklearn/create_searchspace_util.py +++ b/ParamSklearn/create_searchspace_util.py @@ -1,137 +1,200 @@ +import itertools + import numpy as np from HPOlibConfigSpace.forbidden import ForbiddenAndConjunction from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause -from ParamSklearn.util import SPARSE, DENSE, INPUT - - -def get_match_array(node_0, node_1, dataset_properties, - node_0_include=None, node_0_exclude=None, - node_1_include=None, node_1_exclude=None): - # Select combinations of nodes that work - # Three cases possible: - # * node_0 and node_1 are both nodes: - # Check if they fit together, return a (1, 1) array - # * node_0 is a node, node_1 is a composite of nodes (or vice versa) - # Check if they fit together, return a (1, n) array - # * node_0 and node_1 are both composites of nodes - # Check if they fit together, return a (n, m) array - # - # We build a binary array, where a 1 indicates, that a combination - # works on this dataset based on the dataset and the input/output formats - # - # A 'zero'-row (column) is an unusable preprocessor (classifier) - # A single zero results in an forbidden condition +from ParamSklearn.util import SPARSE, DENSE, INPUT, PREDICTIONS - # Duck typing, not sure if it's good... + +def get_match_array(pipeline, dataset_properties, include=None, + exclude=None): sparse = dataset_properties.get('sparse') - node_0_is_choice = hasattr(node_0, "get_available_components") - node_1_is_choice = hasattr(node_1, "get_available_components") - - if node_0_is_choice: - node_0_choices = node_0.get_available_components( - dataset_properties, include=node_0_include, exclude=node_0_exclude).values() - else: - node_0_choices = [node_0] - if node_1_is_choice: - node_1_choices = node_1.get_available_components( - dataset_properties, include=node_1_include, exclude=node_1_exclude).values() - else: - node_1_choices = [node_1] - - matches = np.zeros([len(node_0_choices), len(node_1_choices)]) - - for n0_idx, n0 in enumerate(node_0_choices): - if node_0_is_choice and node_0 == n0: - continue - - node0_in = node_0_choices[n0_idx].get_properties()['input'] - node0_out = node_0_choices[n0_idx].get_properties()['output'] - - if sparse and SPARSE not in node0_in: - continue - elif not sparse and DENSE not in node0_in: - continue - - for n1_idx, n1 in enumerate(node_1_choices): - if node_1_is_choice and node_1 == n1: - continue - - node1_in = n1.get_properties()['input'] - if node0_out == INPUT: - # Preprocessor does not change the format - if (sparse and SPARSE in node1_in) or \ - (not sparse and DENSE in node1_in): - # Estimator input = Dataset format - matches[n0_idx, n1_idx] = 1 - else: - # These won't work - pass - elif node0_out == DENSE and DENSE in node1_in: - matches[n0_idx, n1_idx] = 1 - elif node0_out == SPARSE and SPARSE in node1_in: - matches[n0_idx, n1_idx] = 1 - else: - # These won't work + # Duck typing, not sure if it's good... + node_i_is_choice = [] + node_i_choices = [] + all_nodes = [] + for node_name, node in pipeline: + all_nodes.append(node) + is_choice = hasattr(node, "get_available_components") + node_i_is_choice.append(is_choice) + + node_include = include.get( + node_name) if include is not None else None + node_exclude = exclude.get( + node_name) if exclude is not None else None + + if is_choice: + node_i_choices.append(node.get_available_components( + dataset_properties, include=node_include, + exclude=node_exclude).values()) + + else: + node_i_choices.append([node]) + + matches_dimensions = [len(choices) for choices in node_i_choices] + # Start by allowing every combination of nodes. Go through all + # combinations/pipelines and erase the illegal ones + matches = np.ones(matches_dimensions, dtype=int) + + pipeline_idxs = [range(dim) for dim in matches_dimensions] + for pipeline_instantiation_idxs in itertools.product(*pipeline_idxs): + pipeline_instantiation = [node_i_choices[i][idx] for i, idx in + enumerate(pipeline_instantiation_idxs)] + + data_is_sparse = sparse + for node in pipeline_instantiation: + node_input = node.get_properties()['input'] + node_output = node.get_properties()['output'] + + if (data_is_sparse and SPARSE not in node_input) or \ + not data_is_sparse and DENSE not in node_input: + matches[pipeline_instantiation_idxs] = 0 + break + + if INPUT in node_output or PREDICTIONS in node_output or\ + (not data_is_sparse and DENSE in node_input and + node_output == DENSE) or \ + (data_is_sparse and SPARSE in node_input and node_output + == SPARSE): + # Don't change the data_is_sparse flag pass + elif data_is_sparse and DENSE in node_output: + data_is_sparse = False + elif not data_is_sparse and SPARSE in node_output: + data_is_sparse = True + else: + print node + print data_is_sparse + print node_input, node_output + raise ValueError("This combination is not allowed!") + return matches -def _get_idx_to_keep(matches): - # Returns all rows and cols where matches contains not only zeros - keep_row = [idx for idx in range(matches.shape[0]) if np.sum(matches[idx, :]) != 0] - keep_col = [idx for idx in range(matches.shape[1]) if np.sum(matches[:, idx]) != 0] - return keep_col, keep_row - - -def sanitize_arrays(matches, node_0, node_1, dataset_properties, - node_0_include=None, node_0_exclude=None, - node_1_include=None, node_1_exclude=None): - node_0_is_choice = hasattr(node_0, "get_available_components") - node_1_is_choice = hasattr(node_1, "get_available_components") - - if not node_0_is_choice: - node_0 = [node_0] - else: - node_0 = node_0.get_available_components(dataset_properties, - include=node_0_include, - exclude=node_0_exclude).keys() - if not node_1_is_choice: - node_1 = [node_1] - else: - node_1 = node_1.get_available_components(dataset_properties, - include=node_1_include, - exclude=node_1_exclude).keys() - - assert matches.shape[0] == len(node_0), (matches.shape[0], len(node_0)) - assert matches.shape[1] == len(node_1), (matches.shape[1], len(node_1)) - assert isinstance(matches, np.ndarray) - # remove components that are not usable for this problem - keep_col, keep_row = _get_idx_to_keep(matches) - - matches = matches[keep_row, :] - matches = matches[:, keep_col] - - node_0_list = [node_0[p] for p in keep_row] - node_1_list = [node_1[p] for p in keep_col] - - assert len(node_0_list) == matches.shape[0] - assert len(node_1_list) == matches.shape[1] - return matches, node_0_list, node_1_list - - -def add_forbidden(conf_space, node_0_list, node_1_list, matches, - node_0_name, node_1_name): - for pdx, p in enumerate(node_0_list): - if np.sum(matches[pdx, :]) == matches.shape[1]: - continue - for cdx, c in enumerate(node_1_list): - if matches[pdx, cdx] == 0: - conf_space.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(conf_space.get_hyperparameter( - node_1_name), c), - ForbiddenEqualsClause(conf_space.get_hyperparameter( - node_0_name), p))) +def find_active_choices(matches, node, node_idx, dataset_properties, \ + include=None, exclude=None): + if not hasattr(node, "get_available_components"): + raise ValueError() + available_components = node.get_available_components(dataset_properties, + include=include, + exclude=exclude) + assert matches.shape[node_idx] == len(available_components), \ + (matches.shape[node_idx], len(available_components)) + + choices = [] + for c_idx, component in enumerate(available_components): + slices = [slice(None) if idx != node_idx else slice(c_idx, c_idx+1) + for idx in range(len(matches.shape))] + + if np.sum(matches[slices]) > 0: + choices.append(component) + return choices + + +def add_forbidden(conf_space, pipeline, matches, dataset_properties, + include, exclude): + # Not sure if this works for 3D + node_i_is_choice = [] + node_i_choices = [] + all_nodes = [] + for node_name, node in pipeline: + all_nodes.append(node) + is_choice = hasattr(node, "get_available_components") + node_i_is_choice.append(is_choice) + + node_include = include.get( + node_name) if include is not None else None + node_exclude = exclude.get( + node_name) if exclude is not None else None + + if is_choice: + node_i_choices.append(node.get_available_components( + dataset_properties, include=node_include, + exclude=node_exclude).values()) + + else: + node_i_choices.append([node]) + + # Find out all chains of choices. Only in such a chain its possible to + # have several forbidden constraints + choices_chains = [] + idx = 0 + while idx < len(pipeline): + if node_i_is_choice[idx]: + chain_start = idx + idx += 1 + while idx < len(pipeline) and node_i_is_choice[idx]: + idx += 1 + chain_stop = idx + choices_chains.append((chain_start, chain_stop)) + idx += 1 + + for choices_chain in choices_chains: + constraints = set() + possible_constraints = set() + possible_constraints_by_length = dict() + + chain_start = choices_chain[0] + chain_stop = choices_chain[1] + chain_length = chain_stop - chain_start + + # Add one to have also have chain_length in the range + for sub_chain_length in range(2, chain_length + 1): + if sub_chain_length > 2: + break + + for start_idx in range(chain_start, chain_stop - sub_chain_length + 1): + #print chain_start + start_idx, sub_chain_length + + indices = range(start_idx, start_idx + sub_chain_length) + #print indices + + node_0_idx = indices[0] + node_1_idx = indices[1] + + node_0_name, node_0 = pipeline[node_0_idx] + node_1_name, node_1 = pipeline[node_1_idx] + node_0_is_choice = hasattr(node_0, "get_available_components") + node_1_is_choice = hasattr(node_1, "get_available_components") + + if not node_0_is_choice or not node_1_is_choice: + continue + + # Now iterate all combinations and add them as forbidden! + for pdx, p in enumerate(node_0.get_available_components(dataset_properties)): + slices_0 = [ + slice(None) if idx != node_0_idx else + slice(pdx, pdx + 1) for idx in range(len(matches.shape))] + if np.sum(matches[slices_0]) == 0: + continue + + for cdx, c in enumerate(node_1.get_available_components(dataset_properties)): + + slices_1 = [ + slice(None) if idx != node_1_idx else + slice(cdx, cdx + 1) for idx in range(len(matches.shape))] + if np.sum(matches[slices_1]) == 0: + continue + + slices = [slice(None) if idx not in (node_0_idx, node_1_idx) + else slice(pdx if idx is node_0_idx else cdx, + pdx+1 if idx is node_0_idx else cdx+1) + for idx in range(len(matches.shape))] + + #print node_0_name, node_1_name, p, c, matches[slices] + if np.sum(matches[slices]) == 0: + conf_space.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(conf_space.get_hyperparameter( + node_1_name + ":__choice__"), c), + ForbiddenEqualsClause(conf_space.get_hyperparameter( + node_0_name + ":__choice__"), p))) + constraints.add(((node_0_name, p), (node_1_name, c))) + + elif np.size(matches[slices]) > np.sum(matches[slices]) > 0: + #possible_constraints.add() + pass + return conf_space diff --git a/tests/test_classification.py b/tests/test_classification.py index ca6c79d322..37fd2e38d2 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -86,7 +86,10 @@ def test_configurations(self): resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) cs = ParamSklearnClassifier.get_hyperparameter_search_space() - for i in range(1000): + + print cs + + for i in range(10): config = cs.sample_configuration() X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cls = ParamSklearnClassifier(config, random_state=1) @@ -114,9 +117,12 @@ def test_configurations(self): print config print traceback.format_exc() raise e - except AttributeError as e: + except KeyError as e: # Some error in QDA if "log" == e.message: + print config + print traceback.format_exc() + raise e continue else: print config diff --git a/tests/test_create_searchspace_util_classification.py b/tests/test_create_searchspace_util_classification.py index bfddd8ebb9..06b0b952e7 100644 --- a/tests/test_create_searchspace_util_classification.py +++ b/tests/test_create_searchspace_util_classification.py @@ -23,7 +23,7 @@ def test_get_match_array(self): preprocessors = OrderedDict() preprocessors['pca'] = PCA classifiers = OrderedDict() - classifiers['rf'] = LDA + classifiers['lda'] = LDA # Sparse + dense class Preprocessors(object): @classmethod @@ -37,37 +37,37 @@ def get_available_components(self, *args, **kwargs): # Dense m = ParamSklearn.create_searchspace_util.get_match_array( - node_0=PCA, node_1=LDA, dataset_properties={'sparse': True}) + pipeline=((0, PCA), (1, LDA)), dataset_properties={'sparse': True}) self.assertEqual(numpy.sum(m), 0) m = ParamSklearn.create_searchspace_util.get_match_array( - node_0=PCA, node_1=LDA, dataset_properties={'sparse': False}) + pipeline=((0, PCA), (1, LDA)), dataset_properties={'sparse': False}) self.assertEqual(m, [[1]]) # Sparse preprocessors['tSVD'] = TruncatedSVD m = ParamSklearn.create_searchspace_util.get_match_array( - node_0=Preprocessors, node_1=LDA, + pipeline=((0, Preprocessors), (1, LDA)), dataset_properties={'sparse': True}) self.assertEqual(m[0], [0]) # pca self.assertEqual(m[1], [1]) # svd m = ParamSklearn.create_searchspace_util.get_match_array( - node_0=Preprocessors, node_1=LDA, + pipeline=((0, Preprocessors), (1, LDA)), dataset_properties={'sparse': False}) self.assertEqual(m[0], [1]) # pca self.assertEqual(m[1], [0]) # svd preprocessors['none'] = NoPreprocessing m = ParamSklearn.create_searchspace_util.get_match_array( - node_0=Preprocessors, node_1=LDA, + pipeline=((0, Preprocessors), (1, LDA)), dataset_properties={'sparse': True}) self.assertEqual(m[0, :], [0]) # pca self.assertEqual(m[1, :], [1]) # tsvd self.assertEqual(m[2, :], [0]) # none m = ParamSklearn.create_searchspace_util.get_match_array( - node_0=Preprocessors, node_1=LDA, + pipeline=((0, Preprocessors), (1, LDA)), dataset_properties={'sparse': False}) self.assertEqual(m[0, :], [1]) # pca self.assertEqual(m[1, :], [0]) # tsvd @@ -75,100 +75,35 @@ def get_available_components(self, *args, **kwargs): classifiers['libsvm'] = LibLinear_SVC m = ParamSklearn.create_searchspace_util.get_match_array( - node_0=Preprocessors, node_1=Classifiers, + pipeline=((0, Preprocessors), (1, Classifiers)), dataset_properties={'sparse': False}) self.assertListEqual(list(m[0, :]), [1, 1]) # pca self.assertListEqual(list(m[1, :]), [0, 0]) # tsvd self.assertListEqual(list(m[2, :]), [1, 1]) # none m = ParamSklearn.create_searchspace_util.get_match_array( - node_0=Preprocessors, node_1=Classifiers, + pipeline=((0, Preprocessors), (1, Classifiers)), dataset_properties={'sparse': True}) self.assertListEqual(list(m[0, :]), [0, 0]) # pca self.assertListEqual(list(m[1, :]), [1, 1]) # tsvd self.assertListEqual(list(m[2, :]), [0, 1]) # none - preprocessors['fast_ica'] = FastICA + # Do fancy 3d stuff + preprocessors['random_trees'] = RandomTreesEmbedding m = ParamSklearn.create_searchspace_util.get_match_array( - node_0=Preprocessors, node_1=Classifiers, + pipeline=((0, Preprocessors), (1, Preprocessors), (2, Classifiers)), dataset_properties={'sparse': False}) - self.assertListEqual(list(m[0, :]), [1, 1]) # pca - self.assertListEqual(list(m[1, :]), [0, 0]) # tsvd - self.assertListEqual(list(m[2, :]), [1, 1]) # none - self.assertListEqual(list(m[3, :]), [1, 1]) # fast_ica - - m = ParamSklearn.create_searchspace_util.get_match_array( - node_0=Preprocessors, node_1=Classifiers, - dataset_properties={'sparse': True}) - self.assertListEqual(list(m[0, :]), [0, 0]) # pca - self.assertListEqual(list(m[1, :]), [1, 1]) # tsvd - self.assertListEqual(list(m[2, :]), [0, 1]) # none - self.assertListEqual(list(m[3, :]), [0, 0]) # fast_ica - - def test_get_idx_to_keep(self): - m = numpy.zeros([3, 4]) - col, row = ParamSklearn.create_searchspace_util._get_idx_to_keep(m) - self.assertListEqual(col, []) - self.assertListEqual(row, []) - - m = numpy.zeros([100, 50]) - c_keep = set() - r_keep = set() - for i in range(20): - col_idx = numpy.random.randint(low=0, high=50, size=1)[0] - c_keep.add(col_idx) - row_idx = numpy.random.randint(low=0, high=100, size=1)[0] - r_keep.add(row_idx) - m[row_idx, col_idx] = 1 - col, row = ParamSklearn.create_searchspace_util._get_idx_to_keep(m) - self.assertListEqual(col, sorted(c_keep)) - self.assertListEqual(row, sorted(r_keep)) - [self.assertTrue(c < m.shape[1]) for c in c_keep] - [self.assertTrue(r < m.shape[0]) for r in r_keep] - - def test_sanitize_arrays(self): - class Choices(list): - def get_available_components(self, *args, **kwargs): - return OrderedDict(((v, v) for i, v in enumerate(self[:]))) - - m = numpy.zeros([2, 3]) - preprocessors = Choices(['pa', 'pb']) - classifiers = Choices(['ca', 'cb', 'cc']) - - # all zeros -> empty - new_m, new_preproc_list, new_class_list = \ - ParamSklearn.create_searchspace_util.sanitize_arrays( - matches=m, node_0=preprocessors, node_1=classifiers, - dataset_properties={}) - self.assertEqual(len(new_m), 0) - self.assertTrue(len(new_preproc_list) == len(new_class_list) == 0) - - for i in range(20): - m = numpy.zeros([2, 3]) - class_idx = numpy.random.randint(low=0, high=m.shape[1], size=1)[0] - pre_idx = numpy.random.randint(low=0, high=m.shape[0], size=1)[0] - m[pre_idx, class_idx] = 1 - new_m, new_preproc_list, new_class_list = \ - ParamSklearn.create_searchspace_util.sanitize_arrays( - matches=m, node_0=preprocessors, node_1=classifiers, - dataset_properties={}) - print preprocessors, pre_idx, new_preproc_list - self.assertIn(preprocessors[pre_idx], new_preproc_list) - self.assertIn(classifiers[class_idx], new_class_list) - self.assertTrue(new_m.shape[0] == new_m.shape[1] == 1) - - m = numpy.array([[1, 0, 0], [0, 1, 0]]) - new_m, new_preproc_list, new_class_list = \ - ParamSklearn.create_searchspace_util.sanitize_arrays( - matches=m, node_0=preprocessors, node_1=classifiers, - dataset_properties={}) - self.assertListEqual(preprocessors, new_preproc_list) - [self.assertIn(p, preprocessors) for p in preprocessors] - self.assertListEqual(classifiers[:-1], new_class_list) - [self.assertIn(c, classifiers) for c in new_class_list] - self.assertTrue(m.shape[0], new_m.shape[0]) - self.assertTrue(m.shape[1], new_m.shape[1]) - + # PCA followed by truncated SVD is forbidden + self.assertEqual(list(m[0].flatten()), [1, 1, 0, 0, 1, 1, 0, 1]) + # Truncated SVD is forbidden + self.assertEqual(list(m[1].flatten()), [0, 0, 0, 0, 0, 0, 0, 0]) + # Truncated SVD is forbidden after no_preprocessing + self.assertEqual(list(m[2].flatten()), [1, 1, 0, 0, 1, 1, 0, 1]) + # PCA is forbidden, truncatedSVD allowed after random trees embedding + # lda only allowed after truncatedSVD + self.assertEqual(list(m[3].flatten()), [0, 0, 1, 1, 0, 1, 0, 1]) + + @unittest.skip("Not currently working.") def test_add_forbidden(self): m = numpy.ones([2, 3]) preprocessors_list = ['pa', 'pb'] From 233a06d0b4e457250a6881b0baa219ff1b2b994c Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 31 Jul 2015 18:16:53 +0200 Subject: [PATCH 292/352] Make create_searchspace_util work with several chained choices --- ParamSklearn/create_searchspace_util.py | 123 +++++++++++++++--------- tests/test_base.py | 39 ++++++++ 2 files changed, 116 insertions(+), 46 deletions(-) create mode 100644 tests/test_base.py diff --git a/ParamSklearn/create_searchspace_util.py b/ParamSklearn/create_searchspace_util.py index 3f2022ee49..fa54e54c6b 100644 --- a/ParamSklearn/create_searchspace_util.py +++ b/ParamSklearn/create_searchspace_util.py @@ -15,6 +15,7 @@ def get_match_array(pipeline, dataset_properties, include=None, # Duck typing, not sure if it's good... node_i_is_choice = [] node_i_choices = [] + node_i_choices_names = [] all_nodes = [] for node_name, node in pipeline: all_nodes.append(node) @@ -27,6 +28,9 @@ def get_match_array(pipeline, dataset_properties, include=None, node_name) if exclude is not None else None if is_choice: + node_i_choices_names.append(node.get_available_components( + dataset_properties, include=node_include, + exclude=node_exclude).keys()) node_i_choices.append(node.get_available_components( dataset_properties, include=node_include, exclude=node_exclude).values()) @@ -98,6 +102,7 @@ def add_forbidden(conf_space, pipeline, matches, dataset_properties, include, exclude): # Not sure if this works for 3D node_i_is_choice = [] + node_i_choices_names = [] node_i_choices = [] all_nodes = [] for node_name, node in pipeline: @@ -111,6 +116,9 @@ def add_forbidden(conf_space, pipeline, matches, dataset_properties, node_name) if exclude is not None else None if is_choice: + node_i_choices_names.append(node.get_available_components( + dataset_properties, include=node_include, + exclude=node_exclude).keys()) node_i_choices.append(node.get_available_components( dataset_properties, include=node_include, exclude=node_exclude).values()) @@ -134,8 +142,6 @@ def add_forbidden(conf_space, pipeline, matches, dataset_properties, for choices_chain in choices_chains: constraints = set() - possible_constraints = set() - possible_constraints_by_length = dict() chain_start = choices_chain[0] chain_stop = choices_chain[1] @@ -143,58 +149,83 @@ def add_forbidden(conf_space, pipeline, matches, dataset_properties, # Add one to have also have chain_length in the range for sub_chain_length in range(2, chain_length + 1): - if sub_chain_length > 2: - break for start_idx in range(chain_start, chain_stop - sub_chain_length + 1): - #print chain_start + start_idx, sub_chain_length - indices = range(start_idx, start_idx + sub_chain_length) - #print indices - - node_0_idx = indices[0] - node_1_idx = indices[1] - - node_0_name, node_0 = pipeline[node_0_idx] - node_1_name, node_1 = pipeline[node_1_idx] - node_0_is_choice = hasattr(node_0, "get_available_components") - node_1_is_choice = hasattr(node_1, "get_available_components") - - if not node_0_is_choice or not node_1_is_choice: - continue - - # Now iterate all combinations and add them as forbidden! - for pdx, p in enumerate(node_0.get_available_components(dataset_properties)): - slices_0 = [ - slice(None) if idx != node_0_idx else - slice(pdx, pdx + 1) for idx in range(len(matches.shape))] - if np.sum(matches[slices_0]) == 0: + node_names = [pipeline[idx][0] for idx in indices] + + num_node_choices = [] + node_choice_names = [] + skip_array_shape = [] + + for idx in indices: + node = all_nodes[idx] + available_components = node.get_available_components( + dataset_properties, + include=node_i_choices_names[idx-start_idx]) + skip_array_shape.append(len(available_components)) + num_node_choices.append(range(len(available_components))) + node_choice_names.append([name for name in available_components]) + + # Figure out which choices were already abandoned + skip_array = np.zeros(skip_array_shape) + for product in itertools.product(*num_node_choices): + for node_idx, choice_idx in enumerate(product): + node_idx += start_idx + slices_ = [ + slice(None) if idx != node_idx else + slice(choice_idx, choice_idx + 1) for idx in + range(len(matches.shape))] + + if np.sum(matches[slices_]) == 0: + skip_array[product] = 1 + + for product in itertools.product(*num_node_choices): + if skip_array[product]: continue - for cdx, c in enumerate(node_1.get_available_components(dataset_properties)): - - slices_1 = [ - slice(None) if idx != node_1_idx else - slice(cdx, cdx + 1) for idx in range(len(matches.shape))] - if np.sum(matches[slices_1]) == 0: + slices = [] + for idx in range(len(matches.shape)): + if idx not in indices: + slices.append(slice(None)) + else: + slices.append(slice(product[idx - start_idx], + product[idx - start_idx] + 1)) + + # This prints the affected nodes + # print [node_choice_names[i][product[i]] + # for i in range(len(product))] + + if np.sum(matches[slices]) == 0: + constraint = tuple([(node_names[i], + node_choice_names[i][product[i]]) + for i in range(len(product))]) + + # Check if a more general constraint/forbidden clause + # was already added + continue_ = False + for constraint_length in range(2, len(constraint)): + for constraint_start_idx in range(len(constraint) + - constraint_length + 1): + sub_constraint = constraint[ + constraint_start_idx:constraint_start_idx + constraint_length] + if sub_constraint in constraints: + continue_ = True + break + if continue_: + break + if continue_: continue - slices = [slice(None) if idx not in (node_0_idx, node_1_idx) - else slice(pdx if idx is node_0_idx else cdx, - pdx+1 if idx is node_0_idx else cdx+1) - for idx in range(len(matches.shape))] + constraints.add(constraint) - #print node_0_name, node_1_name, p, c, matches[slices] - if np.sum(matches[slices]) == 0: - conf_space.add_forbidden_clause(ForbiddenAndConjunction( + forbiddens = [] + for i in range(len(product)): + forbiddens.append( ForbiddenEqualsClause(conf_space.get_hyperparameter( - node_1_name + ":__choice__"), c), - ForbiddenEqualsClause(conf_space.get_hyperparameter( - node_0_name + ":__choice__"), p))) - constraints.add(((node_0_name, p), (node_1_name, c))) - - elif np.size(matches[slices]) > np.sum(matches[slices]) > 0: - #possible_constraints.add() - pass + node_names[i] + ":__choice__"), + node_choice_names[i][product[i]])) + forbidden = ForbiddenAndConjunction(*forbiddens) + conf_space.add_forbidden_clause(forbidden) return conf_space diff --git a/tests/test_base.py b/tests/test_base.py new file mode 100644 index 0000000000..5a1d7a1234 --- /dev/null +++ b/tests/test_base.py @@ -0,0 +1,39 @@ +import unittest + +import HPOlibConfigSpace.configuration_space + +import ParamSklearn.base +import ParamSklearn.components.preprocessing +import ParamSklearn.components.classification + +class BaseTest(unittest.TestCase): + def test_get_hyperparameter_configuration_space_3choices(self): + base = ParamSklearn.base.ParamSklearnBaseEstimator + + cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() + dataset_properties = {} + exclude = {} + include = {} + pipeline = [('p0', ParamSklearn.components.preprocessing._preprocessors[ + 'preprocessor']), + ('p1', ParamSklearn.components.preprocessing._preprocessors[ + 'preprocessor']), + ('c', ParamSklearn.components.classification._classifiers[ + 'classifier'])] + cs = base._get_hyperparameter_search_space(cs, dataset_properties, + exclude, include, pipeline) + + self.assertEqual(len(cs.get_hyperparameter("p0:__choice__").choices), 14) + self.assertEqual(len(cs.get_hyperparameter("p1:__choice__").choices), 16) + self.assertEqual(143, len(cs.forbidden_clauses)) + + cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() + dataset_properties = {'sparse': True} + cs = base._get_hyperparameter_search_space(cs, dataset_properties, + exclude, include, pipeline) + + self.assertEqual(len(cs.get_hyperparameter("p0:__choice__").choices), + 11) + self.assertEqual(len(cs.get_hyperparameter("p1:__choice__").choices), + 16) + self.assertEqual(387, len(cs.forbidden_clauses)) From 5d89b4b00c99ab5f5c64038e04568d99d37e7d31 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 14 Aug 2015 17:13:41 +0200 Subject: [PATCH 293/352] Move constants to own file; make them int; prepares for new constraint --- ParamSklearn/classification.py | 11 ++++++++++- ParamSklearn/components/classification/adaboost.py | 4 ++-- .../components/classification/bernoulli_nb.py | 5 ++--- .../components/classification/decision_tree.py | 4 ++-- .../components/classification/extra_trees.py | 4 ++-- .../components/classification/gaussian_nb.py | 4 ++-- .../components/classification/gradient_boosting.py | 4 ++-- .../classification/k_nearest_neighbors.py | 4 ++-- ParamSklearn/components/classification/lda.py | 4 ++-- .../components/classification/liblinear_svc.py | 4 ++-- .../components/classification/libsvm_svc.py | 4 ++-- .../components/classification/multinomial_nb.py | 4 ++-- .../components/classification/passive_aggresive.py | 4 ++-- .../components/classification/proj_logit.py | 4 ++-- ParamSklearn/components/classification/qda.py | 4 ++-- .../components/classification/random_forest.py | 4 ++-- ParamSklearn/components/classification/ridge.py | 4 ++-- ParamSklearn/components/classification/sgd.py | 4 ++-- ParamSklearn/components/preprocessing/balancing.py | 4 ++-- ParamSklearn/components/preprocessing/densifier.py | 4 ++-- .../extra_trees_preproc_for_classification.py | 4 ++-- ParamSklearn/components/preprocessing/fast_ica.py | 6 ++---- .../preprocessing/feature_agglomeration.py | 4 ++-- ParamSklearn/components/preprocessing/gem.py | 4 ++-- .../components/preprocessing/imputation.py | 4 ++-- .../components/preprocessing/kernel_pca.py | 4 ++-- .../components/preprocessing/kitchen_sinks.py | 4 ++-- .../preprocessing/liblinear_svc_preprocessor.py | 4 ++-- .../components/preprocessing/no_preprocessing.py | 4 ++-- .../components/preprocessing/nystroem_sampler.py | 4 ++-- ParamSklearn/components/preprocessing/pca.py | 4 ++-- .../components/preprocessing/polynomial.py | 4 ++-- .../preprocessing/random_trees_embedding.py | 4 ++-- ParamSklearn/components/preprocessing/rescaling.py | 4 ++-- .../select_percentile_classification.py | 4 ++-- .../preprocessing/select_percentile_regression.py | 4 ++-- .../components/preprocessing/select_rates.py | 4 ++-- .../components/preprocessing/truncatedSVD.py | 4 ++-- ParamSklearn/components/regression/adaboost.py | 4 ++-- .../components/regression/decision_tree.py | 4 ++-- ParamSklearn/components/regression/extra_trees.py | 4 ++-- .../components/regression/gaussian_process.py | 4 ++-- .../components/regression/gradient_boosting.py | 4 ++-- .../components/regression/k_nearest_neighbors.py | 4 ++-- .../components/regression/liblinear_svr.py | 9 +++------ ParamSklearn/components/regression/libsvm_svr.py | 6 +++--- .../components/regression/random_forest.py | 4 ++-- .../components/regression/ridge_regression.py | 4 ++-- ParamSklearn/components/regression/sgd.py | 5 ++--- ParamSklearn/constants.py | 14 ++++++++++++++ ParamSklearn/create_searchspace_util.py | 13 ++++++------- ParamSklearn/regression.py | 2 +- ParamSklearn/util.py | 6 ------ tests/test_classification.py | 7 ++++--- tests/test_regression.py | 7 ++++--- 55 files changed, 137 insertions(+), 126 deletions(-) create mode 100644 ParamSklearn/constants.py diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 21ec5e9a4b..05785857a4 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -10,7 +10,7 @@ from ParamSklearn import components as components from ParamSklearn.base import ParamSklearnBaseEstimator -from ParamSklearn.util import SPARSE +from ParamSklearn.constants import SPARSE from ParamSklearn.components.preprocessing.balancing import Balancing import ParamSklearn.create_searchspace_util @@ -140,6 +140,15 @@ def predict_proba(self, X, batch_size=None): @classmethod def get_hyperparameter_search_space(cls, include=None, exclude=None, dataset_properties=None): + """Create the hyperparameter configuration space. + + Parameters + ---------- + include : dict (optional, default=None) + + Returns + ------- + """ cs = ConfigurationSpace() if dataset_properties is None or not isinstance(dataset_properties, dict): diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index fccb7600e7..0dca024187 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -7,7 +7,7 @@ UniformIntegerHyperparameter, CategoricalHyperparameter from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS +from ParamSklearn.constants import * class AdaboostClassifier(ParamSklearnClassificationAlgorithm): @@ -64,7 +64,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/ParamSklearn/components/classification/bernoulli_nb.py index fc42576c2e..82c16471f6 100644 --- a/ParamSklearn/components/classification/bernoulli_nb.py +++ b/ParamSklearn/components/classification/bernoulli_nb.py @@ -1,13 +1,12 @@ import numpy as np import sklearn.naive_bayes -from sklearn.utils.validation import check_is_fitted from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.constants import * class BernoulliNB(ParamSklearnClassificationAlgorithm): @@ -90,7 +89,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), 'preferred_dtype': np.bool} @staticmethod diff --git a/ParamSklearn/components/classification/decision_tree.py b/ParamSklearn/components/classification/decision_tree.py index a1d7b33ce1..c5a5c35c1f 100644 --- a/ParamSklearn/components/classification/decision_tree.py +++ b/ParamSklearn/components/classification/decision_tree.py @@ -7,7 +7,7 @@ from ParamSklearn.components.base import \ ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS, SPARSE +from ParamSklearn.constants import * # get our own forests to replace the sklearn ones from sklearn.tree import DecisionTreeClassifier @@ -81,7 +81,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/extra_trees.py b/ParamSklearn/components/classification/extra_trees.py index 38671a5c68..60ace9237c 100644 --- a/ParamSklearn/components/classification/extra_trees.py +++ b/ParamSklearn/components/classification/extra_trees.py @@ -6,7 +6,7 @@ UnParametrizedHyperparameter, Constant from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS, SPARSE +from ParamSklearn.constants import * from sklearn.ensemble import ExtraTreesClassifier as ETC @@ -129,7 +129,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/gaussian_nb.py b/ParamSklearn/components/classification/gaussian_nb.py index 8e82f660a9..90b8502bff 100644 --- a/ParamSklearn/components/classification/gaussian_nb.py +++ b/ParamSklearn/components/classification/gaussian_nb.py @@ -4,7 +4,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.constants import * class GaussianNB(ParamSklearnClassificationAlgorithm): @@ -77,7 +77,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, ), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), 'preferred_dtype': np.float32} @staticmethod diff --git a/ParamSklearn/components/classification/gradient_boosting.py b/ParamSklearn/components/classification/gradient_boosting.py index c02cde2ebf..a9874f3347 100644 --- a/ParamSklearn/components/classification/gradient_boosting.py +++ b/ParamSklearn/components/classification/gradient_boosting.py @@ -7,7 +7,7 @@ CategoricalHyperparameter from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.constants import * class GradientBoostingClassifier(ParamSklearnClassificationAlgorithm): @@ -125,7 +125,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, ), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index e475f69008..04ab8f040a 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -5,7 +5,7 @@ Constant, UniformIntegerHyperparameter from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.constants import * class KNearestNeighborsClassifier(ParamSklearnClassificationAlgorithm): @@ -52,7 +52,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype' : None} diff --git a/ParamSklearn/components/classification/lda.py b/ParamSklearn/components/classification/lda.py index e15d501817..6657b185b2 100644 --- a/ParamSklearn/components/classification/lda.py +++ b/ParamSklearn/components/classification/lda.py @@ -7,7 +7,7 @@ from ParamSklearn.components.base import \ ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.constants import * from ParamSklearn.implementations.util import softmax @@ -72,7 +72,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, ), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/classification/liblinear_svc.py b/ParamSklearn/components/classification/liblinear_svc.py index c9236b8156..66371ea611 100644 --- a/ParamSklearn/components/classification/liblinear_svc.py +++ b/ParamSklearn/components/classification/liblinear_svc.py @@ -8,7 +8,7 @@ from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.implementations.util import softmax -from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS +from ParamSklearn.constants import * class LibLinear_SVC(ParamSklearnClassificationAlgorithm): @@ -81,7 +81,7 @@ def get_properties(): 'is_deterministic': False, 'handles_sparse': True, 'input': (SPARSE, DENSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index 7c41d91cf6..5a9847c30b 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -7,7 +7,7 @@ UnParametrizedHyperparameter from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.constants import * from ParamSklearn.implementations.util import softmax @@ -94,7 +94,7 @@ def get_properties(): # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use 'handles_sparse': True, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! # C-continouos and double precision... 'preferred_dtype': None} diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py index a32fe3290f..cdce66e221 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -6,7 +6,7 @@ CategoricalHyperparameter from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.constants import * class MultinomialNB(ParamSklearnClassificationAlgorithm): @@ -90,7 +90,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), 'preferred_dtype': np.float32} @staticmethod diff --git a/ParamSklearn/components/classification/passive_aggresive.py b/ParamSklearn/components/classification/passive_aggresive.py index a010de8047..920a1360a9 100644 --- a/ParamSklearn/components/classification/passive_aggresive.py +++ b/ParamSklearn/components/classification/passive_aggresive.py @@ -8,7 +8,7 @@ from ParamSklearn.components.base import \ ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.constants import * from ParamSklearn.implementations.util import softmax @@ -77,7 +77,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/classification/proj_logit.py b/ParamSklearn/components/classification/proj_logit.py index 1fdfeccb95..5473b5ee7a 100644 --- a/ParamSklearn/components/classification/proj_logit.py +++ b/ParamSklearn/components/classification/proj_logit.py @@ -6,7 +6,7 @@ UnParametrizedHyperparameter, Constant from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.constants import * from ParamSklearn.implementations import ProjLogit @@ -48,7 +48,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, ), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/qda.py b/ParamSklearn/components/classification/qda.py index 1faa372463..3e4eb7edf6 100644 --- a/ParamSklearn/components/classification/qda.py +++ b/ParamSklearn/components/classification/qda.py @@ -5,7 +5,7 @@ from ParamSklearn.components.base import \ ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.constants import * from ParamSklearn.implementations.util import softmax @@ -50,7 +50,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, ), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py index 21f893d446..eb4ddba5e5 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -7,7 +7,7 @@ UnParametrizedHyperparameter, Constant from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS, SPARSE +from ParamSklearn.constants import * # get our own forests to replace the sklearn ones #from ParamSklearn.implementations import forest @@ -122,7 +122,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/ridge.py b/ParamSklearn/components/classification/ridge.py index aad69d7e47..62cc3744d9 100644 --- a/ParamSklearn/components/classification/ridge.py +++ b/ParamSklearn/components/classification/ridge.py @@ -8,7 +8,7 @@ from ParamSklearn.components.base import \ ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.constants import * from ParamSklearn.implementations.util import softmax @@ -61,7 +61,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/classification/sgd.py b/ParamSklearn/components/classification/sgd.py index 3677deaeaa..a46fa7b773 100644 --- a/ParamSklearn/components/classification/sgd.py +++ b/ParamSklearn/components/classification/sgd.py @@ -7,7 +7,7 @@ from HPOlibConfigSpace.conditions import EqualsCondition from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.constants import * from ParamSklearn.implementations.util import softmax @@ -106,7 +106,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype' : None} diff --git a/ParamSklearn/components/preprocessing/balancing.py b/ParamSklearn/components/preprocessing/balancing.py index 26a72d4545..59083b6d1e 100644 --- a/ParamSklearn/components/preprocessing/balancing.py +++ b/ParamSklearn/components/preprocessing/balancing.py @@ -5,7 +5,7 @@ from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import DENSE, SPARSE, INPUT +from ParamSklearn.constants import * class Balancing(ParamSklearnPreprocessingAlgorithm): @@ -97,7 +97,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': True, 'input': (DENSE, SPARSE), - 'output': INPUT, + 'output': (INPUT,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/densifier.py b/ParamSklearn/components/preprocessing/densifier.py index d7b9ba5d41..7462c42217 100644 --- a/ParamSklearn/components/preprocessing/densifier.py +++ b/ParamSklearn/components/preprocessing/densifier.py @@ -4,7 +4,7 @@ from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import DENSE, SPARSE +from ParamSklearn.constants import * class Densifier(ParamSklearnPreprocessingAlgorithm): @@ -37,7 +37,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': False, 'input': (SPARSE,), - 'output': DENSE, + 'output': (DENSE,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py b/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py index 63d43533fc..6be60760a8 100644 --- a/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py +++ b/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py @@ -8,7 +8,7 @@ from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import DENSE, INPUT, SPARSE +from ParamSklearn.constants import * class ExtraTreesPreprocessor(ParamSklearnPreprocessingAlgorithm): @@ -107,7 +107,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, SPARSE), - 'output': INPUT, + 'output': (INPUT,), # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/preprocessing/fast_ica.py b/ParamSklearn/components/preprocessing/fast_ica.py index c0077edde2..29662ecdf4 100644 --- a/ParamSklearn/components/preprocessing/fast_ica.py +++ b/ParamSklearn/components/preprocessing/fast_ica.py @@ -9,9 +9,7 @@ from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import SPARSE, DENSE, INPUT - -import numpy as np +from ParamSklearn.constants import * class FastICA(ParamSklearnPreprocessingAlgorithm): @@ -56,7 +54,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': True, 'input': (DENSE, ), - 'output': INPUT, + 'output': (INPUT,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/feature_agglomeration.py b/ParamSklearn/components/preprocessing/feature_agglomeration.py index aa77f4f366..a5f487aabc 100644 --- a/ParamSklearn/components/preprocessing/feature_agglomeration.py +++ b/ParamSklearn/components/preprocessing/feature_agglomeration.py @@ -9,7 +9,7 @@ from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import SPARSE, DENSE, INPUT +from ParamSklearn.constants import * class FeatureAgglomeration(ParamSklearnPreprocessingAlgorithm): @@ -58,7 +58,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': True, 'input': (DENSE, ), - 'output': INPUT, + 'output': (INPUT,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/gem.py b/ParamSklearn/components/preprocessing/gem.py index 6b405f8ff7..82e689dbd8 100644 --- a/ParamSklearn/components/preprocessing/gem.py +++ b/ParamSklearn/components/preprocessing/gem.py @@ -3,7 +3,7 @@ from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.implementations.gem import GEM as GEMImpl -from ParamSklearn.util import DENSE +from ParamSklearn.constants import * class GEM(ParamSklearnPreprocessingAlgorithm): @@ -38,7 +38,7 @@ def get_properties(): 'handles_sparse': False, 'handles_dense': True, 'input': (DENSE, ), - 'output': DENSE, + 'output': (INPUT,), 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/imputation.py b/ParamSklearn/components/preprocessing/imputation.py index c9fa66fec9..1d5c49f9d6 100644 --- a/ParamSklearn/components/preprocessing/imputation.py +++ b/ParamSklearn/components/preprocessing/imputation.py @@ -5,7 +5,7 @@ from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import DENSE, SPARSE, INPUT +from ParamSklearn.constants import * class Imputation(ParamSklearnPreprocessingAlgorithm): @@ -42,7 +42,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': True, 'input': (DENSE, SPARSE), - 'output': INPUT, + 'output': (INPUT,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/kernel_pca.py b/ParamSklearn/components/preprocessing/kernel_pca.py index d1652721f4..d49d77ca70 100644 --- a/ParamSklearn/components/preprocessing/kernel_pca.py +++ b/ParamSklearn/components/preprocessing/kernel_pca.py @@ -9,7 +9,7 @@ from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import SPARSE, DENSE, INPUT +from ParamSklearn.constants import * class KernelPCA(ParamSklearnPreprocessingAlgorithm): @@ -57,7 +57,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': True, 'input': (DENSE, SPARSE), - 'output': DENSE, + 'output': (DENSE,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/kitchen_sinks.py b/ParamSklearn/components/preprocessing/kitchen_sinks.py index 3a6f854a80..5329c6779e 100644 --- a/ParamSklearn/components/preprocessing/kitchen_sinks.py +++ b/ParamSklearn/components/preprocessing/kitchen_sinks.py @@ -5,7 +5,7 @@ UniformIntegerHyperparameter from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import SPARSE, DENSE, INPUT +from ParamSklearn.constants import * class RandomKitchenSinks(ParamSklearnPreprocessingAlgorithm): @@ -48,7 +48,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': True, 'input': (SPARSE, DENSE), - 'output': INPUT, + 'output': (INPUT,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py b/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py index 43767114bc..8b363aad59 100644 --- a/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py +++ b/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py @@ -8,7 +8,7 @@ from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import SPARSE, DENSE, INPUT +from ParamSklearn.constants import * class LibLinear_Preprocessor(ParamSklearnPreprocessingAlgorithm): @@ -74,7 +74,7 @@ def get_properties(): 'is_deterministic': False, 'handles_sparse': True, 'input': (SPARSE, DENSE), - 'output': INPUT, + 'output': (INPUT,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/no_preprocessing.py b/ParamSklearn/components/preprocessing/no_preprocessing.py index 466f6dc2e7..f3ddf79376 100644 --- a/ParamSklearn/components/preprocessing/no_preprocessing.py +++ b/ParamSklearn/components/preprocessing/no_preprocessing.py @@ -1,7 +1,7 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import SPARSE, DENSE, INPUT +from ParamSklearn.constants import * class NoPreprocessing(ParamSklearnPreprocessingAlgorithm): @@ -36,7 +36,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': True, 'input': (SPARSE, DENSE), - 'output': INPUT, + 'output': (INPUT,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/nystroem_sampler.py b/ParamSklearn/components/preprocessing/nystroem_sampler.py index 2d4099f1cf..2fe0ed11a0 100644 --- a/ParamSklearn/components/preprocessing/nystroem_sampler.py +++ b/ParamSklearn/components/preprocessing/nystroem_sampler.py @@ -8,7 +8,7 @@ from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import SPARSE, DENSE, INPUT +from ParamSklearn.constants import * class Nystroem(ParamSklearnPreprocessingAlgorithm): @@ -51,7 +51,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': True, 'input': (SPARSE, DENSE), - 'output': INPUT, + 'output': (INPUT,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/pca.py b/ParamSklearn/components/preprocessing/pca.py index 74b11a490f..12a0593258 100644 --- a/ParamSklearn/components/preprocessing/pca.py +++ b/ParamSklearn/components/preprocessing/pca.py @@ -6,7 +6,7 @@ CategoricalHyperparameter from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import DENSE, INPUT +from ParamSklearn.constants import * class PCA(ParamSklearnPreprocessingAlgorithm): @@ -52,7 +52,7 @@ def get_properties(): 'handles_sparse': False, 'handles_dense': True, 'input': (DENSE, ), - 'output': INPUT, + 'output': (INPUT,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/polynomial.py b/ParamSklearn/components/preprocessing/polynomial.py index 37bad65f97..0da8a686b2 100644 --- a/ParamSklearn/components/preprocessing/polynomial.py +++ b/ParamSklearn/components/preprocessing/polynomial.py @@ -6,7 +6,7 @@ from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS +from ParamSklearn.constants import * class PolynomialFeatures(ParamSklearnPreprocessingAlgorithm): @@ -48,7 +48,7 @@ def get_properties(): # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use 'handles_sparse': True, 'input': (DENSE,), - 'output': DENSE, + 'output': (INPUT,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/random_trees_embedding.py b/ParamSklearn/components/preprocessing/random_trees_embedding.py index 5b4e222a37..da6f070bff 100644 --- a/ParamSklearn/components/preprocessing/random_trees_embedding.py +++ b/ParamSklearn/components/preprocessing/random_trees_embedding.py @@ -5,7 +5,7 @@ UnParametrizedHyperparameter, Constant from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import SPARSE, DENSE +from ParamSklearn.constants import * class RandomTreesEmbedding(ParamSklearnPreprocessingAlgorithm): @@ -68,7 +68,7 @@ def get_properties(): 'handles_sparse': False, 'handles_dense': True, 'input': (DENSE, SPARSE), - 'output': SPARSE, + 'output': (SPARSE,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/rescaling.py b/ParamSklearn/components/preprocessing/rescaling.py index 9a93214733..ed2c376999 100644 --- a/ParamSklearn/components/preprocessing/rescaling.py +++ b/ParamSklearn/components/preprocessing/rescaling.py @@ -5,7 +5,7 @@ from ParamSklearn.implementations.MinMaxScaler import MinMaxScaler from ParamSklearn.implementations.Normalizer import Normalizer from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import DENSE, SPARSE, INPUT +from ParamSklearn.constants import * class none(object): @@ -58,7 +58,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': True, 'input': (SPARSE, DENSE), - 'output': INPUT, + 'output': (INPUT,), # Add something here... 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/select_percentile_classification.py b/ParamSklearn/components/preprocessing/select_percentile_classification.py index e82c82403c..2834def5c2 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_classification.py +++ b/ParamSklearn/components/preprocessing/select_percentile_classification.py @@ -5,7 +5,7 @@ from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.components.preprocessing.select_percentile import SelectPercentileBase -from ParamSklearn.util import DENSE, SPARSE, INPUT +from ParamSklearn.constants import * class SelectPercentileClassification(SelectPercentileBase, @@ -45,7 +45,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': True, 'input': (SPARSE, DENSE), - 'output': INPUT, + 'output': (INPUT,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/select_percentile_regression.py b/ParamSklearn/components/preprocessing/select_percentile_regression.py index 2d18d07828..07e4f727b6 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_regression.py +++ b/ParamSklearn/components/preprocessing/select_percentile_regression.py @@ -5,7 +5,7 @@ from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.components.preprocessing.select_percentile import SelectPercentileBase -from ParamSklearn.util import DENSE +from ParamSklearn.constants import * class SelectPercentileRegression(SelectPercentileBase, @@ -43,7 +43,7 @@ def get_properties(): 'handles_sparse': False, 'handles_dense': True, 'input': (DENSE, ), - 'output': DENSE, + 'output': (DENSE,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/select_rates.py b/ParamSklearn/components/preprocessing/select_rates.py index dcb9e1cdde..ad19cecb1a 100644 --- a/ParamSklearn/components/preprocessing/select_rates.py +++ b/ParamSklearn/components/preprocessing/select_rates.py @@ -6,7 +6,7 @@ from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import DENSE, SPARSE, INPUT +from ParamSklearn.constants import * class SelectRates(ParamSklearnPreprocessingAlgorithm): @@ -67,7 +67,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': True, 'input': (SPARSE, DENSE), - 'output': INPUT, + 'output': (INPUT,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/truncatedSVD.py b/ParamSklearn/components/preprocessing/truncatedSVD.py index aec900d484..6dda2d73a6 100644 --- a/ParamSklearn/components/preprocessing/truncatedSVD.py +++ b/ParamSklearn/components/preprocessing/truncatedSVD.py @@ -6,7 +6,7 @@ from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.util import SPARSE, DENSE +from ParamSklearn.constants import * class TruncatedSVD(ParamSklearnPreprocessingAlgorithm): @@ -49,7 +49,7 @@ def get_properties(): 'handles_sparse': True, 'handles_dense': False, 'input': (SPARSE, ), - 'output': DENSE, + 'output': (DENSE,), 'preferred_dtype': np.float32} @staticmethod diff --git a/ParamSklearn/components/regression/adaboost.py b/ParamSklearn/components/regression/adaboost.py index 28926fdb90..1dbac3951f 100644 --- a/ParamSklearn/components/regression/adaboost.py +++ b/ParamSklearn/components/regression/adaboost.py @@ -7,7 +7,7 @@ UniformIntegerHyperparameter, CategoricalHyperparameter from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS +from ParamSklearn.constants import * class AdaboostRegressor(ParamSklearnRegressionAlgorithm): @@ -59,7 +59,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS, ), # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/regression/decision_tree.py b/ParamSklearn/components/regression/decision_tree.py index 173171746b..dae551ddd7 100644 --- a/ParamSklearn/components/regression/decision_tree.py +++ b/ParamSklearn/components/regression/decision_tree.py @@ -7,7 +7,7 @@ from ParamSklearn.components.base import \ ParamSklearnRegressionAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS, SPARSE +from ParamSklearn.constants import * # get our own forests to replace the sklearn ones from sklearn.tree import DecisionTreeRegressor @@ -74,7 +74,7 @@ def get_properties(): 'is_deterministic': False, 'handles_sparse': True, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/regression/extra_trees.py b/ParamSklearn/components/regression/extra_trees.py index 7007f47e0f..123c5de69e 100644 --- a/ParamSklearn/components/regression/extra_trees.py +++ b/ParamSklearn/components/regression/extra_trees.py @@ -6,7 +6,7 @@ UnParametrizedHyperparameter, Constant from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS, SPARSE +from ParamSklearn.constants import * from sklearn.ensemble import ExtraTreesRegressor as ETR @@ -126,7 +126,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/regression/gaussian_process.py b/ParamSklearn/components/regression/gaussian_process.py index 82e48010d9..cc92230df2 100644 --- a/ParamSklearn/components/regression/gaussian_process.py +++ b/ParamSklearn/components/regression/gaussian_process.py @@ -7,7 +7,7 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.constants import * class GaussianProcess(ParamSklearnRegressionAlgorithm): @@ -63,7 +63,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, ), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/regression/gradient_boosting.py b/ParamSklearn/components/regression/gradient_boosting.py index bbcbc87647..ad030b2ace 100644 --- a/ParamSklearn/components/regression/gradient_boosting.py +++ b/ParamSklearn/components/regression/gradient_boosting.py @@ -8,7 +8,7 @@ from HPOlibConfigSpace.conditions import InCondition from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS +from ParamSklearn.constants import * class GradientBoosting(ParamSklearnRegressionAlgorithm): @@ -125,7 +125,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, ), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/regression/k_nearest_neighbors.py b/ParamSklearn/components/regression/k_nearest_neighbors.py index 71ec0ccae1..88796ab534 100644 --- a/ParamSklearn/components/regression/k_nearest_neighbors.py +++ b/ParamSklearn/components/regression/k_nearest_neighbors.py @@ -5,7 +5,7 @@ Constant, UniformIntegerHyperparameter from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.constants import * class KNearestNeighborsRegressor(ParamSklearnRegressionAlgorithm): @@ -46,7 +46,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/regression/liblinear_svr.py b/ParamSklearn/components/regression/liblinear_svr.py index 10ef780936..8d91a34d1e 100644 --- a/ParamSklearn/components/regression/liblinear_svr.py +++ b/ParamSklearn/components/regression/liblinear_svr.py @@ -2,13 +2,10 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - CategoricalHyperparameter, Constant, UnParametrizedHyperparameter -from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ - ForbiddenAndConjunction + CategoricalHyperparameter, Constant from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.implementations.util import softmax -from ParamSklearn.util import SPARSE, DENSE, PREDICTIONS +from ParamSklearn.constants import * class LibLinear_SVR(ParamSklearnRegressionAlgorithm): @@ -67,7 +64,7 @@ def get_properties(): 'is_deterministic': False, 'handles_sparse': True, 'input': (SPARSE, DENSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/regression/libsvm_svr.py b/ParamSklearn/components/regression/libsvm_svr.py index 96eb62436c..16ad3a2645 100644 --- a/ParamSklearn/components/regression/libsvm_svr.py +++ b/ParamSklearn/components/regression/libsvm_svr.py @@ -8,9 +8,9 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter - from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.constants import * + class LibSVM_SVR(ParamSklearnRegressionAlgorithm): def __init__(self, kernel, C, epsilon, tol, shrinking, gamma=0.0, @@ -90,7 +90,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'input': (SPARSE, DENSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/regression/random_forest.py b/ParamSklearn/components/regression/random_forest.py index b117af732b..f237d3112b 100644 --- a/ParamSklearn/components/regression/random_forest.py +++ b/ParamSklearn/components/regression/random_forest.py @@ -6,7 +6,7 @@ UnParametrizedHyperparameter, Constant from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.util import DENSE, PREDICTIONS, SPARSE +from ParamSklearn.constants import * # get our own forests to replace the sklearn ones #from ParamSklearn.implementations import forest from sklearn.ensemble import RandomForestRegressor @@ -113,7 +113,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/regression/ridge_regression.py b/ParamSklearn/components/regression/ridge_regression.py index 8f2b91723c..6fca3926c2 100644 --- a/ParamSklearn/components/regression/ridge_regression.py +++ b/ParamSklearn/components/regression/ridge_regression.py @@ -6,7 +6,7 @@ UnParametrizedHyperparameter from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.constants import * class RidgeRegression(ParamSklearnRegressionAlgorithm): @@ -48,7 +48,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'input': (SPARSE, DENSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/regression/sgd.py b/ParamSklearn/components/regression/sgd.py index 525b24d23f..7abb3e08fe 100644 --- a/ParamSklearn/components/regression/sgd.py +++ b/ParamSklearn/components/regression/sgd.py @@ -8,8 +8,7 @@ from HPOlibConfigSpace.conditions import InCondition, EqualsCondition from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.util import DENSE, SPARSE, PREDICTIONS -from ParamSklearn.implementations.util import softmax +from ParamSklearn.constants import * class SGD(ParamSklearnRegressionAlgorithm): @@ -105,7 +104,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'input': (DENSE, SPARSE), - 'output': PREDICTIONS, + 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/constants.py b/ParamSklearn/constants.py new file mode 100644 index 0000000000..9692b5fbce --- /dev/null +++ b/ParamSklearn/constants.py @@ -0,0 +1,14 @@ +"""Constants which are used as dataset properties. +""" +BINARY_CLASSIFICATION = 1 +MULTICLASS_CLASSIFICATION = 2 +MULTILABEL_CLASSIFICATION = 3 +REGRESSION = 4 + +DENSE = 0 +SPARSE = 1 +PREDICTIONS = 2 +INPUT = 3 + +REAL_DATA = 0 +POSITIVE_REAL_DATA = 1 \ No newline at end of file diff --git a/ParamSklearn/create_searchspace_util.py b/ParamSklearn/create_searchspace_util.py index fa54e54c6b..ea620b7e43 100644 --- a/ParamSklearn/create_searchspace_util.py +++ b/ParamSklearn/create_searchspace_util.py @@ -5,11 +5,11 @@ from HPOlibConfigSpace.forbidden import ForbiddenAndConjunction from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause -from ParamSklearn.util import SPARSE, DENSE, INPUT, PREDICTIONS +from ParamSklearn.constants import * -def get_match_array(pipeline, dataset_properties, include=None, - exclude=None): +def get_match_array(pipeline, dataset_properties, + include=None, exclude=None): sparse = dataset_properties.get('sparse') # Duck typing, not sure if it's good... @@ -57,12 +57,11 @@ def get_match_array(pipeline, dataset_properties, include=None, not data_is_sparse and DENSE not in node_input: matches[pipeline_instantiation_idxs] = 0 break - if INPUT in node_output or PREDICTIONS in node_output or\ (not data_is_sparse and DENSE in node_input and - node_output == DENSE) or \ - (data_is_sparse and SPARSE in node_input and node_output - == SPARSE): + DENSE in node_output) or \ + (data_is_sparse and SPARSE in node_input and + SPARSE in node_output): # Don't change the data_is_sparse flag pass elif data_is_sparse and DENSE in node_output: diff --git a/ParamSklearn/regression.py b/ParamSklearn/regression.py index f0a7b96404..632749bd5d 100644 --- a/ParamSklearn/regression.py +++ b/ParamSklearn/regression.py @@ -13,7 +13,7 @@ from ParamSklearn import components as components from ParamSklearn.base import ParamSklearnBaseEstimator -from ParamSklearn.util import SPARSE +from ParamSklearn.constants import SPARSE import ParamSklearn.create_searchspace_util diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index 0e8f7633a5..879cdfcd9c 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -11,12 +11,6 @@ import sklearn.datasets -SPARSE = 'SPARSE' -DENSE = 'DENSE' -PREDICTIONS = 'PREDICTIONS' -INPUT = 'INPUT' - - def find_sklearn_classes(class_): classifiers = set() all_subdirectories = [] diff --git a/tests/test_classification.py b/tests/test_classification.py index 37fd2e38d2..15dc3af012 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -23,7 +23,8 @@ from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm import ParamSklearn.components.classification as classification_components import ParamSklearn.components.preprocessing as preprocessing_components -from ParamSklearn.util import get_dataset, DENSE, SPARSE, PREDICTIONS +from ParamSklearn.util import get_dataset +from ParamSklearn.constants import DENSE, SPARSE, PREDICTIONS class TestParamSklearnClassifier(unittest.TestCase): @@ -39,10 +40,10 @@ def test_io_dict(self): output = props['output'] self.assertIsInstance(inp, tuple) - self.assertIsInstance(output, str) + self.assertIsInstance(output, tuple) for i in inp: self.assertIn(i, (SPARSE, DENSE)) - self.assertEqual(output, PREDICTIONS) + self.assertEqual(output, (PREDICTIONS,)) self.assertIn('handles_regression', props) self.assertFalse(props['handles_regression']) self.assertIn('handles_classification', props) diff --git a/tests/test_regression.py b/tests/test_regression.py index a59d0ce27a..b8685cd2aa 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -18,7 +18,8 @@ from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm import ParamSklearn.components.regression as regression_components import ParamSklearn.components.preprocessing as preprocessing_components -from ParamSklearn.util import get_dataset, SPARSE, DENSE, PREDICTIONS +from ParamSklearn.util import get_dataset +from ParamSklearn.constants import * class TestParamSKlearnRegressor(unittest.TestCase): @@ -35,10 +36,10 @@ def test_io_dict(self): output = props['output'] self.assertIsInstance(inp, tuple) - self.assertIsInstance(output, str) + self.assertIsInstance(output, tuple) for i in inp: self.assertIn(i, (SPARSE, DENSE)) - self.assertEqual(output, PREDICTIONS) + self.assertEqual(output, (PREDICTIONS,)) self.assertIn('handles_regression', props) self.assertTrue(props['handles_regression']) self.assertIn('handles_classification', props) From 5758fe92d45b14234e4188af31ef99e6d2b4cfdc Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 26 Aug 2015 17:13:00 +0200 Subject: [PATCH 294/352] Add functionality to deal with non-negative datasets --- ParamSklearn/base.py | 21 +- ParamSklearn/classification.py | 63 +---- ParamSklearn/components/__init__.py | 3 +- ParamSklearn/components/base.py | 6 +- .../components/classification/adaboost.py | 4 +- .../components/classification/bernoulli_nb.py | 10 +- .../classification/decision_tree.py | 4 +- .../components/classification/extra_trees.py | 4 +- .../components/classification/gaussian_nb.py | 4 +- .../classification/gradient_boosting.py | 4 +- .../classification/k_nearest_neighbors.py | 4 +- ParamSklearn/components/classification/lda.py | 4 +- .../classification/liblinear_svc.py | 4 +- .../components/classification/libsvm_svc.py | 4 +- .../classification/multinomial_nb.py | 12 +- .../classification/passive_aggresive.py | 4 +- .../components/classification/proj_logit.py | 4 +- ParamSklearn/components/classification/qda.py | 4 +- .../classification/random_forest.py | 4 +- .../components/classification/ridge.py | 4 +- ParamSklearn/components/classification/sgd.py | 4 +- .../components/data_preprocessing/__init__.py | 33 +++ .../balancing.py | 4 +- .../imputation.py | 4 +- .../data_preprocessing/rescaling.py | 243 ++++++++++++++++++ .../__init__.py | 11 +- .../densifier.py | 6 +- .../extra_trees_preproc_for_classification.py | 4 +- .../fast_ica.py | 6 +- .../feature_agglomeration.py | 4 +- .../gem.py | 6 +- .../kernel_pca.py | 10 +- .../kitchen_sinks.py | 6 +- .../liblinear_svc_preprocessor.py | 4 +- .../no_preprocessing.py | 4 +- .../nystroem_sampler.py | 34 ++- .../pca.py | 6 +- .../polynomial.py | 4 +- .../random_trees_embedding.py | 6 +- .../select_percentile.py | 0 .../select_percentile_classification.py | 48 +++- .../select_percentile_regression.py | 6 +- .../select_rates.py | 28 +- .../tfidf.py | 2 +- .../truncatedSVD.py | 6 +- .../components/preprocessing/rescaling.py | 77 ------ .../components/regression/adaboost.py | 4 +- .../components/regression/decision_tree.py | 4 +- .../components/regression/extra_trees.py | 4 +- .../components/regression/gaussian_process.py | 4 +- .../regression/gradient_boosting.py | 4 +- .../regression/k_nearest_neighbors.py | 4 +- .../components/regression/liblinear_svr.py | 4 +- .../components/regression/libsvm_svr.py | 4 +- .../components/regression/random_forest.py | 4 +- .../components/regression/ridge_regression.py | 4 +- ParamSklearn/components/regression/sgd.py | 4 +- ParamSklearn/constants.py | 38 ++- ParamSklearn/create_searchspace_util.py | 40 ++- ParamSklearn/implementations/gem.py | 1 + ParamSklearn/regression.py | 9 +- source/first_steps.rst | 4 +- .../classification/test_bernoulli_nb.py | 4 +- .../classification/test_multinomial_nb.py | 24 +- .../__init__.py | 0 .../test_balancing.py | 8 +- .../test_imputation.py | 2 +- .../data_preprocessing/test_scaling.py | 53 ++++ .../feature_preprocessing/__init__.py | 1 + .../test_NoPreprocessing.py | 2 +- .../test_densifier.py | 2 +- .../test_extra_trees.py | 2 +- .../test_fast_ica.py | 2 +- .../test_feature_agglomeration.py | 2 +- .../test_gem.py | 2 +- .../test_kernel_pca.py | 2 +- .../test_kitchen_sinks.py | 2 +- .../test_liblinear.py | 2 +- .../test_nystroem_sampler.py | 22 +- .../test_pca.py | 2 +- .../test_polynomial.py | 2 +- .../test_random_trees_embedding.py | 2 +- .../test_select_percentile_classification.py | 20 +- .../test_select_percentile_regression.py | 2 +- .../test_select_rates.py | 25 +- .../test_truncatedSVD.py | 2 +- .../components/preprocessing/test_scaling.py | 37 --- .../regression/test_ridge_regression.py | 2 +- tests/test_base.py | 68 ++++- tests/test_classification.py | 170 ++++++++---- ..._create_searchspace_util_classification.py | 15 +- tests/test_regression.py | 8 +- 92 files changed, 960 insertions(+), 400 deletions(-) create mode 100644 ParamSklearn/components/data_preprocessing/__init__.py rename ParamSklearn/components/{preprocessing => data_preprocessing}/balancing.py (97%) rename ParamSklearn/components/{preprocessing => data_preprocessing}/imputation.py (95%) create mode 100644 ParamSklearn/components/data_preprocessing/rescaling.py rename ParamSklearn/components/{preprocessing => feature_preprocessing}/__init__.py (93%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/densifier.py (91%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/extra_trees_preproc_for_classification.py (98%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/fast_ica.py (94%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/feature_agglomeration.py (97%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/gem.py (92%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/kernel_pca.py (92%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/kitchen_sinks.py (93%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/liblinear_svc_preprocessor.py (97%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/no_preprocessing.py (93%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/nystroem_sampler.py (75%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/pca.py (94%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/polynomial.py (96%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/random_trees_embedding.py (96%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/select_percentile.py (100%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/select_percentile_classification.py (63%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/select_percentile_regression.py (92%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/select_rates.py (78%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/tfidf.py (97%) rename ParamSklearn/components/{preprocessing => feature_preprocessing}/truncatedSVD.py (94%) delete mode 100644 ParamSklearn/components/preprocessing/rescaling.py rename tests/components/{preprocessing => data_preprocessing}/__init__.py (100%) rename tests/components/{preprocessing => data_preprocessing}/test_balancing.py (95%) rename tests/components/{preprocessing => data_preprocessing}/test_imputation.py (94%) create mode 100644 tests/components/data_preprocessing/test_scaling.py create mode 100644 tests/components/feature_preprocessing/__init__.py rename tests/components/{preprocessing => feature_preprocessing}/test_NoPreprocessing.py (91%) rename tests/components/{preprocessing => feature_preprocessing}/test_densifier.py (88%) rename tests/components/{preprocessing => feature_preprocessing}/test_extra_trees.py (94%) rename tests/components/{preprocessing => feature_preprocessing}/test_fast_ica.py (96%) rename tests/components/{preprocessing => feature_preprocessing}/test_feature_agglomeration.py (94%) rename tests/components/{preprocessing => feature_preprocessing}/test_gem.py (96%) rename tests/components/{preprocessing => feature_preprocessing}/test_kernel_pca.py (96%) rename tests/components/{preprocessing => feature_preprocessing}/test_kitchen_sinks.py (88%) rename tests/components/{preprocessing => feature_preprocessing}/test_liblinear.py (95%) rename tests/components/{preprocessing => feature_preprocessing}/test_nystroem_sampler.py (76%) rename tests/components/{preprocessing => feature_preprocessing}/test_pca.py (92%) rename tests/components/{preprocessing => feature_preprocessing}/test_polynomial.py (96%) rename tests/components/{preprocessing => feature_preprocessing}/test_random_trees_embedding.py (96%) rename tests/components/{preprocessing => feature_preprocessing}/test_select_percentile_classification.py (76%) rename tests/components/{preprocessing => feature_preprocessing}/test_select_percentile_regression.py (94%) rename tests/components/{preprocessing => feature_preprocessing}/test_select_rates.py (72%) rename tests/components/{preprocessing => feature_preprocessing}/test_truncatedSVD.py (96%) delete mode 100644 tests/components/preprocessing/test_scaling.py diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index 6d5964a0d2..76ee81a4ee 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -100,9 +100,17 @@ def pre_transform(self, X, y, fit_params=None, init_params=None): preproc_params.update(init_params_per_method[preproc_name]) - preprocessor_object = components.preprocessing_components. \ - _preprocessors[preproc_name](random_state=self.random_state, - **preproc_params) + if preproc_name in \ + components.feature_preprocessing_components._preprocessors: + _preprocessors = components.feature_preprocessing_components._preprocessors + elif preproc_name in \ + components.data_preprocessing_components._preprocessors: + _preprocessors = components.data_preprocessing_components._preprocessors + else: + raise ValueError() + + preprocessor_object = _preprocessors[preproc_name]( + random_state=self.random_state, **preproc_params) # Ducktyping... if hasattr(preprocessor_object, 'get_components'): @@ -256,6 +264,13 @@ def _get_hyperparameter_search_space(cls, cs, dataset_properties, exclude, if exclude is None: exclude = {} + if 'sparse' not in dataset_properties: + # This dataset is probaby dense + dataset_properties['sparse'] = False + if 'signed' not in dataset_properties: + # This dataset probably contains unsigned data + dataset_properties['signed'] = False + matches = ParamSklearn.create_searchspace_util.get_match_array( pipeline, dataset_properties, include=include, exclude=exclude) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 05785857a4..b1e864983b 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -11,7 +11,7 @@ from ParamSklearn import components as components from ParamSklearn.base import ParamSklearnBaseEstimator from ParamSklearn.constants import SPARSE -from ParamSklearn.components.preprocessing.balancing import Balancing +from ParamSklearn.components.data_preprocessing.balancing import Balancing import ParamSklearn.create_searchspace_util @@ -154,10 +154,6 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, if dataset_properties is None or not isinstance(dataset_properties, dict): dataset_properties = dict() - if 'sparse' not in dataset_properties: - # This dataset is probaby dense - dataset_properties['sparse'] = False - pipeline = cls._get_pipeline() cs = cls._get_hyperparameter_search_space(cs, dataset_properties, exclude, include, pipeline) @@ -206,24 +202,10 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, pass # Won't work - # Multinomial NB etc does not work with negative values, don't use - # it with standardization, features learning, pca - classifiers_ = ["multinomial_nb", "bernoulli_nb"] + # Multinomial NB etc don't use with features learning, pca etc + classifiers_ = ["multinomial_nb"] preproc_with_negative_X = ["kitchen_sinks", "pca", "truncatedSVD", "fast_ica", "kernel_pca", "nystroem_sampler"] - scaling_strategies = ['standard', 'none', "normalize"] - for c in classifiers_: - if c not in classifiers: - continue - for scaling_strategy in scaling_strategies: - try: - cs.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(cs.get_hyperparameter( - "rescaling:strategy"), scaling_strategy), - ForbiddenEqualsClause(cs.get_hyperparameter( - "classifier:__choice__"), c))) - except KeyError: - pass for c, f in product(classifiers_, preproc_with_negative_X): if c not in classifiers: @@ -239,37 +221,6 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, except KeyError: pass - # Now try to add things for which we know that they don't work - forbidden_hyperparameter_combinations = \ - [("preprocessor:select_percentile_classification:score_func", "chi2", - "rescaling:strategy", "standard"), - ("preprocessor:select_percentile_classification:score_func", "chi2", - "rescaling:strategy", "normalize"), - ("preprocessor:select_percentile_classification:score_func", "chi2", - "rescaling:strategy", "none"), - ("preprocessor:select_rates:score_func", "chi2", - "rescaling:strategy", "standard"), - ("preprocessor:select_rates:score_func", "chi2", - "rescaling:strategy", "none"), - ("preprocessor:select_rates:score_func", "chi2", - "rescaling:strategy", "normalize"), - ("preprocessor:nystroem_sampler:kernel", 'chi2', "rescaling:strategy", - "standard"), - ("preprocessor:nystroem_sampler:kernel", 'chi2', "rescaling:strategy", - "normalize"), - ("preprocessor:nystroem_sampler:kernel", 'chi2', "rescaling:strategy", - "none")] - for hp_name_1, hp_value_1, hp_name_2, hp_value_2 in \ - forbidden_hyperparameter_combinations: - try: - cs.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(cs.get_hyperparameter( - hp_name_1), hp_value_1), - ForbiddenEqualsClause(cs.get_hyperparameter( - hp_name_2), hp_value_2) - )) - except: - pass return cs @@ -280,15 +231,15 @@ def _get_pipeline(): # Add the always active preprocessing components steps.extend( [["imputation", - components.preprocessing._preprocessors['imputation']], + components.data_preprocessing._preprocessors['imputation']], ["rescaling", - components.preprocessing._preprocessors['rescaling']], + components.data_preprocessing._preprocessors['rescaling']], ["balancing", - components.preprocessing._preprocessors['balancing']]]) + components.data_preprocessing._preprocessors['balancing']]]) # Add the preprocessing component steps.append(['preprocessor', - components.preprocessing._preprocessors['preprocessor']]) + components.feature_preprocessing._preprocessors['preprocessor']]) # Add the classification component steps.append(['classifier', diff --git a/ParamSklearn/components/__init__.py b/ParamSklearn/components/__init__.py index 8485d27b68..44fd0f9e7f 100644 --- a/ParamSklearn/components/__init__.py +++ b/ParamSklearn/components/__init__.py @@ -38,7 +38,8 @@ from . import classification as classification_components from . import regression as regression_components -from . import preprocessing as preprocessing_components +from . import feature_preprocessing as feature_preprocessing_components +from . import data_preprocessing as data_preprocessing_components diff --git a/ParamSklearn/components/base.py b/ParamSklearn/components/base.py index 9fd15a1934..ffb860a3d6 100644 --- a/ParamSklearn/components/base.py +++ b/ParamSklearn/components/base.py @@ -10,7 +10,7 @@ def __init__(self): self.properties = None @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): """Get the properties of the underlying algorithm. These are: * Short name @@ -134,7 +134,7 @@ def __init__(self): self.preprocessor = None @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): """Get the properties of the underlying algorithm. These are: * Short name @@ -249,7 +249,7 @@ def __init__(self): self.properties = None @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): """Get the properties of the underlying algorithm. These are: * Short name diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index 0dca024187..4a1ef153bc 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -48,7 +48,7 @@ def predict_proba(self, X): return self.estimator.predict_proba(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'AB', 'name': 'AdaBoost Classifier', 'handles_missing_values': False, @@ -63,7 +63,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/ParamSklearn/components/classification/bernoulli_nb.py index 82c16471f6..2fe640d2a5 100644 --- a/ParamSklearn/components/classification/bernoulli_nb.py +++ b/ParamSklearn/components/classification/bernoulli_nb.py @@ -35,7 +35,7 @@ def iterative_fit(self, X, y, n_iter=1, refit=False): if self.estimator is None: self.n_iter = 0 self.fully_fit_ = False - self.estimator = sklearn.naive_bayes.MultinomialNB( + self.estimator = sklearn.naive_bayes.BernoulliNB( alpha=self.alpha, fit_prior=self.fit_prior) self.classes_ = np.unique(y.astype(int)) @@ -72,9 +72,9 @@ def predict_proba(self, X): return self.estimator.predict_proba(X) @staticmethod - def get_properties(): - return {'shortname': 'MultinomialNB', - 'name': 'Multinomial Naive Bayes classifier', + def get_properties(dataset_properties=None): + return {'shortname': 'BernoulliNB', + 'name': 'Bernoulli Naive Bayes classifier', 'handles_missing_values': False, 'handles_nominal_values': False, # sklearn website says: ... BernoulliNB is designed for @@ -88,7 +88,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), 'preferred_dtype': np.bool} diff --git a/ParamSklearn/components/classification/decision_tree.py b/ParamSklearn/components/classification/decision_tree.py index c5a5c35c1f..b39e35da06 100644 --- a/ParamSklearn/components/classification/decision_tree.py +++ b/ParamSklearn/components/classification/decision_tree.py @@ -65,7 +65,7 @@ def predict_proba(self, X): return self.estimator.predict_proba(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'DT', 'name': 'Decision Tree Classifier', 'handles_missing_values': False, @@ -80,7 +80,7 @@ def get_properties(): 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': True, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/classification/extra_trees.py b/ParamSklearn/components/classification/extra_trees.py index 60ace9237c..de64d68299 100644 --- a/ParamSklearn/components/classification/extra_trees.py +++ b/ParamSklearn/components/classification/extra_trees.py @@ -113,7 +113,7 @@ def predict_proba(self, X): return self.estimator.predict_proba(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'ET', 'name': 'Extra Trees Classifier', 'handles_missing_values': False, @@ -128,7 +128,7 @@ def get_properties(): 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': True, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/classification/gaussian_nb.py b/ParamSklearn/components/classification/gaussian_nb.py index 90b8502bff..d44bc68d27 100644 --- a/ParamSklearn/components/classification/gaussian_nb.py +++ b/ParamSklearn/components/classification/gaussian_nb.py @@ -62,7 +62,7 @@ def predict_proba(self, X): return self.estimator.predict_proba(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'GaussianNB', 'name': 'Gaussian Naive Bayes classifier', 'handles_missing_values': False, @@ -76,7 +76,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, ), + 'input': (DENSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/gradient_boosting.py b/ParamSklearn/components/classification/gradient_boosting.py index a9874f3347..d8c6630704 100644 --- a/ParamSklearn/components/classification/gradient_boosting.py +++ b/ParamSklearn/components/classification/gradient_boosting.py @@ -109,7 +109,7 @@ def predict_proba(self, X): return self.estimator.predict_proba(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'GB', 'name': 'Gradient Boosting Classifier', 'handles_missing_values': False, @@ -124,7 +124,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, ), + 'input': (DENSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index 04ab8f040a..da31a35610 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -36,7 +36,7 @@ def predict_proba(self, X): return self.estimator.predict_proba(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'KNN', 'name': 'K-Nearest Neighbor Classification', 'handles_missing_values': False, @@ -51,7 +51,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype' : None} diff --git a/ParamSklearn/components/classification/lda.py b/ParamSklearn/components/classification/lda.py index 6657b185b2..86052b66cd 100644 --- a/ParamSklearn/components/classification/lda.py +++ b/ParamSklearn/components/classification/lda.py @@ -56,7 +56,7 @@ def predict_proba(self, X): return softmax(df) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'LDA', 'name': 'Linear Discriminant Analysis', 'handles_missing_values': False, @@ -71,7 +71,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, ), + 'input': (DENSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/classification/liblinear_svc.py b/ParamSklearn/components/classification/liblinear_svc.py index 66371ea611..96b9399862 100644 --- a/ParamSklearn/components/classification/liblinear_svc.py +++ b/ParamSklearn/components/classification/liblinear_svc.py @@ -65,7 +65,7 @@ def predict_proba(self, X): return softmax(df) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'Liblinear-SVC', 'name': 'Liblinear Support Vector Classification', 'handles_missing_values': False, @@ -80,7 +80,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': False, 'handles_sparse': True, - 'input': (SPARSE, DENSE), + 'input': (SPARSE, DENSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), 'preferred_dtype': None} diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index 5a9847c30b..79cf20c6a3 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -76,7 +76,7 @@ def predict_proba(self, X): @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'LibSVM-SVC', 'name': 'LibSVM Support Vector Classification', 'handles_missing_values': False, @@ -93,7 +93,7 @@ def get_properties(): # TODO find out of this is right! # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use 'handles_sparse': True, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! # C-continouos and double precision... diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py index cdce66e221..438513062d 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -1,5 +1,6 @@ import numpy as np import sklearn.naive_bayes +import scipy.sparse from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -40,6 +41,13 @@ def iterative_fit(self, X, y, n_iter=1, refit=False): alpha=self.alpha, fit_prior=self.fit_prior) self.classes_ = np.unique(y.astype(int)) + # Because the pipeline guarantees that each feature is positive, + # clip all values below zero to zero + if scipy.sparse.issparse(X): + X.data[X.data < 0] = 0.0 + else: + X[X < 0] = 0.0 + for iter in range(n_iter): start = min(self.n_iter * 1000, y.shape[0]) stop = min((self.n_iter + 1) * 1000, y.shape[0]) @@ -72,7 +80,7 @@ def predict_proba(self, X): return self.estimator.predict_proba(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'MultinomialNB', 'name': 'Multinomial Naive Bayes classifier', 'handles_missing_values': False, @@ -89,7 +97,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, SIGNED_DATA), 'output': (PREDICTIONS,), 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/passive_aggresive.py b/ParamSklearn/components/classification/passive_aggresive.py index 920a1360a9..8f870dbf65 100644 --- a/ParamSklearn/components/classification/passive_aggresive.py +++ b/ParamSklearn/components/classification/passive_aggresive.py @@ -61,7 +61,7 @@ def predict_proba(self, X): return softmax(df) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'PassiveAggressive Classifier', 'name': 'Passive Aggressive Stochastic Gradient Descent ' 'Classifier', @@ -76,7 +76,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/classification/proj_logit.py b/ParamSklearn/components/classification/proj_logit.py index 5473b5ee7a..6c5277702c 100644 --- a/ParamSklearn/components/classification/proj_logit.py +++ b/ParamSklearn/components/classification/proj_logit.py @@ -33,7 +33,7 @@ def predict_proba(self, X): return self.estimator.predict_proba(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'PLogit', 'name': 'Logistic Regresion using Least Squares', 'handles_missing_values': False, @@ -47,7 +47,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, ), + 'input': (DENSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), 'preferred_dtype': np.float32} diff --git a/ParamSklearn/components/classification/qda.py b/ParamSklearn/components/classification/qda.py index 3e4eb7edf6..f0f9c1382e 100644 --- a/ParamSklearn/components/classification/qda.py +++ b/ParamSklearn/components/classification/qda.py @@ -34,7 +34,7 @@ def predict_proba(self, X): return softmax(df) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'QDA', 'name': 'Quadratic Discriminant Analysis', 'handles_missing_values': False, @@ -49,7 +49,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, ), + 'input': (DENSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py index eb4ddba5e5..49ca2b5857 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -107,7 +107,7 @@ def predict_proba(self, X): return self.estimator.predict_proba(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'RF', 'name': 'Random Forest Classifier', 'handles_missing_values': False, @@ -121,7 +121,7 @@ def get_properties(): 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': True, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/classification/ridge.py b/ParamSklearn/components/classification/ridge.py index 62cc3744d9..73b0865544 100644 --- a/ParamSklearn/components/classification/ridge.py +++ b/ParamSklearn/components/classification/ridge.py @@ -46,7 +46,7 @@ def predict_proba(self, X): return softmax(df) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'Rigde', 'name': 'Rigde Classifier', 'handles_missing_values': False, @@ -60,7 +60,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/classification/sgd.py b/ParamSklearn/components/classification/sgd.py index a46fa7b773..377c88e245 100644 --- a/ParamSklearn/components/classification/sgd.py +++ b/ParamSklearn/components/classification/sgd.py @@ -91,7 +91,7 @@ def predict_proba(self, X): return softmax(df) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'SGD Classifier', 'name': 'Stochastic Gradient Descent Classifier', 'handles_missing_values': False, @@ -105,7 +105,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype' : None} diff --git a/ParamSklearn/components/data_preprocessing/__init__.py b/ParamSklearn/components/data_preprocessing/__init__.py new file mode 100644 index 0000000000..d6536d41aa --- /dev/null +++ b/ParamSklearn/components/data_preprocessing/__init__.py @@ -0,0 +1,33 @@ +__author__ = 'feurerm' + +from collections import OrderedDict +import copy +import inspect +import os +import pkgutil +import sys + +from ..base import ParamSklearnPreprocessingAlgorithm +from .rescaling import RescalingChoice + + +preprocessors_directory = os.path.split(__file__)[0] +_preprocessors = {} + +for module_loader, module_name, ispkg in pkgutil.iter_modules( + [preprocessors_directory]): + full_module_name = "%s.%s" % (__package__, module_name) + if full_module_name not in sys.modules and not ispkg: + module = module_loader.find_module(module_name).load_module( + full_module_name) + + for member_name, obj in inspect.getmembers(module): + if inspect.isclass( + obj) and ParamSklearnPreprocessingAlgorithm in obj.__bases__: + # TODO test if the obj implements the interface + # Keep in mind that this only instantiates the ensemble_wrapper, + # but not the real target classifier + preprocessor = obj + _preprocessors[module_name] = preprocessor + +_preprocessors['rescaling'] = RescalingChoice \ No newline at end of file diff --git a/ParamSklearn/components/preprocessing/balancing.py b/ParamSklearn/components/data_preprocessing/balancing.py similarity index 97% rename from ParamSklearn/components/preprocessing/balancing.py rename to ParamSklearn/components/data_preprocessing/balancing.py index 59083b6d1e..ae08329321 100644 --- a/ParamSklearn/components/preprocessing/balancing.py +++ b/ParamSklearn/components/data_preprocessing/balancing.py @@ -81,7 +81,7 @@ def get_weights(self, Y, classifier, preprocessor, init_params, fit_params): return init_params, fit_params @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'Balancing', 'name': 'Balancing Imbalanced Class Distributions', 'handles_missing_values': True, @@ -96,7 +96,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': True, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA, SIGNED_DATA), 'output': (INPUT,), 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/imputation.py b/ParamSklearn/components/data_preprocessing/imputation.py similarity index 95% rename from ParamSklearn/components/preprocessing/imputation.py rename to ParamSklearn/components/data_preprocessing/imputation.py index 1d5c49f9d6..7961762a3b 100644 --- a/ParamSklearn/components/preprocessing/imputation.py +++ b/ParamSklearn/components/data_preprocessing/imputation.py @@ -25,7 +25,7 @@ def transform(self, X): return self.preprocessor.transform(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'Imputation', 'name': 'Imputation', 'handles_missing_values': True, @@ -41,7 +41,7 @@ def get_properties(): # TODO find out of this is right! 'handles_sparse': True, 'handles_dense': True, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (INPUT,), 'preferred_dtype': None} diff --git a/ParamSklearn/components/data_preprocessing/rescaling.py b/ParamSklearn/components/data_preprocessing/rescaling.py new file mode 100644 index 0000000000..3646012a0a --- /dev/null +++ b/ParamSklearn/components/data_preprocessing/rescaling.py @@ -0,0 +1,243 @@ +from collections import OrderedDict +import copy + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition, AbstractConjunction + +from ParamSklearn.implementations.StandardScaler import StandardScaler +from ParamSklearn.implementations.MinMaxScaler import MinMaxScaler +from ParamSklearn.implementations.Normalizer import Normalizer +from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.constants import * + + +class Rescaling(object): + def fit(self, X, y=None): + self.preprocessor.fit(X) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + return cs + + +class NoRescalingComponent(Rescaling): + def __init__(self, random_state): + pass + + def fit(self, X, y=None): + return self + + def transform(self, X): + return X + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'MinMaxScaler', + 'name': 'MinMaxScaler', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + # TODO find out of this is right! + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE, UNSIGNED_DATA), + 'output': (INPUT,), + 'preferred_dtype': None} + + +class MinMaxScalerComponent(Rescaling): + def __init__(self, random_state): + self.preprocessor = MinMaxScaler() + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'MinMaxScaler', + 'name': 'MinMaxScaler', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + # TODO find out of this is right! + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE, UNSIGNED_DATA), + 'output': (INPUT, SIGNED_DATA), + 'preferred_dtype': None} + + +class StandardScalerComponent(Rescaling): + def __init__(self, random_state): + self.preprocessor = StandardScaler() + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'StandardScaler', + 'name': 'StandardScaler', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + # TODO find out of this is right! + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE, UNSIGNED_DATA), + 'output': (INPUT,), + 'preferred_dtype': None} + + +class NormalizerComponent(Rescaling): + def __init__(self, random_state): + self.preprocessor = Normalizer() + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'Normalizer', + 'name': 'Normalizer', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + # TODO find out of this is right! + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE, UNSIGNED_DATA), + 'output': (INPUT,), + 'preferred_dtype': None} + + +class RescalingChoice(object): + def __init__(self, **params): + choice = params['__choice__'] + del params['__choice__'] + self.choice = self.get_components()[choice](**params) + + @classmethod + def get_components(cls): + return {'none': NoRescalingComponent, + 'min/max': MinMaxScalerComponent, + 'standardize': StandardScalerComponent, + 'normalize': NormalizerComponent} + + @classmethod + def get_available_components(cls, data_prop=None, + include=None, + exclude=None): + if include is not None and exclude is not None: + raise ValueError( + "The argument include and exclude cannot be used together.") + + available_comp = cls.get_components() + + components_dict = OrderedDict() + for name in available_comp: + if include is not None and name not in include: + continue + elif exclude is not None and name in exclude: + continue + entry = available_comp[name] + + components_dict[name] = entry + + return components_dict + + @classmethod + def get_hyperparameter_search_space(cls, dataset_properties=None, + default=None, + include=None, + exclude=None): + cs = ConfigurationSpace() + + # Compile a list of legal preprocessors for this problem + available_preprocessors = cls.get_available_components( + data_prop=dataset_properties, + include=include, exclude=exclude) + + if len(available_preprocessors) == 0: + raise ValueError( + "No rescaling algorithm found.") + + if default is None: + defaults = ['min/max', 'standardize', 'none', 'normalize'] + for default_ in defaults: + if default_ in available_preprocessors: + default = default_ + break + + preprocessor = CategoricalHyperparameter('__choice__', + available_preprocessors.keys(), + default=default) + cs.add_hyperparameter(preprocessor) + for name in available_preprocessors: + preprocessor_configuration_space = available_preprocessors[name]. \ + get_hyperparameter_search_space(dataset_properties) + for parameter in preprocessor_configuration_space.get_hyperparameters(): + new_parameter = copy.deepcopy(parameter) + new_parameter.name = "%s:%s" % (name, new_parameter.name) + cs.add_hyperparameter(new_parameter) + # We must only add a condition if the hyperparameter is not + # conditional on something else + if len(preprocessor_configuration_space. + get_parents_of(parameter)) == 0: + condition = EqualsCondition(new_parameter, preprocessor, + name) + cs.add_condition(condition) + + for condition in available_preprocessors[name]. \ + get_hyperparameter_search_space( + dataset_properties).get_conditions(): + if not isinstance(condition, AbstractConjunction): + dlcs = [condition] + else: + dlcs = condition.get_descendent_literal_conditions() + for dlc in dlcs: + if not dlc.child.name.startswith(name): + dlc.child.name = "%s:%s" % (name, dlc.child.name) + if not dlc.parent.name.startswith(name): + dlc.parent.name = "%s:%s" % (name, dlc.parent.name) + cs.add_condition(condition) + + for forbidden_clause in available_preprocessors[name]. \ + get_hyperparameter_search_space( + dataset_properties).forbidden_clauses: + dlcs = forbidden_clause.get_descendant_literal_clauses() + for dlc in dlcs: + if not dlc.hyperparameter.name.startswith(name): + dlc.hyperparameter.name = "%s:%s" % (name, + dlc.hyperparameter.name) + cs.add_forbidden_clause(forbidden_clause) + + return cs + diff --git a/ParamSklearn/components/preprocessing/__init__.py b/ParamSklearn/components/feature_preprocessing/__init__.py similarity index 93% rename from ParamSklearn/components/preprocessing/__init__.py rename to ParamSklearn/components/feature_preprocessing/__init__.py index 2d22d37e20..8c8808a338 100644 --- a/ParamSklearn/components/preprocessing/__init__.py +++ b/ParamSklearn/components/feature_preprocessing/__init__.py @@ -31,7 +31,7 @@ _preprocessors[module_name] = preprocessor -class PreprocessorChoice(object): +class FeaturePreprocessorChoice(object): def __init__(self, **params): choice = params['__choice__'] del params['__choice__'] @@ -61,7 +61,7 @@ def get_available_components(cls, data_prop, entry = available_comp[name] # Exclude itself to avoid infinite loop - if entry == PreprocessorChoice or hasattr(entry, 'get_components'): + if entry == FeaturePreprocessorChoice or hasattr(entry, 'get_components'): continue if entry.get_properties()['handles_classification'] is False: @@ -74,11 +74,6 @@ def get_available_components(cls, data_prop, continue components_dict[name] = entry - always_active = ["imputation", "rescaling", "balancing"] - components_dict = {key: value for key, value - in components_dict.items() - if key not in always_active} - return components_dict @classmethod @@ -151,4 +146,4 @@ def get_hyperparameter_search_space(cls, dataset_properties, return cs -_preprocessors['preprocessor'] = PreprocessorChoice \ No newline at end of file +_preprocessors['preprocessor'] = FeaturePreprocessorChoice \ No newline at end of file diff --git a/ParamSklearn/components/preprocessing/densifier.py b/ParamSklearn/components/feature_preprocessing/densifier.py similarity index 91% rename from ParamSklearn/components/preprocessing/densifier.py rename to ParamSklearn/components/feature_preprocessing/densifier.py index 7462c42217..7446a37df5 100644 --- a/ParamSklearn/components/preprocessing/densifier.py +++ b/ParamSklearn/components/feature_preprocessing/densifier.py @@ -21,7 +21,7 @@ def transform(self, X): return X @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'RandomTreesEmbedding', 'name': 'Random Trees Embedding', 'handles_missing_values': True, @@ -36,8 +36,8 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': False, - 'input': (SPARSE,), - 'output': (DENSE,), + 'input': (SPARSE, UNSIGNED_DATA), + 'output': (DENSE, INPUT), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py b/ParamSklearn/components/feature_preprocessing/extra_trees_preproc_for_classification.py similarity index 98% rename from ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py rename to ParamSklearn/components/feature_preprocessing/extra_trees_preproc_for_classification.py index 6be60760a8..fdfc34ba9b 100644 --- a/ParamSklearn/components/preprocessing/extra_trees_preproc_for_classification.py +++ b/ParamSklearn/components/feature_preprocessing/extra_trees_preproc_for_classification.py @@ -91,7 +91,7 @@ def transform(self, X): return self.preprocessor.transform(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'ET', 'name': 'Extra Trees Classifier Preprocessing', 'handles_missing_values': False, @@ -106,7 +106,7 @@ def get_properties(): 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (INPUT,), # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/preprocessing/fast_ica.py b/ParamSklearn/components/feature_preprocessing/fast_ica.py similarity index 94% rename from ParamSklearn/components/preprocessing/fast_ica.py rename to ParamSklearn/components/feature_preprocessing/fast_ica.py index 29662ecdf4..65b63d8cac 100644 --- a/ParamSklearn/components/preprocessing/fast_ica.py +++ b/ParamSklearn/components/feature_preprocessing/fast_ica.py @@ -38,7 +38,7 @@ def transform(self, X): return self.preprocessor.transform(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'FastICA', 'name': 'Fast Independent Component Analysis', 'handles_missing_values': False, @@ -53,8 +53,8 @@ def get_properties(): 'is_deterministic': False, 'handles_sparse': True, 'handles_dense': True, - 'input': (DENSE, ), - 'output': (INPUT,), + 'input': (DENSE, UNSIGNED_DATA), + 'output': (INPUT, UNSIGNED_DATA), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/feature_agglomeration.py b/ParamSklearn/components/feature_preprocessing/feature_agglomeration.py similarity index 97% rename from ParamSklearn/components/preprocessing/feature_agglomeration.py rename to ParamSklearn/components/feature_preprocessing/feature_agglomeration.py index a5f487aabc..770c749428 100644 --- a/ParamSklearn/components/preprocessing/feature_agglomeration.py +++ b/ParamSklearn/components/feature_preprocessing/feature_agglomeration.py @@ -42,7 +42,7 @@ def transform(self, X): return self.preprocessor.transform(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'Feature Agglomeration', 'name': 'Feature Agglomeration', 'handles_missing_values': False, @@ -57,7 +57,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': True, - 'input': (DENSE, ), + 'input': (DENSE, UNSIGNED_DATA), 'output': (INPUT,), 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/gem.py b/ParamSklearn/components/feature_preprocessing/gem.py similarity index 92% rename from ParamSklearn/components/preprocessing/gem.py rename to ParamSklearn/components/feature_preprocessing/gem.py index 82e689dbd8..339f5ef281 100644 --- a/ParamSklearn/components/preprocessing/gem.py +++ b/ParamSklearn/components/feature_preprocessing/gem.py @@ -22,7 +22,7 @@ def transform(self, X): @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'GEM', 'name': 'Generalized Eigenvector extraction', 'handles_missing_values': False, @@ -37,8 +37,8 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'handles_dense': True, - 'input': (DENSE, ), - 'output': (INPUT,), + 'input': (DENSE, UNSIGNED_DATA), + 'output': (INPUT, UNSIGNED_DATA), 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/kernel_pca.py b/ParamSklearn/components/feature_preprocessing/kernel_pca.py similarity index 92% rename from ParamSklearn/components/preprocessing/kernel_pca.py rename to ParamSklearn/components/feature_preprocessing/kernel_pca.py index d49d77ca70..07589c2c12 100644 --- a/ParamSklearn/components/preprocessing/kernel_pca.py +++ b/ParamSklearn/components/feature_preprocessing/kernel_pca.py @@ -1,5 +1,7 @@ import warnings +import numpy as np +import scipy.sparse import sklearn.decomposition from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -27,6 +29,8 @@ def fit(self, X, Y=None): n_components=self.n_components, kernel=self.kernel, degree=self.degree, gamma=self.gamma, coef0=self.coef0) # Make the RuntimeWarning an Exception! + if scipy.sparse.issparse(X): + X = X.astype(np.float64) with warnings.catch_warnings(): warnings.filterwarnings("error") self.preprocessor.fit(X) @@ -41,7 +45,7 @@ def transform(self, X): return X_new @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'KernelPCA', 'name': 'Kernel Principal Component Analysis', 'handles_missing_values': False, @@ -56,8 +60,8 @@ def get_properties(): 'is_deterministic': False, 'handles_sparse': True, 'handles_dense': True, - 'input': (DENSE, SPARSE), - 'output': (DENSE,), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), + 'output': (DENSE, UNSIGNED_DATA), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/kitchen_sinks.py b/ParamSklearn/components/feature_preprocessing/kitchen_sinks.py similarity index 93% rename from ParamSklearn/components/preprocessing/kitchen_sinks.py rename to ParamSklearn/components/feature_preprocessing/kitchen_sinks.py index 5329c6779e..2b31182771 100644 --- a/ParamSklearn/components/preprocessing/kitchen_sinks.py +++ b/ParamSklearn/components/feature_preprocessing/kitchen_sinks.py @@ -32,7 +32,7 @@ def transform(self, X): return self.preprocessor.transform(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'KitchenSink', 'name': 'Random Kitchen Sinks', 'handles_missing_values': False, @@ -47,8 +47,8 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': True, - 'input': (SPARSE, DENSE), - 'output': (INPUT,), + 'input': (SPARSE, DENSE, UNSIGNED_DATA), + 'output': (INPUT, UNSIGNED_DATA), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py b/ParamSklearn/components/feature_preprocessing/liblinear_svc_preprocessor.py similarity index 97% rename from ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py rename to ParamSklearn/components/feature_preprocessing/liblinear_svc_preprocessor.py index 8b363aad59..badb6b0f5f 100644 --- a/ParamSklearn/components/preprocessing/liblinear_svc_preprocessor.py +++ b/ParamSklearn/components/feature_preprocessing/liblinear_svc_preprocessor.py @@ -58,7 +58,7 @@ def transform(self, X): return self.preprocessor.transform(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'Liblinear-Preprocessor', 'name': 'Liblinear Support Vector Preprocessing', 'handles_missing_values': False, @@ -73,7 +73,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': False, 'handles_sparse': True, - 'input': (SPARSE, DENSE), + 'input': (SPARSE, DENSE, UNSIGNED_DATA), 'output': (INPUT,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/no_preprocessing.py b/ParamSklearn/components/feature_preprocessing/no_preprocessing.py similarity index 93% rename from ParamSklearn/components/preprocessing/no_preprocessing.py rename to ParamSklearn/components/feature_preprocessing/no_preprocessing.py index f3ddf79376..3a95204dc4 100644 --- a/ParamSklearn/components/preprocessing/no_preprocessing.py +++ b/ParamSklearn/components/feature_preprocessing/no_preprocessing.py @@ -20,7 +20,7 @@ def transform(self, X): return X @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'no', 'name': 'NoPreprocessing', 'handles_missing_values': True, @@ -35,7 +35,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': True, - 'input': (SPARSE, DENSE), + 'input': (SPARSE, DENSE, UNSIGNED_DATA), 'output': (INPUT,), 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/nystroem_sampler.py b/ParamSklearn/components/feature_preprocessing/nystroem_sampler.py similarity index 75% rename from ParamSklearn/components/preprocessing/nystroem_sampler.py rename to ParamSklearn/components/feature_preprocessing/nystroem_sampler.py index 2fe0ed11a0..9764569520 100644 --- a/ParamSklearn/components/preprocessing/nystroem_sampler.py +++ b/ParamSklearn/components/feature_preprocessing/nystroem_sampler.py @@ -1,4 +1,5 @@ import numpy as np +import scipy.sparse import sklearn.kernel_approximation from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -26,16 +27,39 @@ def fit(self, X, Y=None): kernel=self.kernel, n_components=self.n_components, gamma=self.gamma, degree=self.degree, coef0=self.coef0, random_state=self.random_state) + + # Because the pipeline guarantees that each feature is positive, + # clip all values below zero to zero + if self.kernel == 'chi2': + if scipy.sparse.issparse(X): + X.data[X.data < 0] = 0.0 + else: + X[X < 0] = 0.0 + self.preprocessor.fit(X.astype(np.float64)) return self def transform(self, X): + # Because the pipeline guarantees that each feature is positive, + # clip all values below zero to zero + if self.kernel == 'chi2': + if scipy.sparse.issparse(X): + X.data[X.data < 0] = 0.0 + else: + X[X < 0] = 0.0 + if self.preprocessor is None: raise NotImplementedError() return self.preprocessor.transform(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): + data_type = UNSIGNED_DATA + + if dataset_properties is not None: + signed = dataset_properties.get('signed') + if signed is not None: + data_type = SIGNED_DATA if signed is True else UNSIGNED_DATA return {'shortname': 'Nystroem', 'name': 'Nystroem kernel approximation', 'handles_missing_values': False, @@ -50,13 +74,15 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': True, - 'input': (SPARSE, DENSE), - 'output': (INPUT,), + 'input': (SPARSE, DENSE, data_type), + 'output': (INPUT, UNSIGNED_DATA), 'preferred_dtype': None} @staticmethod def get_hyperparameter_search_space(dataset_properties=None): - if dataset_properties is not None and dataset_properties.get("sparse"): + if dataset_properties is not None and \ + (dataset_properties.get("sparse") is True or + dataset_properties.get("signed") is False): allow_chi2 = False else: allow_chi2 = True diff --git a/ParamSklearn/components/preprocessing/pca.py b/ParamSklearn/components/feature_preprocessing/pca.py similarity index 94% rename from ParamSklearn/components/preprocessing/pca.py rename to ParamSklearn/components/feature_preprocessing/pca.py index 12a0593258..d1fed4d1a5 100644 --- a/ParamSklearn/components/preprocessing/pca.py +++ b/ParamSklearn/components/feature_preprocessing/pca.py @@ -33,7 +33,7 @@ def transform(self, X): return self.preprocessor.transform(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'PCA', 'name': 'Principle Component Analysis', 'handles_missing_values': False, @@ -51,8 +51,8 @@ def get_properties(): 'is_deterministic': False, 'handles_sparse': False, 'handles_dense': True, - 'input': (DENSE, ), - 'output': (INPUT,), + 'input': (DENSE, UNSIGNED_DATA), + 'output': (DENSE, UNSIGNED_DATA), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/polynomial.py b/ParamSklearn/components/feature_preprocessing/polynomial.py similarity index 96% rename from ParamSklearn/components/preprocessing/polynomial.py rename to ParamSklearn/components/feature_preprocessing/polynomial.py index 0da8a686b2..c9d12a0477 100644 --- a/ParamSklearn/components/preprocessing/polynomial.py +++ b/ParamSklearn/components/feature_preprocessing/polynomial.py @@ -30,7 +30,7 @@ def transform(self, X): return self.preprocessor.transform(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'PolynomialFeatures', 'name': 'PolynomialFeatures', 'handles_missing_values': False, @@ -47,7 +47,7 @@ def get_properties(): # TODO find out of this is right! # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use 'handles_sparse': True, - 'input': (DENSE,), + 'input': (DENSE, UNSIGNED_DATA), 'output': (INPUT,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/random_trees_embedding.py b/ParamSklearn/components/feature_preprocessing/random_trees_embedding.py similarity index 96% rename from ParamSklearn/components/preprocessing/random_trees_embedding.py rename to ParamSklearn/components/feature_preprocessing/random_trees_embedding.py index da6f070bff..5ae095798b 100644 --- a/ParamSklearn/components/preprocessing/random_trees_embedding.py +++ b/ParamSklearn/components/feature_preprocessing/random_trees_embedding.py @@ -52,7 +52,7 @@ def transform(self, X): return self.preprocessor.transform(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'RandomTreesEmbedding', 'name': 'Random Trees Embedding', 'handles_missing_values': False, @@ -67,8 +67,8 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'handles_dense': True, - 'input': (DENSE, SPARSE), - 'output': (SPARSE,), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), + 'output': (SPARSE, SIGNED_DATA), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/preprocessing/select_percentile.py b/ParamSklearn/components/feature_preprocessing/select_percentile.py similarity index 100% rename from ParamSklearn/components/preprocessing/select_percentile.py rename to ParamSklearn/components/feature_preprocessing/select_percentile.py diff --git a/ParamSklearn/components/preprocessing/select_percentile_classification.py b/ParamSklearn/components/feature_preprocessing/select_percentile_classification.py similarity index 63% rename from ParamSklearn/components/preprocessing/select_percentile_classification.py rename to ParamSklearn/components/feature_preprocessing/select_percentile_classification.py index 2834def5c2..510dca9be7 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_classification.py +++ b/ParamSklearn/components/feature_preprocessing/select_percentile_classification.py @@ -4,9 +4,11 @@ import sklearn.feature_selection from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.components.preprocessing.select_percentile import SelectPercentileBase +from ParamSklearn.components.feature_preprocessing.select_percentile import SelectPercentileBase from ParamSklearn.constants import * +import scipy.sparse + class SelectPercentileClassification(SelectPercentileBase, ParamSklearnPreprocessingAlgorithm): @@ -28,8 +30,48 @@ def __init__(self, percentile, score_func="chi2", random_state=None): raise ValueError("score_func must be in ('chi2, 'f_classif'), " "but is: %s" % score_func) + def fit(self, X, y): + self.preprocessor = sklearn.feature_selection.SelectPercentile( + score_func=self.score_func, + percentile=self.percentile) + + # Because the pipeline guarantees that each feature is positive, + # clip all values below zero to zero + if self.score_func == sklearn.feature_selection.chi2: + if scipy.sparse.issparse(X): + X.data[X.data<0] = 0.0 + else: + X[X<0] = 0.0 + + self.preprocessor.fit(X, y) + return self + + def transform(self, X): + # Because the pipeline guarantees that each feature is positive, + # clip all values below zero to zero + if self.score_func == sklearn.feature_selection.chi2: + if scipy.sparse.issparse(X): + X.data[X.data < 0] = 0.0 + else: + X[X < 0] = 0.0 + + if self.preprocessor is None: + raise NotImplementedError() + Xt = self.preprocessor.transform(X) + if Xt.shape[1] == 0: + raise ValueError( + "%s removed all features." % self.__class__.__name__) + return Xt + + @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): + data_type = UNSIGNED_DATA + if dataset_properties is not None: + signed = dataset_properties.get('signed') + if signed is not None: + data_type = SIGNED_DATA if signed is True else UNSIGNED_DATA + return {'shortname': 'SPC', 'name': 'Select Percentile Classification', 'handles_missing_values': False, @@ -44,7 +86,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': True, - 'input': (SPARSE, DENSE), + 'input': (SPARSE, DENSE, data_type), 'output': (INPUT,), 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/select_percentile_regression.py b/ParamSklearn/components/feature_preprocessing/select_percentile_regression.py similarity index 92% rename from ParamSklearn/components/preprocessing/select_percentile_regression.py rename to ParamSklearn/components/feature_preprocessing/select_percentile_regression.py index 07e4f727b6..045428837c 100644 --- a/ParamSklearn/components/preprocessing/select_percentile_regression.py +++ b/ParamSklearn/components/feature_preprocessing/select_percentile_regression.py @@ -4,7 +4,7 @@ import sklearn.feature_selection from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.components.preprocessing.select_percentile import SelectPercentileBase +from ParamSklearn.components.feature_preprocessing.select_percentile import SelectPercentileBase from ParamSklearn.constants import * @@ -27,7 +27,7 @@ def __init__(self, percentile, score_func="f_classif", random_state=None): raise ValueError("Don't know this scoring function: %s" % score_func) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'SPR', 'name': 'Select Percentile Regression', 'handles_missing_values': False, @@ -42,7 +42,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': False, 'handles_dense': True, - 'input': (DENSE, ), + 'input': (DENSE, UNSIGNED_DATA), 'output': (DENSE,), 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/select_rates.py b/ParamSklearn/components/feature_preprocessing/select_rates.py similarity index 78% rename from ParamSklearn/components/preprocessing/select_rates.py rename to ParamSklearn/components/feature_preprocessing/select_rates.py index ad19cecb1a..837ef74163 100644 --- a/ParamSklearn/components/preprocessing/select_rates.py +++ b/ParamSklearn/components/feature_preprocessing/select_rates.py @@ -2,6 +2,7 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter, Constant +import scipy.sparse import sklearn.feature_selection from ParamSklearn.components.base import \ @@ -29,10 +30,26 @@ def fit(self, X, y): self.preprocessor = sklearn.feature_selection.GenericUnivariateSelect( score_func=self.score_func, param=self.alpha, mode=self.mode) + # Because the pipeline guarantees that each feature is positive, + # clip all values below zero to zero + if self.score_func == sklearn.feature_selection.chi2: + if scipy.sparse.issparse(X): + X.data[X.data < 0] = 0.0 + else: + X[X < 0] = 0.0 + self.preprocessor.fit(X, y) return self def transform(self, X): + # Because the pipeline guarantees that each feature is positive, + # clip all values below zero to zero + if self.score_func == sklearn.feature_selection.chi2: + if scipy.sparse.issparse(X): + X.data[X.data < 0] = 0.0 + else: + X[X < 0] = 0.0 + if self.preprocessor is None: raise NotImplementedError() try: @@ -51,7 +68,14 @@ def transform(self, X): return Xt @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): + data_type = UNSIGNED_DATA + + if dataset_properties is not None: + signed = dataset_properties.get('signed') + if signed is not None: + data_type = SIGNED_DATA if signed is True else UNSIGNED_DATA + return {'shortname': 'SR', 'name': 'Univariate Feature Selection based on rates', 'handles_missing_values': False, @@ -66,7 +90,7 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': True, - 'input': (SPARSE, DENSE), + 'input': (SPARSE, DENSE, data_type), 'output': (INPUT,), 'preferred_dtype': None} diff --git a/ParamSklearn/components/preprocessing/tfidf.py b/ParamSklearn/components/feature_preprocessing/tfidf.py similarity index 97% rename from ParamSklearn/components/preprocessing/tfidf.py rename to ParamSklearn/components/feature_preprocessing/tfidf.py index 3a4ffe4105..0fd0915249 100644 --- a/ParamSklearn/components/preprocessing/tfidf.py +++ b/ParamSklearn/components/feature_preprocessing/tfidf.py @@ -38,7 +38,7 @@ def transform(self, X): return X @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'TFIDF', 'name': 'Term Frequency / Inverse Document Frequency', 'handles_missing_values': False, diff --git a/ParamSklearn/components/preprocessing/truncatedSVD.py b/ParamSklearn/components/feature_preprocessing/truncatedSVD.py similarity index 94% rename from ParamSklearn/components/preprocessing/truncatedSVD.py rename to ParamSklearn/components/feature_preprocessing/truncatedSVD.py index 6dda2d73a6..3d9010d01c 100644 --- a/ParamSklearn/components/preprocessing/truncatedSVD.py +++ b/ParamSklearn/components/feature_preprocessing/truncatedSVD.py @@ -33,7 +33,7 @@ def transform(self, X): return self.preprocessor.transform(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'TSVD', 'name': 'Truncated Singular Value Decomposition', 'handles_missing_values': False, @@ -48,8 +48,8 @@ def get_properties(): 'is_deterministic': True, 'handles_sparse': True, 'handles_dense': False, - 'input': (SPARSE, ), - 'output': (DENSE,), + 'input': (SPARSE, UNSIGNED_DATA), + 'output': (DENSE, INPUT), 'preferred_dtype': np.float32} @staticmethod diff --git a/ParamSklearn/components/preprocessing/rescaling.py b/ParamSklearn/components/preprocessing/rescaling.py deleted file mode 100644 index ed2c376999..0000000000 --- a/ParamSklearn/components/preprocessing/rescaling.py +++ /dev/null @@ -1,77 +0,0 @@ -from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter - -from ParamSklearn.implementations.StandardScaler import StandardScaler -from ParamSklearn.implementations.MinMaxScaler import MinMaxScaler -from ParamSklearn.implementations.Normalizer import Normalizer -from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * - - -class none(object): - def fit(self, X, y=None): - return self - - def transform(self, X): - return X - - -class Rescaling(ParamSklearnPreprocessingAlgorithm): - def __init__(self, strategy, random_state=None): - # TODO pay attention to the cases when a copy is made - self.strategy = strategy - - def fit(self, X, Y=None): - if self.strategy == "min/max": - self.preprocessor = MinMaxScaler(copy=False) - elif self.strategy == "standard": - self.preprocessor = StandardScaler(copy=False) - elif self.strategy == 'none': - self.preprocessor = none() - elif self.strategy == 'normalize': - self.preprocessor = Normalizer(norm='l2', copy=False) - else: - raise ValueError(self.strategy) - self.preprocessor.fit(X) - return self - - def transform(self, X): - if self.preprocessor is None: - raise NotImplementedError() - return self.preprocessor.transform(X) - - @staticmethod - def get_properties(): - return {'shortname': 'Rescaling', - 'name': 'Rescaling', - 'handles_missing_values': False, - 'handles_nominal_values': False, - 'handles_numerical_features': True, - 'prefers_data_scaled': False, - 'prefers_data_normalized': False, - 'handles_regression': True, - 'handles_classification': True, - 'handles_multiclass': True, - 'handles_multilabel': True, - 'is_deterministic': True, - # TODO find out of this is right! - 'handles_sparse': True, - 'handles_dense': True, - 'input': (SPARSE, DENSE), - 'output': (INPUT,), - # Add something here... - 'preferred_dtype': None} - - @staticmethod - def get_hyperparameter_search_space(dataset_properties=None): - # TODO add replace by zero! - strategy = CategoricalHyperparameter( - "strategy", ["min/max", "standard", "none", "normalize"], - default="min/max") - cs = ConfigurationSpace() - cs.add_hyperparameter(strategy) - return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/regression/adaboost.py b/ParamSklearn/components/regression/adaboost.py index 1dbac3951f..cc7dd17f70 100644 --- a/ParamSklearn/components/regression/adaboost.py +++ b/ParamSklearn/components/regression/adaboost.py @@ -43,7 +43,7 @@ def predict(self, X): return self.estimator.predict(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'AB', 'name': 'AdaBoost Regressor', 'handles_missing_values': False, @@ -58,7 +58,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS, ), # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/regression/decision_tree.py b/ParamSklearn/components/regression/decision_tree.py index dae551ddd7..d3113d9774 100644 --- a/ParamSklearn/components/regression/decision_tree.py +++ b/ParamSklearn/components/regression/decision_tree.py @@ -58,7 +58,7 @@ def predict(self, X): return self.estimator.predict(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'DT', 'name': 'Decision Tree Classifier', 'handles_missing_values': False, @@ -73,7 +73,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': False, 'handles_sparse': True, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/regression/extra_trees.py b/ParamSklearn/components/regression/extra_trees.py index 123c5de69e..12380c080c 100644 --- a/ParamSklearn/components/regression/extra_trees.py +++ b/ParamSklearn/components/regression/extra_trees.py @@ -110,7 +110,7 @@ def predict_proba(self, X): return self.estimator.predict_proba(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'ET', 'name': 'Extra Trees Regressor', 'handles_missing_values': False, @@ -125,7 +125,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/regression/gaussian_process.py b/ParamSklearn/components/regression/gaussian_process.py index cc92230df2..5d37d063a7 100644 --- a/ParamSklearn/components/regression/gaussian_process.py +++ b/ParamSklearn/components/regression/gaussian_process.py @@ -47,7 +47,7 @@ def predict(self, X): return self.scaler.inverse_transform(Y_pred) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'GP', 'name': 'Gaussian Process', 'handles_missing_values': False, @@ -62,7 +62,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, ), + 'input': (DENSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/regression/gradient_boosting.py b/ParamSklearn/components/regression/gradient_boosting.py index ad030b2ace..766ba62dbb 100644 --- a/ParamSklearn/components/regression/gradient_boosting.py +++ b/ParamSklearn/components/regression/gradient_boosting.py @@ -109,7 +109,7 @@ def predict(self, X): return self.estimator.predict(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'GB', 'name': 'Gradient Boosting Regressor', 'handles_missing_values': False, @@ -124,7 +124,7 @@ def get_properties(): 'prefers_data_normalized': False, 'is_deterministic': True, 'handles_sparse': False, - 'input': (DENSE, ), + 'input': (DENSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/regression/k_nearest_neighbors.py b/ParamSklearn/components/regression/k_nearest_neighbors.py index 88796ab534..c7e19ca6ce 100644 --- a/ParamSklearn/components/regression/k_nearest_neighbors.py +++ b/ParamSklearn/components/regression/k_nearest_neighbors.py @@ -30,7 +30,7 @@ def predict(self, X): return self.estimator.predict(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'KNN', 'name': 'K-Nearest Neighbor Classification', 'handles_missing_values': False, @@ -45,7 +45,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/components/regression/liblinear_svr.py b/ParamSklearn/components/regression/liblinear_svr.py index 8d91a34d1e..fefb053085 100644 --- a/ParamSklearn/components/regression/liblinear_svr.py +++ b/ParamSklearn/components/regression/liblinear_svr.py @@ -48,7 +48,7 @@ def predict(self, X): return self.estimator.predict(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'Liblinear-SVR', 'name': 'Liblinear Support Vector Regression', 'handles_missing_values': False, @@ -63,7 +63,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': False, 'handles_sparse': True, - 'input': (SPARSE, DENSE), + 'input': (SPARSE, DENSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), 'preferred_dtype': None} diff --git a/ParamSklearn/components/regression/libsvm_svr.py b/ParamSklearn/components/regression/libsvm_svr.py index 16ad3a2645..fb321aae45 100644 --- a/ParamSklearn/components/regression/libsvm_svr.py +++ b/ParamSklearn/components/regression/libsvm_svr.py @@ -74,7 +74,7 @@ def predict(self, X): return self.scaler.inverse_transform(Y_pred) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'SVR', 'name': 'Support Vector Regression', 'handles_missing_values': False, @@ -89,7 +89,7 @@ def get_properties(): 'prefers_data_normalized': True, 'is_deterministic': True, 'handles_sparse': True, - 'input': (SPARSE, DENSE), + 'input': (SPARSE, DENSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/regression/random_forest.py b/ParamSklearn/components/regression/random_forest.py index f237d3112b..505ce0108a 100644 --- a/ParamSklearn/components/regression/random_forest.py +++ b/ParamSklearn/components/regression/random_forest.py @@ -98,7 +98,7 @@ def predict(self, X): return self.estimator.predict(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'RF', 'name': 'Random Forest Regressor', 'handles_missing_values': False, @@ -112,7 +112,7 @@ def get_properties(): 'prefers_data_normalized': False, 'is_deterministic': True, 'handles_sparse': True, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/regression/ridge_regression.py b/ParamSklearn/components/regression/ridge_regression.py index 6fca3926c2..e65f1ced8b 100644 --- a/ParamSklearn/components/regression/ridge_regression.py +++ b/ParamSklearn/components/regression/ridge_regression.py @@ -32,7 +32,7 @@ def predict(self, X): return self.estimator.predict(X) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'Rigde', 'name': 'Ridge Regression', 'handles_missing_values': False, @@ -47,7 +47,7 @@ def get_properties(): 'prefers_data_normalized': True, 'is_deterministic': True, 'handles_sparse': True, - 'input': (SPARSE, DENSE), + 'input': (SPARSE, DENSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! # But rather fortran or C-contiguous? diff --git a/ParamSklearn/components/regression/sgd.py b/ParamSklearn/components/regression/sgd.py index 7abb3e08fe..5385540c85 100644 --- a/ParamSklearn/components/regression/sgd.py +++ b/ParamSklearn/components/regression/sgd.py @@ -89,7 +89,7 @@ def predict(self, X): return self.scaler.inverse_transform(Y_pred) @staticmethod - def get_properties(): + def get_properties(dataset_properties=None): return {'shortname': 'SGD Regressor', 'name': 'Stochastic Gradient Descent Regressor', 'handles_missing_values': False, @@ -103,7 +103,7 @@ def get_properties(): 'handles_multilabel': False, 'is_deterministic': True, 'handles_sparse': True, - 'input': (DENSE, SPARSE), + 'input': (DENSE, SPARSE, UNSIGNED_DATA), 'output': (PREDICTIONS,), # TODO find out what is best used here! 'preferred_dtype': None} diff --git a/ParamSklearn/constants.py b/ParamSklearn/constants.py index 9692b5fbce..2cd95287be 100644 --- a/ParamSklearn/constants.py +++ b/ParamSklearn/constants.py @@ -5,10 +5,36 @@ MULTILABEL_CLASSIFICATION = 3 REGRESSION = 4 -DENSE = 0 -SPARSE = 1 -PREDICTIONS = 2 -INPUT = 3 +REGRESSION_TASKS = [REGRESSION] +CLASSIFICATION_TASKS = [BINARY_CLASSIFICATION, MULTICLASS_CLASSIFICATION, + MULTILABEL_CLASSIFICATION] -REAL_DATA = 0 -POSITIVE_REAL_DATA = 1 \ No newline at end of file +TASK_TYPES = REGRESSION_TASKS + CLASSIFICATION_TASKS + +TASK_TYPES_TO_STRING = \ + {BINARY_CLASSIFICATION: "binary.classification", + MULTICLASS_CLASSIFICATION: "multiclass.classification", + MULTILABEL_CLASSIFICATION: "multilabel.classification", + REGRESSION: "regression"} + +STRING_TO_TASK_TYPES = \ + {"binary.classification": BINARY_CLASSIFICATION, + "multiclass.classification": MULTICLASS_CLASSIFICATION, + "multilabel.classification": MULTILABEL_CLASSIFICATION, + "regression": REGRESSION} + +DENSE = 5 +SPARSE = 6 +PREDICTIONS = 7 +INPUT = 8 + +SIGNED_DATA = 9 +UNSIGNED_DATA = 10 + +DATASET_PROPERTIES_TO_STRING = \ + {DENSE: 'dense', + SPARSE: 'sparse', + PREDICTIONS: 'predictions', + INPUT: 'input', + SIGNED_DATA: 'signed data', + UNSIGNED_DATA: 'unsigned data'} \ No newline at end of file diff --git a/ParamSklearn/create_searchspace_util.py b/ParamSklearn/create_searchspace_util.py index ea620b7e43..224009635e 100644 --- a/ParamSklearn/create_searchspace_util.py +++ b/ParamSklearn/create_searchspace_util.py @@ -11,6 +11,7 @@ def get_match_array(pipeline, dataset_properties, include=None, exclude=None): sparse = dataset_properties.get('sparse') + signed = dataset_properties.get('signed') # Duck typing, not sure if it's good... node_i_is_choice = [] @@ -49,15 +50,27 @@ def get_match_array(pipeline, dataset_properties, enumerate(pipeline_instantiation_idxs)] data_is_sparse = sparse + dataset_is_signed = signed for node in pipeline_instantiation: node_input = node.get_properties()['input'] node_output = node.get_properties()['output'] + # First check if these two instantiations of this node can work + # together. Do this in multiple if statements to maintain + # readability if (data_is_sparse and SPARSE not in node_input) or \ not data_is_sparse and DENSE not in node_input: matches[pipeline_instantiation_idxs] = 0 break - if INPUT in node_output or PREDICTIONS in node_output or\ + # No need to check if the node can handle SIGNED_DATA; this is + # always assumed to be true + elif not dataset_is_signed and UNSIGNED_DATA not in node_input: + matches[pipeline_instantiation_idxs] = 0 + break + + if (INPUT in node_output and DENSE not in node_output and + SPARSE not in node_output) or \ + PREDICTIONS in node_output or\ (not data_is_sparse and DENSE in node_input and DENSE in node_output) or \ (data_is_sparse and SPARSE in node_input and @@ -70,7 +83,22 @@ def get_match_array(pipeline, dataset_properties, data_is_sparse = True else: print node - print data_is_sparse + print "Data is sparse", data_is_sparse + print node_input, node_output + raise ValueError("This combination is not allowed!") + + if PREDICTIONS in node_output: + pass + elif (INPUT in node_output and SIGNED_DATA not in node_output and + UNSIGNED_DATA not in node_output): + pass + elif SIGNED_DATA in node_output: + dataset_is_signed = True + elif UNSIGNED_DATA in node_output: + dataset_is_signed = False + else: + print node + print "Data is signed", dataset_is_signed print node_input, node_output raise ValueError("This combination is not allowed!") @@ -123,6 +151,7 @@ def add_forbidden(conf_space, pipeline, matches, dataset_properties, exclude=node_exclude).values()) else: + node_i_choices_names.append([node_name]) node_i_choices.append([node]) # Find out all chains of choices. Only in such a chain its possible to @@ -148,7 +177,6 @@ def add_forbidden(conf_space, pipeline, matches, dataset_properties, # Add one to have also have chain_length in the range for sub_chain_length in range(2, chain_length + 1): - for start_idx in range(chain_start, chain_stop - sub_chain_length + 1): indices = range(start_idx, start_idx + sub_chain_length) node_names = [pipeline[idx][0] for idx in indices] @@ -161,7 +189,8 @@ def add_forbidden(conf_space, pipeline, matches, dataset_properties, node = all_nodes[idx] available_components = node.get_available_components( dataset_properties, - include=node_i_choices_names[idx-start_idx]) + include=node_i_choices_names[idx]) + assert len(available_components) > 0, len(available_components) skip_array_shape.append(len(available_components)) num_node_choices.append(range(len(available_components))) node_choice_names.append([name for name in available_components]) @@ -193,7 +222,8 @@ def add_forbidden(conf_space, pipeline, matches, dataset_properties, # This prints the affected nodes # print [node_choice_names[i][product[i]] - # for i in range(len(product))] + # for i in range(len(product))], \ + # np.sum(matches[slices]) if np.sum(matches[slices]) == 0: constraint = tuple([(node_names[i], diff --git a/ParamSklearn/implementations/gem.py b/ParamSklearn/implementations/gem.py index c220ffd70d..e4110b3342 100644 --- a/ParamSklearn/implementations/gem.py +++ b/ParamSklearn/implementations/gem.py @@ -13,6 +13,7 @@ def __init__(self, N, precond): def fit(self, X, Y): + print X.shape, Y.shape self.N = min(self.N, X.shape[1]-2) y_max = int(np.max(Y) + 1) self.W = np.zeros((X.shape[1], self.N*y_max*(y_max-1)), dtype=X.dtype) diff --git a/ParamSklearn/regression.py b/ParamSklearn/regression.py index 632749bd5d..abf81383d4 100644 --- a/ParamSklearn/regression.py +++ b/ParamSklearn/regression.py @@ -144,7 +144,7 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, # A regressor which can handle sparse data after the densifier for key in regressors: - if SPARSE in available_regressors[key].get_properties()['input']: + if SPARSE in available_regressors[key].get_properties(dataset_properties=None)['input']: if 'densifier' in preprocessors: cs.add_forbidden_clause( ForbiddenAndConjunction( @@ -188,13 +188,14 @@ def _get_pipeline(): # Add the always active preprocessing components steps.extend( [["imputation", - components.preprocessing._preprocessors['imputation']], + components.data_preprocessing._preprocessors['imputation']], ["rescaling", - components.preprocessing._preprocessors['rescaling']]]) + components.data_preprocessing._preprocessors['rescaling']]]) # Add the preprocessing component steps.append(['preprocessor', - components.preprocessing._preprocessors['preprocessor']]) + components.feature_preprocessing._preprocessors[ + 'preprocessor']]) # Add the classification component steps.append(['regressor', diff --git a/source/first_steps.rst b/source/first_steps.rst index 3520d15200..26e89e17c2 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -17,10 +17,10 @@ configuration on the iris dataset. >>> np.random.seed(1) >>> np.random.shuffle(indices) >>> configuration_space = ParamSklearnClassifier.get_hyperparameter_search_space() - >>> configuration_space.seed(1) + >>> configuration_space.seed(3) >>> configuration = configuration_space.sample_configuration() >>> cls = ParamSklearnClassifier(configuration, random_state=1) >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.93999999999999995 + 0.66000000000000003 diff --git a/tests/components/classification/test_bernoulli_nb.py b/tests/components/classification/test_bernoulli_nb.py index 5c770b2dd4..321377bd01 100644 --- a/tests/components/classification/test_bernoulli_nb.py +++ b/tests/components/classification/test_bernoulli_nb.py @@ -12,7 +12,7 @@ def test_default_configuration(self): for i in range(10): predictions, targets = \ _test_classifier(BernoulliNB) - self.assertAlmostEqual(0.97999999999999998, + self.assertAlmostEqual(0.26000000000000001, sklearn.metrics.accuracy_score(predictions, targets)) @@ -20,6 +20,6 @@ def test_default_configuration_iterative_fit(self): for i in range(10): predictions, targets = \ _test_classifier_iterative_fit(BernoulliNB) - self.assertAlmostEqual(0.97999999999999998, + self.assertAlmostEqual(0.26000000000000001, sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file diff --git a/tests/components/classification/test_multinomial_nb.py b/tests/components/classification/test_multinomial_nb.py index 4225d50e07..f2cc49f385 100644 --- a/tests/components/classification/test_multinomial_nb.py +++ b/tests/components/classification/test_multinomial_nb.py @@ -2,8 +2,10 @@ from ParamSklearn.components.classification.multinomial_nb import \ MultinomialNB -from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit +from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit, \ + get_dataset +import numpy as np import sklearn.metrics @@ -22,4 +24,22 @@ def test_default_configuration_iterative_fit(self): _test_classifier_iterative_fit(MultinomialNB) self.assertAlmostEqual(0.97999999999999998, sklearn.metrics.accuracy_score(predictions, - targets)) \ No newline at end of file + targets)) + + def test_default_configuration_negative_values(self): + # Custon preprocessing test to check if clipping to zero works + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + original_X_train = X_train.copy() + ss = sklearn.preprocessing.StandardScaler() + X_train = ss.fit_transform(X_train) + configuration_space = MultinomialNB.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + + cls = MultinomialNB(random_state=1, **{hp_name: default[hp_name] + for hp_name in default + if default[hp_name] is not None}) + + cls = cls.fit(X_train, Y_train) + prediction = cls.predict(X_test) + self.assertAlmostEqual(np.nanmean(prediction == Y_test), + 0.88888888888888884) \ No newline at end of file diff --git a/tests/components/preprocessing/__init__.py b/tests/components/data_preprocessing/__init__.py similarity index 100% rename from tests/components/preprocessing/__init__.py rename to tests/components/data_preprocessing/__init__.py diff --git a/tests/components/preprocessing/test_balancing.py b/tests/components/data_preprocessing/test_balancing.py similarity index 95% rename from tests/components/preprocessing/test_balancing.py rename to tests/components/data_preprocessing/test_balancing.py index 35169d9253..a82d008318 100644 --- a/tests/components/preprocessing/test_balancing.py +++ b/tests/components/data_preprocessing/test_balancing.py @@ -5,7 +5,7 @@ import numpy as np import sklearn.metrics -from ParamSklearn.components.preprocessing.balancing import Balancing +from ParamSklearn.components.data_preprocessing.balancing import Balancing from ParamSklearn.classification import ParamSklearnClassifier from ParamSklearn.components.classification.adaboost import AdaboostClassifier from ParamSklearn.components.classification.decision_tree import DecisionTree @@ -16,10 +16,10 @@ from ParamSklearn.components.classification.libsvm_svc import LibSVM_SVC from ParamSklearn.components.classification.sgd import SGD from ParamSklearn.components.classification.ridge import Ridge -from ParamSklearn.components.preprocessing\ +from ParamSklearn.components.feature_preprocessing\ .extra_trees_preproc_for_classification import ExtraTreesPreprocessor -from ParamSklearn.components.preprocessing.liblinear_svc_preprocessor import LibLinear_Preprocessor -from ParamSklearn.components.preprocessing.random_trees_embedding import RandomTreesEmbedding +from ParamSklearn.components.feature_preprocessing.liblinear_svc_preprocessor import LibLinear_Preprocessor +from ParamSklearn.components.feature_preprocessing.random_trees_embedding import RandomTreesEmbedding from ParamSklearn.util import get_dataset diff --git a/tests/components/preprocessing/test_imputation.py b/tests/components/data_preprocessing/test_imputation.py similarity index 94% rename from tests/components/preprocessing/test_imputation.py rename to tests/components/data_preprocessing/test_imputation.py index 7caa6929e9..092e3d47b7 100644 --- a/tests/components/preprocessing/test_imputation.py +++ b/tests/components/data_preprocessing/test_imputation.py @@ -2,7 +2,7 @@ from scipy import sparse -from ParamSklearn.components.preprocessing.imputation import Imputation +from ParamSklearn.components.data_preprocessing.imputation import Imputation from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase diff --git a/tests/components/data_preprocessing/test_scaling.py b/tests/components/data_preprocessing/test_scaling.py new file mode 100644 index 0000000000..f3254e6846 --- /dev/null +++ b/tests/components/data_preprocessing/test_scaling.py @@ -0,0 +1,53 @@ +import unittest + +import numpy as np +import sklearn.datasets + +from ParamSklearn.components.data_preprocessing.rescaling import RescalingChoice +from ParamSklearn.util import get_dataset + + +class ScalingComponentTest(unittest.TestCase): + def _test_helper(self, Preprocessor, dataset=None, make_sparse=False): + X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, + make_sparse=make_sparse) + original_X_train = X_train.copy() + configuration_space = Preprocessor.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + + preprocessor = Preprocessor(random_state=1, + **{hp_name: default[hp_name] for hp_name in + default if default[hp_name] is not None}) + preprocessor = preprocessor.choice + transformer = preprocessor.fit(X_train, Y_train) + return transformer.transform(X_train), original_X_train + + def test_boston_is_not_scaled(self): + data = sklearn.datasets.load_boston()['data'] + self.assertGreaterEqual(np.max(data), 100) + + def test_default_configuration(self): + transformations = [] + for i in range(10): + transformation, original = self._test_helper(RescalingChoice, + dataset='boston') + # The maximum is around 1.95 for the transformed array... + self.assertLessEqual(np.max(transformation), 2) + self.assertFalse((original == transformation).all()) + transformations.append(transformation) + if len(transformations) > 1: + self.assertTrue( + (transformations[-1] == transformations[-2]).all()) + + def test_default_configuration_with_sparse_data(self): + preprocessing = self._test_helper(RescalingChoice, dataset='boston', + make_sparse=True) + transformation, original = preprocessing + self.assertEqual(original.getnnz(), transformation.getnnz()) + self.assertAlmostEqual(1, transformation.max(), places=6) + self.assertTrue(~np.allclose(original.data, transformation.data)) + + @unittest.skip("Does not work at the moment.") + def test_preprocessing_dtype(self): + super(ScalingComponentTest, self)._test_helper( + RescalingChoice) diff --git a/tests/components/feature_preprocessing/__init__.py b/tests/components/feature_preprocessing/__init__.py new file mode 100644 index 0000000000..8f0ce6cb7c --- /dev/null +++ b/tests/components/feature_preprocessing/__init__.py @@ -0,0 +1 @@ +__author__ = 'feurerm' diff --git a/tests/components/preprocessing/test_NoPreprocessing.py b/tests/components/feature_preprocessing/test_NoPreprocessing.py similarity index 91% rename from tests/components/preprocessing/test_NoPreprocessing.py rename to tests/components/feature_preprocessing/test_NoPreprocessing.py index c373382dd4..d947645a34 100644 --- a/tests/components/preprocessing/test_NoPreprocessing.py +++ b/tests/components/feature_preprocessing/test_NoPreprocessing.py @@ -1,7 +1,7 @@ import numpy as np import unittest -from ParamSklearn.components.preprocessing.no_preprocessing import NoPreprocessing +from ParamSklearn.components.feature_preprocessing.no_preprocessing import NoPreprocessing from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase diff --git a/tests/components/preprocessing/test_densifier.py b/tests/components/feature_preprocessing/test_densifier.py similarity index 88% rename from tests/components/preprocessing/test_densifier.py rename to tests/components/feature_preprocessing/test_densifier.py index 3f0d21386e..2fedd1abf3 100644 --- a/tests/components/preprocessing/test_densifier.py +++ b/tests/components/feature_preprocessing/test_densifier.py @@ -2,7 +2,7 @@ import numpy as np -from ParamSklearn.components.preprocessing.densifier import Densifier +from ParamSklearn.components.feature_preprocessing.densifier import Densifier from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase diff --git a/tests/components/preprocessing/test_extra_trees.py b/tests/components/feature_preprocessing/test_extra_trees.py similarity index 94% rename from tests/components/preprocessing/test_extra_trees.py rename to tests/components/feature_preprocessing/test_extra_trees.py index fdae495445..a347ebed61 100644 --- a/tests/components/preprocessing/test_extra_trees.py +++ b/tests/components/feature_preprocessing/test_extra_trees.py @@ -1,7 +1,7 @@ import unittest from sklearn.linear_model import RidgeClassifier -from ParamSklearn.components.preprocessing.extra_trees_preproc_for_classification import \ +from ParamSklearn.components.feature_preprocessing.extra_trees_preproc_for_classification import \ ExtraTreesPreprocessor from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset diff --git a/tests/components/preprocessing/test_fast_ica.py b/tests/components/feature_preprocessing/test_fast_ica.py similarity index 96% rename from tests/components/preprocessing/test_fast_ica.py rename to tests/components/feature_preprocessing/test_fast_ica.py index db9d6ba15c..347be61217 100644 --- a/tests/components/preprocessing/test_fast_ica.py +++ b/tests/components/feature_preprocessing/test_fast_ica.py @@ -1,7 +1,7 @@ import unittest from sklearn.linear_model import Ridge -from ParamSklearn.components.preprocessing.fast_ica import \ +from ParamSklearn.components.feature_preprocessing.fast_ica import \ FastICA from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset diff --git a/tests/components/preprocessing/test_feature_agglomeration.py b/tests/components/feature_preprocessing/test_feature_agglomeration.py similarity index 94% rename from tests/components/preprocessing/test_feature_agglomeration.py rename to tests/components/feature_preprocessing/test_feature_agglomeration.py index 0c69179763..7dc637f03c 100644 --- a/tests/components/preprocessing/test_feature_agglomeration.py +++ b/tests/components/feature_preprocessing/test_feature_agglomeration.py @@ -1,7 +1,7 @@ import unittest from sklearn.ensemble import RandomForestClassifier -from ParamSklearn.components.preprocessing.feature_agglomeration import FeatureAgglomeration +from ParamSklearn.components.feature_preprocessing.feature_agglomeration import FeatureAgglomeration from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset import sklearn.metrics diff --git a/tests/components/preprocessing/test_gem.py b/tests/components/feature_preprocessing/test_gem.py similarity index 96% rename from tests/components/preprocessing/test_gem.py rename to tests/components/feature_preprocessing/test_gem.py index 1bfdf818b6..827ac13547 100644 --- a/tests/components/preprocessing/test_gem.py +++ b/tests/components/feature_preprocessing/test_gem.py @@ -1,7 +1,7 @@ import unittest from ParamSklearn.components.classification.proj_logit import ProjLogitCLassifier -from ParamSklearn.components.preprocessing.gem import GEM +from ParamSklearn.components.feature_preprocessing.gem import GEM from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, get_dataset import sklearn.metrics diff --git a/tests/components/preprocessing/test_kernel_pca.py b/tests/components/feature_preprocessing/test_kernel_pca.py similarity index 96% rename from tests/components/preprocessing/test_kernel_pca.py rename to tests/components/feature_preprocessing/test_kernel_pca.py index 6be82de927..ad21d5c826 100644 --- a/tests/components/preprocessing/test_kernel_pca.py +++ b/tests/components/feature_preprocessing/test_kernel_pca.py @@ -1,7 +1,7 @@ import unittest from sklearn.linear_model import RidgeClassifier -from ParamSklearn.components.preprocessing.kernel_pca import \ +from ParamSklearn.components.feature_preprocessing.kernel_pca import \ KernelPCA from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset diff --git a/tests/components/preprocessing/test_kitchen_sinks.py b/tests/components/feature_preprocessing/test_kitchen_sinks.py similarity index 88% rename from tests/components/preprocessing/test_kitchen_sinks.py rename to tests/components/feature_preprocessing/test_kitchen_sinks.py index 3994feccb6..0fe2aa50c3 100644 --- a/tests/components/preprocessing/test_kitchen_sinks.py +++ b/tests/components/feature_preprocessing/test_kitchen_sinks.py @@ -2,7 +2,7 @@ import numpy as np -from ParamSklearn.components.preprocessing.kitchen_sinks import RandomKitchenSinks +from ParamSklearn.components.feature_preprocessing.kitchen_sinks import RandomKitchenSinks from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase diff --git a/tests/components/preprocessing/test_liblinear.py b/tests/components/feature_preprocessing/test_liblinear.py similarity index 95% rename from tests/components/preprocessing/test_liblinear.py rename to tests/components/feature_preprocessing/test_liblinear.py index 668abe440c..4a8bf0cc3a 100644 --- a/tests/components/preprocessing/test_liblinear.py +++ b/tests/components/feature_preprocessing/test_liblinear.py @@ -1,7 +1,7 @@ import unittest from sklearn.linear_model import RidgeClassifier -from ParamSklearn.components.preprocessing.liblinear_svc_preprocessor import \ +from ParamSklearn.components.feature_preprocessing.liblinear_svc_preprocessor import \ LibLinear_Preprocessor from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset diff --git a/tests/components/preprocessing/test_nystroem_sampler.py b/tests/components/feature_preprocessing/test_nystroem_sampler.py similarity index 76% rename from tests/components/preprocessing/test_nystroem_sampler.py rename to tests/components/feature_preprocessing/test_nystroem_sampler.py index 9f06b8058c..361c85d922 100644 --- a/tests/components/preprocessing/test_nystroem_sampler.py +++ b/tests/components/feature_preprocessing/test_nystroem_sampler.py @@ -1,8 +1,9 @@ import unittest import numpy as np +import sklearn.preprocessing -from ParamSklearn.components.preprocessing.nystroem_sampler import \ +from ParamSklearn.components.feature_preprocessing.nystroem_sampler import \ Nystroem from ParamSklearn.util import _test_preprocessing, get_dataset @@ -14,6 +15,25 @@ def test_default_configuration(self): self.assertEqual(transformation.shape[1], 100) self.assertFalse((transformation == 0).all()) + # Custon preprocessing test to check if clipping to zero works + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + original_X_train = X_train.copy() + ss = sklearn.preprocessing.StandardScaler() + X_train = ss.fit_transform(X_train) + configuration_space = Nystroem.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + + preprocessor = Nystroem(random_state=1, + **{hp_name: default[hp_name] + for hp_name in default + if default[hp_name] is not None}) + + transformer = preprocessor.fit(X_train, Y_train) + transformation, original = transformer.transform( + X_train), original_X_train + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], 100) + #@unittest.skip("Right now, the RBFSampler returns a float64 array!") def _test_preprocessing_dtype(self): # Dense diff --git a/tests/components/preprocessing/test_pca.py b/tests/components/feature_preprocessing/test_pca.py similarity index 92% rename from tests/components/preprocessing/test_pca.py rename to tests/components/feature_preprocessing/test_pca.py index b56c3d61aa..a764742d37 100644 --- a/tests/components/preprocessing/test_pca.py +++ b/tests/components/feature_preprocessing/test_pca.py @@ -2,7 +2,7 @@ import numpy as np -from ParamSklearn.components.preprocessing.pca import PCA +from ParamSklearn.components.feature_preprocessing.pca import PCA from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase diff --git a/tests/components/preprocessing/test_polynomial.py b/tests/components/feature_preprocessing/test_polynomial.py similarity index 96% rename from tests/components/preprocessing/test_polynomial.py rename to tests/components/feature_preprocessing/test_polynomial.py index b432ec6372..5903f89e9b 100644 --- a/tests/components/preprocessing/test_polynomial.py +++ b/tests/components/feature_preprocessing/test_polynomial.py @@ -1,7 +1,7 @@ import unittest from sklearn.linear_model import RidgeClassifier -from ParamSklearn.components.preprocessing.polynomial import \ +from ParamSklearn.components.feature_preprocessing.polynomial import \ PolynomialFeatures from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset diff --git a/tests/components/preprocessing/test_random_trees_embedding.py b/tests/components/feature_preprocessing/test_random_trees_embedding.py similarity index 96% rename from tests/components/preprocessing/test_random_trees_embedding.py rename to tests/components/feature_preprocessing/test_random_trees_embedding.py index 8844d13fba..e7d102bc00 100644 --- a/tests/components/preprocessing/test_random_trees_embedding.py +++ b/tests/components/feature_preprocessing/test_random_trees_embedding.py @@ -3,7 +3,7 @@ import numpy as np import scipy.sparse -from ParamSklearn.components.preprocessing.random_trees_embedding import \ +from ParamSklearn.components.feature_preprocessing.random_trees_embedding import \ RandomTreesEmbedding from ParamSklearn.util import _test_preprocessing, get_dataset diff --git a/tests/components/preprocessing/test_select_percentile_classification.py b/tests/components/feature_preprocessing/test_select_percentile_classification.py similarity index 76% rename from tests/components/preprocessing/test_select_percentile_classification.py rename to tests/components/feature_preprocessing/test_select_percentile_classification.py index fb856f3fa5..593e51e70d 100644 --- a/tests/components/preprocessing/test_select_percentile_classification.py +++ b/tests/components/feature_preprocessing/test_select_percentile_classification.py @@ -2,8 +2,9 @@ import numpy as np import scipy.sparse +import sklearn.preprocessing -from ParamSklearn.components.preprocessing.select_percentile_classification import SelectPercentileClassification +from ParamSklearn.components.feature_preprocessing.select_percentile_classification import SelectPercentileClassification from ParamSklearn.util import _test_preprocessing, get_dataset @@ -19,6 +20,23 @@ def test_default_configuration(self): self.assertEqual(transformation.shape[0], original.shape[0]) self.assertEqual(transformation.shape[1], int(original.shape[1]/2)) + # Custon preprocessing test to check if clipping to zero works + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + original_X_train = X_train.copy() + ss = sklearn.preprocessing.StandardScaler() + X_train = ss.fit_transform(X_train) + configuration_space = SelectPercentileClassification.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + + preprocessor = SelectPercentileClassification(random_state=1, + **{hp_name: default[hp_name] for hp_name in + default if default[hp_name] is not None}) + + transformer = preprocessor.fit(X_train, Y_train) + transformation, original = transformer.transform(X_train), original_X_train + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], int(original.shape[1] / 2)) + def test_preprocessing_dtype(self): # Dense # np.float32 diff --git a/tests/components/preprocessing/test_select_percentile_regression.py b/tests/components/feature_preprocessing/test_select_percentile_regression.py similarity index 94% rename from tests/components/preprocessing/test_select_percentile_regression.py rename to tests/components/feature_preprocessing/test_select_percentile_regression.py index 6326023918..379b49349a 100644 --- a/tests/components/preprocessing/test_select_percentile_regression.py +++ b/tests/components/feature_preprocessing/test_select_percentile_regression.py @@ -2,7 +2,7 @@ import numpy as np -from ParamSklearn.components.preprocessing.select_percentile_regression import SelectPercentileRegression +from ParamSklearn.components.feature_preprocessing.select_percentile_regression import SelectPercentileRegression from ParamSklearn.util import _test_preprocessing, get_dataset diff --git a/tests/components/preprocessing/test_select_rates.py b/tests/components/feature_preprocessing/test_select_rates.py similarity index 72% rename from tests/components/preprocessing/test_select_rates.py rename to tests/components/feature_preprocessing/test_select_rates.py index 5d89c99cd3..952820fe16 100644 --- a/tests/components/preprocessing/test_select_rates.py +++ b/tests/components/feature_preprocessing/test_select_rates.py @@ -2,8 +2,9 @@ import numpy as np import scipy.sparse +import sklearn.preprocessing -from ParamSklearn.components.preprocessing.select_rates import \ +from ParamSklearn.components.feature_preprocessing.select_rates import \ SelectRates from ParamSklearn.util import _test_preprocessing, get_dataset @@ -21,6 +22,28 @@ def test_default_configuration(self): self.assertEqual(transformation.shape[0], original.shape[0]) self.assertEqual(transformation.shape[1], int(original.shape[1] / 2)) + # Custon preprocessing test to check if clipping to zero works + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + original_X_train = X_train.copy() + ss = sklearn.preprocessing.StandardScaler() + X_train = ss.fit_transform(X_train) + configuration_space = SelectRates.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + + preprocessor = SelectRates(random_state=1, + **{hp_name: default[hp_name] + for hp_name in default + if default[hp_name] is not None}) + + transformer = preprocessor.fit(X_train, Y_train) + transformation, original = transformer.transform( + X_train), original_X_train + self.assertEqual(transformation.shape[0], original.shape[0]) + # I don't know why its 52 here and not 32 which would be half of the + # number of features. Seems to be related to a runtime warning raised + # by sklearn + self.assertEqual(transformation.shape[1], 52) + def test_preprocessing_dtype(self): # Dense # np.float32 diff --git a/tests/components/preprocessing/test_truncatedSVD.py b/tests/components/feature_preprocessing/test_truncatedSVD.py similarity index 96% rename from tests/components/preprocessing/test_truncatedSVD.py rename to tests/components/feature_preprocessing/test_truncatedSVD.py index 9bffc7226d..e43cc4e569 100644 --- a/tests/components/preprocessing/test_truncatedSVD.py +++ b/tests/components/feature_preprocessing/test_truncatedSVD.py @@ -1,7 +1,7 @@ import unittest from sklearn.linear_model import RidgeClassifier -from ParamSklearn.components.preprocessing.truncatedSVD import \ +from ParamSklearn.components.feature_preprocessing.truncatedSVD import \ TruncatedSVD from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset diff --git a/tests/components/preprocessing/test_scaling.py b/tests/components/preprocessing/test_scaling.py deleted file mode 100644 index 489db4693e..0000000000 --- a/tests/components/preprocessing/test_scaling.py +++ /dev/null @@ -1,37 +0,0 @@ -import unittest - -import numpy as np -import sklearn.datasets - -from ParamSklearn.components.preprocessing.rescaling import Rescaling -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase - - -class ScalingComponentTest(PreprocessingTestCase): - def test_boston_is_not_scaled(self): - data = sklearn.datasets.load_boston()['data'] - self.assertGreaterEqual(np.max(data), 100) - - def test_default_configuration(self): - transformations = [] - for i in range(10): - transformation, original = _test_preprocessing(Rescaling, - dataset='boston') - # The maximum is around 1.95 for the transformed array... - self.assertLessEqual(np.max(transformation), 2) - self.assertFalse((original == transformation).all()) - transformations.append(transformation) - if len(transformations) > 1: - self.assertTrue( - (transformations[-1] == transformations[-2]).all()) - - def test_default_configuration_with_sparse_data(self): - preprocessing = _test_preprocessing(Rescaling, dataset='boston', - make_sparse=True) - transformation, original = preprocessing - self.assertEqual(original.getnnz(), transformation.getnnz()) - self.assertAlmostEqual(1, transformation.max(), places=6) - self.assertTrue(~np.allclose(original.data, transformation.data)) - - def test_preprocessing_dtype(self): - super(ScalingComponentTest, self)._test_preprocessing_dtype(Rescaling) diff --git a/tests/components/regression/test_ridge_regression.py b/tests/components/regression/test_ridge_regression.py index e6e2ee5aad..15cbea26c6 100644 --- a/tests/components/regression/test_ridge_regression.py +++ b/tests/components/regression/test_ridge_regression.py @@ -1,7 +1,7 @@ import unittest from ParamSklearn.components.regression.ridge_regression import RidgeRegression -from ParamSklearn.components.preprocessing.kitchen_sinks import RandomKitchenSinks +from ParamSklearn.components.feature_preprocessing.kitchen_sinks import RandomKitchenSinks from ParamSklearn.util import _test_regressor, get_dataset import sklearn.metrics diff --git a/tests/test_base.py b/tests/test_base.py index 5a1d7a1234..0326bd0c7d 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -3,7 +3,7 @@ import HPOlibConfigSpace.configuration_space import ParamSklearn.base -import ParamSklearn.components.preprocessing +import ParamSklearn.components.feature_preprocessing import ParamSklearn.components.classification class BaseTest(unittest.TestCase): @@ -14,9 +14,9 @@ def test_get_hyperparameter_configuration_space_3choices(self): dataset_properties = {} exclude = {} include = {} - pipeline = [('p0', ParamSklearn.components.preprocessing._preprocessors[ + pipeline = [('p0', ParamSklearn.components.feature_preprocessing._preprocessors[ 'preprocessor']), - ('p1', ParamSklearn.components.preprocessing._preprocessors[ + ('p1', ParamSklearn.components.feature_preprocessing._preprocessors[ 'preprocessor']), ('c', ParamSklearn.components.classification._classifiers[ 'classifier'])] @@ -25,15 +25,73 @@ def test_get_hyperparameter_configuration_space_3choices(self): self.assertEqual(len(cs.get_hyperparameter("p0:__choice__").choices), 14) self.assertEqual(len(cs.get_hyperparameter("p1:__choice__").choices), 16) - self.assertEqual(143, len(cs.forbidden_clauses)) + + #for clause in sorted([str(clause) for clause in cs.forbidden_clauses]): + # print clause + self.assertEqual(151, len(cs.forbidden_clauses)) + + cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() + dataset_properties = {'signed': True} + include = {'c': ['multinomial_nb']} + cs = base._get_hyperparameter_search_space(cs, dataset_properties, + exclude, include, pipeline) + self.assertEqual(len(cs.get_hyperparameter("p0:__choice__").choices), + 14) + self.assertEqual(len(cs.get_hyperparameter("p1:__choice__").choices), + 10) + self.assertEqual(len(cs.get_hyperparameter("c:__choice__").choices), + 1) + # Mostly combinations of p0 making the data unsigned and p1 not + # changing the values of the data points + self.assertEqual(74, len(cs.forbidden_clauses)) + + + cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() + dataset_properties = {'signed': True} + include = {} + cs = base._get_hyperparameter_search_space(cs, dataset_properties, + exclude, include, pipeline) + self.assertEqual(len(cs.get_hyperparameter("p0:__choice__").choices), + 14) + self.assertEqual(len(cs.get_hyperparameter("p1:__choice__").choices), + 16) + self.assertEqual(len(cs.get_hyperparameter("c:__choice__").choices), + 17) + self.assertEqual(126, len(cs.forbidden_clauses)) + cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() dataset_properties = {'sparse': True} cs = base._get_hyperparameter_search_space(cs, dataset_properties, exclude, include, pipeline) + self.assertEqual(len(cs.get_hyperparameter("p0:__choice__").choices), + 11) + self.assertEqual(len(cs.get_hyperparameter("p1:__choice__").choices), + 16) + self.assertEqual(409, len(cs.forbidden_clauses)) + for clause in sorted([str(clause) for clause in cs.forbidden_clauses]): + print clause + + print + print + print + print + + + cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() + dataset_properties = {'sparse': True, 'signed': True} + cs = base._get_hyperparameter_search_space(cs, dataset_properties, + exclude, include, pipeline) + + for clause in sorted([str(clause) for clause in cs.forbidden_clauses]): + print clause self.assertEqual(len(cs.get_hyperparameter("p0:__choice__").choices), 11) self.assertEqual(len(cs.get_hyperparameter("p1:__choice__").choices), 16) - self.assertEqual(387, len(cs.forbidden_clauses)) + # Data is guaranteed to be positive in cases like densifier, + # extra_trees_preproc, multinomial_nb -> less constraints + self.assertEqual(364, len(cs.forbidden_clauses)) + + diff --git a/tests/test_classification.py b/tests/test_classification.py index 15dc3af012..fdf7ec0da7 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -1,5 +1,3 @@ -__author__ = 'feurerm' - import resource import sys import traceback @@ -22,9 +20,9 @@ from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm import ParamSklearn.components.classification as classification_components -import ParamSklearn.components.preprocessing as preprocessing_components +import ParamSklearn.components.feature_preprocessing as preprocessing_components from ParamSklearn.util import get_dataset -from ParamSklearn.constants import DENSE, SPARSE, PREDICTIONS +from ParamSklearn.constants import * class TestParamSklearnClassifier(unittest.TestCase): @@ -42,7 +40,7 @@ def test_io_dict(self): self.assertIsInstance(inp, tuple) self.assertIsInstance(output, tuple) for i in inp: - self.assertIn(i, (SPARSE, DENSE)) + self.assertIn(i, (SPARSE, DENSE, SIGNED_DATA, UNSIGNED_DATA)) self.assertEqual(output, (PREDICTIONS,)) self.assertIn('handles_regression', props) self.assertFalse(props['handles_regression']) @@ -103,32 +101,105 @@ def test_configurations(self): predicted_probabiliets = cls.predict_proba(X_test_) self.assertIsInstance(predicted_probabiliets, np.ndarray) except ValueError as e: - if "Floating-point under-/overflow occurred at epoch" in \ - e.message or \ - "removed all features" in e.message: + #if "Floating-point under-/overflow occurred at epoch" in \ + # e.message or \ + if "removed all features" in e.message or \ + "all features are discarded" in e.message: continue else: print config print traceback.format_exc() raise e - except LinAlgError as e: - if "not positive definite, even with jitter" in e.message: + # except LinAlgError as e: + # if "not positive definite, even with jitter" in e.message: + # continue + # else: + # print config + # print traceback.format_exc() + # raise e + #except AttributeError as e: + # # Some error in QDA + # if "log" == e.message: + # print config + # print traceback.format_exc() + # raise e + # continue + # else: + # print config + # print traceback.format_exc() + # raise e + except RuntimeWarning as e: + if "invalid value encountered in sqrt" in e.message: + continue + elif "divide by zero encountered in divide" in e.message: + continue + elif "invalid value encountered in divide" in e.message: continue else: print config print traceback.format_exc() raise e - except KeyError as e: - # Some error in QDA - if "log" == e.message: + except UserWarning as e: + if "FastICA did not converge" in e.message: + continue + else: print config print traceback.format_exc() raise e + except MemoryError as e: + continue + + def test_configurations_signed_data(self): + # Use a limit of ~4GiB + limit = 4000 * 1024 * 2014 + resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) + + cs = ParamSklearnClassifier.get_hyperparameter_search_space( + dataset_properties={'signed': True} + ) + + print cs + + for i in range(10): + config = cs.sample_configuration() + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + cls = ParamSklearnClassifier(config, random_state=1) + print config + try: + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + predictions = cls.predict(X_test) + self.assertIsInstance(predictions, np.ndarray) + predicted_probabiliets = cls.predict_proba(X_test_) + self.assertIsInstance(predicted_probabiliets, np.ndarray) + except ValueError as e: + # if "Floating-point under-/overflow occurred at epoch" in \ + # e.message or \ + if "removed all features" in e.message or \ + "all features are discarded" in e.message: continue else: print config print traceback.format_exc() raise e + # except LinAlgError as e: + # if "not positive definite, even with jitter" in e.message: + # continue + # else: + # print config + # print traceback.format_exc() + # raise e + #except AttributeError as e: + # # Some error in QDA + # if "log" == e.message: + # print config + # print traceback.format_exc() + # raise e + # continue + # else: + # print config + # print traceback.format_exc() + # raise e except RuntimeWarning as e: if "invalid value encountered in sqrt" in e.message: continue @@ -151,6 +222,10 @@ def test_configurations(self): continue def test_configurations_sparse(self): + # Use a limit of ~4GiB + limit = 4000 * 1024 * 2014 + resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) + cs = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'sparse': True}) print cs @@ -163,28 +238,29 @@ def test_configurations_sparse(self): try: cls.fit(X_train, Y_train) predictions = cls.predict(X_test) - except ValueError as e: - if "Floating-point under-/overflow occurred at epoch" in \ - e.message or \ - "removed all features" in e.message: + except ValueError as e: + # if "Floating-point under-/overflow occurred at epoch" in \ + # e.message or \ + if "removed all features" in e.message or \ + "all features are discarded" in e.message: continue else: print config traceback.print_tb(sys.exc_info()[2]) raise e - except LinAlgError as e: - if "not positive definite, even with jitter" in e.message: - continue - else: - print config - raise e - except AttributeError as e: - # Some error in QDA - if "log" == e.message: - continue - else: - print config - raise e + # except LinAlgError as e: + # if "not positive definite, even with jitter" in e.message: + # continue + # else: + # print config + # raise e + # except AttributeError as e: + # # Some error in QDA + # if "log" == e.message: + # continue + # else: + # print config + # raise e except RuntimeWarning as e: if "invalid value encountered in sqrt" in e.message: continue @@ -204,8 +280,20 @@ def test_get_hyperparameter_search_space(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space() self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() + + self.assertEqual(len(cs.get_hyperparameter( + 'rescaling:__choice__').choices), 4) + self.assertEqual(len(cs.get_hyperparameter( + 'classifier:__choice__').choices), 17) + self.assertEqual(len(cs.get_hyperparameter( + 'preprocessor:__choice__').choices), 14) + hyperparameters = cs.get_hyperparameters() self.assertEqual(146, len(hyperparameters)) + + #for hp in sorted([str(h) for h in hyperparameters]): + # print hp + # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy self.assertEqual(len(hyperparameters) - 5, len(conditions)) @@ -248,26 +336,16 @@ def test_get_hyperparameter_search_space_preprocessor_contradicts_default_classi " preprocessor:nystroem_sampler:gamma, Value: 0.1\n" " preprocessor:nystroem_sampler:kernel, Value: rbf\n" " preprocessor:nystroem_sampler:n_components, Value: 100\n" - " rescaling:strategy, Value: min/max\n" + " rescaling:__choice__, Value: min/max\n" "violates forbidden clause \(Forbidden: classifier:__choice__ == random_forest && Forbidden: preprocessor:__choice__ == nystroem_sampler\)", ParamSklearnClassifier.get_hyperparameter_search_space, include={'preprocessor': ['nystroem_sampler']}) def test_get_hyperparameter_search_space_only_forbidden_combinations(self): - self.assertRaisesRegexp(ValueError, "Configuration:\n" - " balancing:strategy, Value: none\n" - " classifier:__choice__, Value: multinomial_nb\n" - " classifier:multinomial_nb:alpha, Value: 1.0\n" - " classifier:multinomial_nb:fit_prior, Value: True\n" - " imputation:strategy, Value: mean\n" - " preprocessor:__choice__, Value: truncatedSVD\n" - " preprocessor:truncatedSVD:target_dim, Value: 128\n" - " rescaling:strategy, Value: min/max\n" - "violates forbidden clause \(Forbidden: preprocessor:__choice__ == " - "truncatedSVD && Forbidden: classifier:__choice__ == multinomial_nb\)", + self.assertRaisesRegexp(AssertionError, "No valid pipeline found.", ParamSklearnClassifier.get_hyperparameter_search_space, include={'classifier': ['multinomial_nb'], - 'preprocessor': ['truncatedSVD']}, + 'preprocessor': ['pca']}, dataset_properties={'sparse':True}) # It must also be catched that no classifiers which can handle sparse @@ -285,7 +363,7 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): " classifier:liblinear_svc:tol, Value: 0.0001\n" " imputation:strategy, Value: mean\n" " preprocessor:__choice__, Value: densifier\n" - " rescaling:strategy, Value: min/max\n" + " rescaling:__choice__, Value: min/max\n" "violates forbidden clause \(Forbidden: classifier:__choice__ == liblinear_svc &&" " Forbidden: preprocessor:__choice__ == densifier\)", ParamSklearnClassifier.get_hyperparameter_search_space, @@ -371,7 +449,7 @@ def test_predict_batched_sparse(self): 'classifier:random_forest:max_leaf_nodes': 'None', 'classifier:random_forest:n_estimators': 100, 'classifier:random_forest:min_weight_fraction_leaf': 0.0, - "rescaling:strategy": "min/max"}) + "rescaling:__choice__": "min/max"}) cls = ParamSklearnClassifier(config) # Multiclass @@ -454,7 +532,7 @@ def test_predict_proba_batched_sparse(self): 'classifier:random_forest:max_features': 0.5, 'classifier:random_forest:max_leaf_nodes': 'None', 'classifier:random_forest:n_estimators': 100, - "rescaling:strategy": "min/max"}) + "rescaling:__choice__": "min/max"}) # Multiclass cls = ParamSklearnClassifier(config) diff --git a/tests/test_create_searchspace_util_classification.py b/tests/test_create_searchspace_util_classification.py index 06b0b952e7..b4720710b8 100644 --- a/tests/test_create_searchspace_util_classification.py +++ b/tests/test_create_searchspace_util_classification.py @@ -9,16 +9,16 @@ from ParamSklearn.components.classification.liblinear_svc import LibLinear_SVC from ParamSklearn.components.classification.lda import LDA -from ParamSklearn.components.preprocessing.pca import PCA -from ParamSklearn.components.preprocessing.truncatedSVD import TruncatedSVD -from ParamSklearn.components.preprocessing.no_preprocessing import NoPreprocessing -from ParamSklearn.components.preprocessing.fast_ica import FastICA -from ParamSklearn.components.preprocessing.random_trees_embedding import RandomTreesEmbedding +from ParamSklearn.components.feature_preprocessing.pca import PCA +from ParamSklearn.components.feature_preprocessing.truncatedSVD import TruncatedSVD +from ParamSklearn.components.feature_preprocessing.no_preprocessing import NoPreprocessing +from ParamSklearn.components.feature_preprocessing.fast_ica import FastICA +from ParamSklearn.components.feature_preprocessing.random_trees_embedding import RandomTreesEmbedding import ParamSklearn.create_searchspace_util class TestCreateClassificationSearchspace(unittest.TestCase): - def test_get_match_array(self): + def test_get_match_array_sparse_and_dense(self): # preproc is empty preprocessors = OrderedDict() preprocessors['pca'] = PCA @@ -103,6 +103,9 @@ def get_available_components(self, *args, **kwargs): # lda only allowed after truncatedSVD self.assertEqual(list(m[3].flatten()), [0, 0, 1, 1, 0, 1, 0, 1]) + def test_get_match_array_signed_unsigned_and_binary(self): + pass + @unittest.skip("Not currently working.") def test_add_forbidden(self): m = numpy.ones([2, 3]) diff --git a/tests/test_regression.py b/tests/test_regression.py index b8685cd2aa..8329c12628 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -17,7 +17,7 @@ from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm import ParamSklearn.components.regression as regression_components -import ParamSklearn.components.preprocessing as preprocessing_components +import ParamSklearn.components.feature_preprocessing as preprocessing_components from ParamSklearn.util import get_dataset from ParamSklearn.constants import * @@ -38,7 +38,7 @@ def test_io_dict(self): self.assertIsInstance(inp, tuple) self.assertIsInstance(output, tuple) for i in inp: - self.assertIn(i, (SPARSE, DENSE)) + self.assertIn(i, (SPARSE, DENSE, SIGNED_DATA, UNSIGNED_DATA)) self.assertEqual(output, (PREDICTIONS,)) self.assertIn('handles_regression', props) self.assertTrue(props['handles_regression']) @@ -125,7 +125,7 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): " regressor:random_forest:min_samples_split, Value: 2\n" " regressor:random_forest:min_weight_fraction_leaf, Constant: 0.0\n" " regressor:random_forest:n_estimators, Constant: 100\n" - " rescaling:strategy, Value: min/max\n" + " rescaling:__choice__, Value: min/max\n" "violates forbidden clause \(Forbidden: regressor:__choice__ == random_forest" " && Forbidden: preprocessor:__choice__ == kitchen_sinks\)", ParamSklearnRegressor.get_hyperparameter_search_space, @@ -141,7 +141,7 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): " regressor:ridge_regression:alpha, Value: 1.0\n" " regressor:ridge_regression:fit_intercept, Constant: True\n" " regressor:ridge_regression:tol, Value: 0.0001\n" - " rescaling:strategy, Value: min/max\n" + " rescaling:__choice__, Value: min/max\n" "violates forbidden clause \(Forbidden: regressor:__choice__ == " "ridge_regression && Forbidden: preprocessor:__choice__ == densifier\)", ParamSklearnRegressor.get_hyperparameter_search_space, From b4d2da21ffaafe9b2c9296bbfbbc68f59e9eb15c Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 8 Sep 2015 15:52:49 +0200 Subject: [PATCH 295/352] OneHotEncoder: make faster and add hyperparameter --- ParamSklearn/implementations/OneHotEncoder.py | 311 +++++++++++------- tests/implementations/test_OneHotEncoder.py | 115 +++++-- 2 files changed, 276 insertions(+), 150 deletions(-) diff --git a/ParamSklearn/implementations/OneHotEncoder.py b/ParamSklearn/implementations/OneHotEncoder.py index 1715834824..89f7bdec97 100644 --- a/ParamSklearn/implementations/OneHotEncoder.py +++ b/ParamSklearn/implementations/OneHotEncoder.py @@ -27,11 +27,14 @@ def _transform_selected(X, transform, selected="all", copy=True): ------- X : array or sparse matrix, shape=(n_samples, n_features_new) """ - if selected == "all": return transform(X) + if len(selected) == 0: + return X + X = check_array(X, accept_sparse='csc', force_all_finite=False) + n_features = X.shape[1] ind = np.arange(n_features) sel = np.zeros(n_features, dtype=bool) @@ -39,18 +42,6 @@ def _transform_selected(X, transform, selected="all", copy=True): not_sel = np.logical_not(sel) n_selected = np.sum(sel) - # Add 1 to all categorical colums to avoid loosing them due to slicing - subtract = False - if sparse.isspmatrix_csr(X): - X.data += 1 - subtract = True - X = check_array(X, copy=copy, force_all_finite=False, accept_sparse="csc") - if subtract: - X.data -= 1 - - if len(selected) == 0: - return X - if n_selected == 0: # No features selected. return X @@ -58,32 +49,17 @@ def _transform_selected(X, transform, selected="all", copy=True): # All features selected. return transform(X) else: - # Add 1 to all categorical columns to avoid loosing them due to slicing - if sparse.issparse(X): - for idx in range(n_features): - if idx in ind[sel]: - X.data[X.indptr[idx]:X.indptr[idx + 1]] += 1 - X_ = X[:, ind[sel]] - for idx in range(n_features): - if idx in ind[sel]: - X.data[X.indptr[idx]:X.indptr[idx + 1]] -= 1 - X_.data -= 1 - else: - X_ = X[:, ind[sel]] - - X_sel = transform(X_) + X_sel = transform(X[:, ind[sel]]) X_not_sel = X[:, ind[not_sel]] if sparse.issparse(X_sel) or sparse.issparse(X_not_sel): - return sparse.hstack((X_sel, X_not_sel)).tocsr() + return sparse.hstack((X_sel, X_not_sel), format='csr') else: return np.hstack((X_sel, X_not_sel)) class OneHotEncoder(BaseEstimator, TransformerMixin): - """Don't trust the documentation of this module! - - Encode categorical integer features using a one-hot aka one-of-K scheme. + """Encode categorical integer features using a one-hot aka one-of-K scheme. The input to this transformer should be a matrix of integers, denoting the values taken on by categorical (discrete) features. The output will be @@ -96,6 +72,7 @@ class OneHotEncoder(BaseEstimator, TransformerMixin): Parameters ---------- + categorical_features: "all" or array of indices or mask Specify what features are treated as categorical. @@ -137,7 +114,7 @@ class OneHotEncoder(BaseEstimator, TransformerMixin): >>> enc.fit([[0, 0, 3], [1, 1, 0], [0, 2, 1], \ [1, 0, 2]]) # doctest: +ELLIPSIS OneHotEncoder(categorical_features='all', dtype=<... 'float'>, - n_values='auto', sparse=True) + sparse=True, minimum_fraction=None) >>> enc.n_values_ array([2, 3, 4]) >>> enc.feature_indices_ @@ -153,11 +130,12 @@ class OneHotEncoder(BaseEstimator, TransformerMixin): encoding of dictionary items or strings. """ - def __init__(self, categorical_features="all", - dtype=np.float, sparse=True): + def __init__(self, categorical_features="all", dtype=np.float, + sparse=True, minimum_fraction=None): self.categorical_features = categorical_features self.dtype = dtype self.sparse = sparse + self.minimum_fraction = minimum_fraction def fit(self, X, y=None): """Fit OneHotEncoder to X. @@ -175,69 +153,110 @@ def fit(self, X, y=None): return self def _fit_transform(self, X): + """Assumes X contains only categorical features.""" - # Add 1 to all categorical colums to avoid loosing them due to slicing - subtract = False - if sparse.isspmatrix_csr(X): - X.data += 1 - subtract = True - X = check_array(X, accept_sparse="csc", force_all_finite=False) - if subtract: - X.data -= 1 + # First increment everything by three to account for the fact that + # np.NaN will get an index of two, and 'other' values will get index of + # one, index of zero is not assigned to also work with sparse data + if sparse.issparse(X): + X.data += 3 + X.data[~np.isfinite(X.data)] = 2 + else: + X += 3 + X[~np.isfinite(X)] = 2 + X = check_array(X, accept_sparse='csc', force_all_finite=False, + dtype=int) + + if X.min() < 0: + raise ValueError("X needs to contain only non-negative integers.") n_samples, n_features = X.shape - # By replacing NaNs (which means a column full on NaNs in the - # original data matrix) with a 1, we add a column full of zeros to - # the array - if sparse.isspmatrix_csc(X): - n_values = [0] - for idx in range(n_features): - if X.indptr[idx] == X.indptr[idx+1]: - values_for_idx = 1 + # Remember which values should not be replaced by the value 'other' + if self.minimum_fraction is not None: + do_not_replace_by_other = list() + for column in range(X.shape[1]): + do_not_replace_by_other.append(list()) + + + if sparse.issparse(X): + indptr_start = X.indptr[column] + indptr_end = X.indptr[column + 1] + unique = np.unique(X.data[indptr_start:indptr_end]) + colsize = indptr_end - indptr_start else: - values_for_idx = np.nanmax( - X.data[X.indptr[idx]:X.indptr[idx + 1]]) + 1 - n_values.append(values_for_idx if - np.isfinite(values_for_idx) else 1) + unique = np.unique(X[:, column]) + colsize = X.shape[0] + + for unique_value in unique: + if np.isfinite(unique_value): + if sparse.issparse(X): + indptr_start = X.indptr[column] + indptr_end = X.indptr[column + 1] + count = np.nansum(unique_value == + X.data[indptr_start:indptr_end]) + else: + count = np.nansum(unique_value == X[:, column]) + else: + if sparse.issparse(X): + indptr_start = X.indptr[column] + indptr_end = X.indptr[column + 1] + count = np.nansum(~np.isfinite( + X.data[indptr_start:indptr_end])) + else: + count = np.nansum(~np.isfinite(X[:, column])) + + fraction = float(count) / colsize + if fraction >= self.minimum_fraction: + do_not_replace_by_other[-1].append(unique_value) + + for unique_value in unique: + if unique_value not in do_not_replace_by_other[-1]: + if sparse.issparse(X): + indptr_start = X.indptr[column] + indptr_end = X.indptr[column + 1] + X.data[indptr_start:indptr_end][ + X.data[indptr_start:indptr_end] == + unique_value] = 1 + else: + X[:, column][X[:, column] == unique_value] = 1 + + self.do_not_replace_by_other_ = do_not_replace_by_other + + if sparse.issparse(X): + n_values = X.max(axis=0).toarray().flatten() + 2 + else: + n_values = np.max(X, axis=0) + 2 + + self.n_values_ = n_values + n_values = np.hstack([[0], n_values]) + indices = np.cumsum(n_values) + self.feature_indices_ = indices + + if sparse.issparse(X): row_indices = X.indices + column_indices = [] + for i in range(len(X.indptr) - 1): + nbr = X.indptr[i+1] - X.indptr[i] + column_indices_ = [indices[i]] * nbr + column_indices_ += X.data[X.indptr[i]:X.indptr[i+1]] + column_indices.extend(column_indices_) + data = np.ones(X.data.size) else: - n_values = np.hstack([[0], np.nanmax(X, axis=0) + 1]) - n_values[~np.isfinite(n_values)] = 1 - row_indices = np.tile(np.arange(n_samples, dtype=np.int32), - n_features) - - total_num_values = np.nansum(n_values) - - column_indices = [] - data = [] - feature_indices = [] - - for idx in range(X.shape[1]): - if sparse.isspmatrix_csc(X): - values_ = X.getcol(idx).data - else: - values_ = X[:, idx] - - offset = np.nansum(n_values[:idx+1]) - column_indices_idx = [offset + value if np.isfinite(value) - else offset for value in values_] - data_idx = [1 if np.isfinite(value) else 0 for value in values_] - feature_indices_idx = {value: value + offset - for value in values_ - if np.isfinite(value)} - - column_indices.extend(column_indices_idx) - data.extend(data_idx) - feature_indices.append(feature_indices_idx) - - self.feature_indices_ = feature_indices - self.n_values = n_values - # tocsr() removes zeros in the data which represent NaNs + column_indices = (X + indices[:-1]).ravel() + row_indices = np.repeat(np.arange(n_samples, dtype=np.int32), + n_features) + data = np.ones(n_samples * n_features) + out = sparse.coo_matrix((data, (row_indices, column_indices)), - shape=(n_samples, total_num_values), - dtype=self.dtype).tocsr() - return out if self.sparse else out.toarray() + shape=(n_samples, indices[-1]), + dtype=self.dtype).tocsc() + + mask = np.array(out.sum(axis=0)).ravel() != 0 + active_features = np.where(mask)[0] + out = out[:, active_features] + self.active_features_ = active_features + return out.tocsr() if self.sparse else out.toarray() def fit_transform(self, X, y=None): """Fit OneHotEncoder to X, then transform X. @@ -249,55 +268,91 @@ def fit_transform(self, X, y=None): self.categorical_features, copy=True) def _transform(self, X): - """Assumes X contains only categorical features.""" - X = check_array(X, dtype=np.int, accept_sparse='csc') - - # Add 1 to all categorical colums to avoid loosing them due to slicing - subtract = False - if sparse.isspmatrix_csr(X): - X.data += 1 - subtract = True - X = check_array(X, accept_sparse="csc", force_all_finite=False) - if subtract: - X.data -= 1 + """Asssumes X contains only categorical features.""" + + # First increment everything by three to account for the fact that + # np.NaN will get an index of two, and 'other' values will get index of + # one, index of zero is not assigned to also work with sparse data + if sparse.issparse(X): + X.data += 3 + X.data[~np.isfinite(X.data)] = 2 + else: + X += 3 + X[~np.isfinite(X)] = 2 + + X = check_array(X, accept_sparse='csc', force_all_finite=False, + dtype=int) + if X.min() < 0: + raise ValueError("X needs to contain only non-negative integers.") n_samples, n_features = X.shape indices = self.feature_indices_ - if n_features != len(indices): + if n_features != indices.shape[0] - 1: raise ValueError("X has different shape than during fitting." " Expected %d, got %d." - % (len(indices), n_features)) + % (indices.shape[0] - 1, n_features)) + + # Replace all indicators which were below `minimum_fraction` in the + # training set by 'other' + if self.minimum_fraction is not None: + for column in range(X.shape[1]): + if sparse.issparse(X): + indptr_start = X.indptr[column] + indptr_end = X.indptr[column + 1] + unique = np.unique(X.data[indptr_start:indptr_end]) + else: + unique = np.unique(X[:, column]) + + for unique_value in unique: + if unique_value not in self.do_not_replace_by_other_[ + column]: + if sparse.issparse(X): + indptr_start = X.indptr[column] + indptr_end = X.indptr[column + 1] + X.data[indptr_start:indptr_end][ + X.data[indptr_start:indptr_end] == + unique_value] = 1 + else: + X[:, column][X[:, column] == unique_value] = 1 - if sparse.isspmatrix_csc(X): - row_indices = X.indices + if sparse.issparse(X): + n_values_check = X.max(axis=0).toarray().flatten() + 1 else: - row_indices = np.tile(np.arange(n_samples, dtype=np.int32), - n_features) - - data = [] - column_indices = [] - - for idx, feature in enumerate(range(n_features)): - if sparse.isspmatrix_csc(X): - values_ = X.getcol(idx).data - else: - values_ = X[:, idx] - - offset = np.sum(self.n_values[:idx+1]) - feature_indices_idx = self.feature_indices_[idx] - column_indices_idx = [feature_indices_idx.get(x, offset) - for x in values_] - data_idx = [1 if feature_indices_idx.get(x) is not None else 0 - for x in values_] - - column_indices.extend(column_indices_idx) - data.extend(data_idx) + n_values_check = np.max(X, axis=0) + 1 + + # Replace all indicators which are out of bounds by 'other' (index 0) + if (n_values_check > self.n_values_).any(): + # raise ValueError("Feature out of bounds. Try setting n_values.") + for i, n_value_check in enumerate(n_values_check): + if (n_value_check - 1) >= self.n_values_[i]: + if sparse.issparse(X): + indptr_start = X.indptr[i] + indptr_end = X.indptr[i+1] + X.data[indptr_start:indptr_end][X.data + [indptr_start:indptr_end] >= self.n_values_[i]] = 0 + else: + X[:, i][X[:, i] >= self.n_values_[i]] = 0 + if sparse.issparse(X): + row_indices = X.indices + column_indices = [] + for i in range(len(X.indptr) - 1): + nbr = X.indptr[i + 1] - X.indptr[i] + column_indices_ = [indices[i]] * nbr + column_indices_ += X.data[X.indptr[i]:X.indptr[i + 1]] + column_indices.extend(column_indices_) + data = np.ones(X.data.size) + else: + column_indices = (X + indices[:-1]).ravel() + row_indices = np.repeat(np.arange(n_samples, dtype=np.int32), + n_features) + data = np.ones(n_samples * n_features) out = sparse.coo_matrix((data, (row_indices, column_indices)), - shape=(n_samples, np.sum(self.n_values)), - dtype=self.dtype).tocsr() + shape=(n_samples, indices[-1]), + dtype=self.dtype).tocsc() - return out if self.sparse else out.toarray() + out = out[:, self.active_features_] + return out.tocsr() if self.sparse else out.toarray() def transform(self, X): """Transform X using one-hot encoding. diff --git a/tests/implementations/test_OneHotEncoder.py b/tests/implementations/test_OneHotEncoder.py index 84aeb3134c..1cb24ab08e 100644 --- a/tests/implementations/test_OneHotEncoder.py +++ b/tests/implementations/test_OneHotEncoder.py @@ -7,33 +7,53 @@ from ParamSklearn.implementations.OneHotEncoder import OneHotEncoder dense1 = np.array([[0, 1, 0], - [0, 0, 0]]) -dense1_1h = np.array([[1, 0, 1, 1], - [1, 1, 0 , 1]]) - + [0, 0, 0], + [1, 1, 0]]) +dense1_1h = np.array([[1, 0, 0, 1, 1], + [1, 0, 1, 0, 1], + [0, 1, 0, 1, 1]]) +dense1_1h_minimum_fraction = np.array([[0, 1, 0, 1, 1], + [0, 1, 1, 0, 1], + [1, 0, 0, 1, 1]]) + +# Including NaNs dense2 = np.array([[0, np.NaN, 0], [np.NaN, 0, 2], - [1, 1, 1]]) -dense2_1h = np.array([[1, 0, 0, 0, 1, 0, 0], - [0, 0, 1, 0, 0, 0, 1], - [0, 1, 0, 1, 0, 1, 0]]) -dense2_partial_1h = np.array([[1., 0., 0., 0., 0.], - [0., 0., 1., 0., 2.], - [0., 1., 0., 1., 1.]]) + [1, 1, 1], + [np.NaN, 0, 1]]) +dense2_1h = np.array([[0, 1, 0, 1, 0, 0, 1, 0, 0], + [1, 0, 0, 0, 1, 0, 0, 0, 1], + [0, 0, 1, 0, 0, 1, 0, 1, 0], + [1, 0, 0, 0, 1, 0, 0, 1, 0]]) + +dense2_1h_minimum_fraction = np.array([[1, 0, 1, 0, 1, 0], + [0, 1, 0, 1, 1, 0], + [1, 0, 1, 0, 0, 1], + [0, 1, 0, 1, 0, 1]]) + +dense2_partial_1h = np.array([[0., 1., 0., 1., 0., 0., 0.], + [1., 0., 0., 0., 1., 0., 2.], + [0., 0., 1., 0., 0., 1., 1.], + [1., 0., 0., 0., 1., 0., 1.]]) + +dense2_1h_minimum_fraction_as_sparse = np.array([[0, 0, 1, 0, 0, 0], + [0, 1, 0, 0, 1, 0], + [1, 0, 0, 1, 0, 1], + [0, 1, 0, 0, 0, 1]]) # All NaN slice dense3 = np.array([[0, 1, np.NaN], [1, 0, np.NaN]]) -dense3_1h = np.array([[1, 0, 0, 1, 0], - [0, 1, 1, 0, 0]]) +dense3_1h = np.array([[1, 0, 0, 1, 1], + [0, 1, 1, 0, 1]]) -sparse1 = scipy.sparse.csc_matrix(([2, 1, 0, 0, 1, 2], +sparse1 = scipy.sparse.csc_matrix(([3, 2, 1, 1, 2, 3], ((1, 4, 5, 2, 3, 5), (0, 0, 0, 1, 1, 1))), shape=(6, 2)) sparse1_1h = scipy.sparse.csc_matrix(([1, 1, 1, 1, 1, 1], ((5, 4, 1, 2, 3, 5), (0, 1, 2, 3, 4, 5))), shape=(6, 6)) -sparse1_paratial_1h = scipy.sparse.csc_matrix(([1, 1, 1, 0, 1, 2], +sparse1_paratial_1h = scipy.sparse.csc_matrix(([1, 1, 1, 1, 2, 3], ((5, 4, 1, 2, 3, 5), (0, 1, 2, 3, 3, 3))), shape=(6, 4)) @@ -59,16 +79,27 @@ def test_dense1(self): self.fit_then_transform(dense1_1h, dense1) self.fit_then_transform_dense(dense1_1h, dense1) + def test_dense1_minimum_fraction(self): + self.fit_then_transform(dense1_1h_minimum_fraction, dense1, minimum_fraction=0.5) + self.fit_then_transform_dense(dense1_1h_minimum_fraction, dense1, minimum_fraction=0.5) + def test_dense2(self): self.fit_then_transform(dense2_1h, dense2) self.fit_then_transform_dense(dense2_1h, dense2) + def test_dense2_minimum_fraction(self): + self.fit_then_transform(dense2_1h_minimum_fraction, dense2, + minimum_fraction=0.3) + self.fit_then_transform_dense(dense2_1h_minimum_fraction, dense2, + minimum_fraction=0.3) + def test_dense2_with_non_sparse_components(self): self.fit_then_transform(dense2_partial_1h, dense2, categorical_features=[True, True, False]) self.fit_then_transform_dense(dense2_partial_1h, dense2, categorical_features=[True, True, False]) + # Minimum fraction is not too interesting here... def test_dense3(self): self.fit_then_transform(dense3_1h, dense3) self.fit_then_transform_dense(dense3_1h, dense3) @@ -77,6 +108,14 @@ def test_sparse1(self): self.fit_then_transform(sparse1_1h.todense(), sparse1) self.fit_then_transform_dense(sparse1_1h.todense(), sparse1) + def test_sparse1_minimum_fraction(self): + expected = np.array([[0, 1, 0, 0, 1, 1], + [0, 0, 1, 1, 0, 1]], dtype=float).transpose() + self.fit_then_transform(expected, sparse1, + minimum_fraction=0.5) + self.fit_then_transform_dense(expected, sparse1, + minimum_fraction=0.5) + def test_sparse1_with_non_sparse_components(self): self.fit_then_transform(sparse1_paratial_1h.todense(), sparse1, categorical_features=[True, False]) @@ -91,31 +130,54 @@ def test_sparse2(self): self.fit_then_transform(sparse2_1h.todense(), sparse2) self.fit_then_transform_dense(sparse2_1h.todense(), sparse2) + def test_sparse2_minimum_fraction(self): + expected = np.array([[0, 1, 0, 0, 1, 1], + [0, 0, 1, 1, 0, 1]], dtype=float).transpose() + self.fit_then_transform(expected, sparse2, + minimum_fraction=0.5) + self.fit_then_transform_dense(expected, sparse2, + minimum_fraction=0.5) + def test_sparse2_csr(self): self.fit_then_transform(sparse2_csr_1h.todense(), sparse2_csr) self.fit_then_transform_dense(sparse2_csr_1h.todense(), sparse2_csr) - def fit_then_transform(self, expected, input, categorical_features='all'): - ohe = OneHotEncoder(categorical_features=categorical_features) + def test_sparse_on_dense2_minimum_fraction(self): + sparse = scipy.sparse.csr_matrix(dense2) + self.fit_then_transform(dense2_1h_minimum_fraction_as_sparse, sparse, + minimum_fraction=0.5) + self.fit_then_transform_dense(dense2_1h_minimum_fraction_as_sparse, sparse, + minimum_fraction=0.5) + + def fit_then_transform(self, expected, input, categorical_features='all', + minimum_fraction=None): + # Test fit_transform + ohe = OneHotEncoder(categorical_features=categorical_features, + minimum_fraction=minimum_fraction) transformation = ohe.fit_transform(input.copy()) self.assertIsInstance(transformation, scipy.sparse.csr_matrix) - assert_array_almost_equal(expected, transformation.todense()) + assert_array_almost_equal(expected.astype(float), + transformation.todense()) - ohe2 = OneHotEncoder(categorical_features=categorical_features) + # Test fit, and afterwards transform + ohe2 = OneHotEncoder(categorical_features=categorical_features, + minimum_fraction=minimum_fraction) ohe2.fit(input.copy()) transformation = ohe2.transform(input.copy()) self.assertIsInstance(transformation, scipy.sparse.csr_matrix) assert_array_almost_equal(expected, transformation.todense()) - def fit_then_transform_dense(self, expected, input, categorical_features='all'): + def fit_then_transform_dense(self, expected, input, + categorical_features='all', + minimum_fraction=None): ohe = OneHotEncoder(categorical_features=categorical_features, - sparse=False) + sparse=False, minimum_fraction=minimum_fraction) transformation = ohe.fit_transform(input.copy()) self.assertIsInstance(transformation, np.ndarray) assert_array_almost_equal(expected, transformation) ohe2 = OneHotEncoder(categorical_features=categorical_features, - sparse=False) + sparse=False, minimum_fraction=minimum_fraction) ohe2.fit(input.copy()) transformation = ohe2.transform(input.copy()) self.assertIsInstance(transformation, np.ndarray) @@ -129,4 +191,13 @@ def test_transform_with_unknown_value(self): output = ohe.transform(test_data).todense() self.assertEqual(5, np.sum(output)) + input = np.array(((0, 1, 2, 3, 4, 5), (0, 1, 2, 3, 4, 5))).transpose() + ips = scipy.sparse.csr_matrix(input) + ohe = OneHotEncoder() + ohe.fit(ips) + test_data = np.array(((0, 1, 2, 6), (0, 1, 6, 7))).transpose() + tds = scipy.sparse.csr_matrix(test_data) + output = ohe.transform(tds).todense() + self.assertEqual(3, np.sum(output)) + From 9d97581f32ca14807f8c0286698c1dc04bc0bd97 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 8 Sep 2015 16:23:37 +0200 Subject: [PATCH 296/352] Add OneHotEncoding component and test --- .../data_preprocessing/imputation.py | 4 +- .../data_preprocessing/one_hot_encoding.py | 72 ++ .../components/data_preprocessing/dataset.pkl | 898 ++++++++++++++++++ .../test_one_hot_encoding.py | 95 ++ 4 files changed, 1067 insertions(+), 2 deletions(-) create mode 100644 ParamSklearn/components/data_preprocessing/one_hot_encoding.py create mode 100644 tests/components/data_preprocessing/dataset.pkl create mode 100644 tests/components/data_preprocessing/test_one_hot_encoding.py diff --git a/ParamSklearn/components/data_preprocessing/imputation.py b/ParamSklearn/components/data_preprocessing/imputation.py index 7961762a3b..e8ab45185c 100644 --- a/ParamSklearn/components/data_preprocessing/imputation.py +++ b/ParamSklearn/components/data_preprocessing/imputation.py @@ -1,4 +1,4 @@ -#import ParamSklearn.implementations.Imputation + import sklearn.preprocessing from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -15,7 +15,7 @@ def __init__(self, strategy, random_state=None): def fit(self, X, y=None): self.preprocessor = sklearn.preprocessing.Imputer( - strategy=self.strategy, copy=False) #, dtype=X.dtype) + strategy=self.strategy, copy=False) self.preprocessor = self.preprocessor.fit(X) return self diff --git a/ParamSklearn/components/data_preprocessing/one_hot_encoding.py b/ParamSklearn/components/data_preprocessing/one_hot_encoding.py new file mode 100644 index 0000000000..2b19bb739b --- /dev/null +++ b/ParamSklearn/components/data_preprocessing/one_hot_encoding.py @@ -0,0 +1,72 @@ +import ParamSklearn.implementations.OneHotEncoder + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ + UniformFloatHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition + +from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm +from ParamSklearn.constants import * + + +class OneHotEncoder(ParamSklearnPreprocessingAlgorithm): + def __init__(self, use_minimum_fraction, minimum_fraction, + init_params=None, random_state=None): + # TODO pay attention to the cases when a copy is made (CSR matrices) + self.use_minimum_fraction = use_minimum_fraction + self.minimum_fraction = minimum_fraction + self.init_params = init_params + + def fit(self, X, y=None): + if self.use_minimum_fraction is None or \ + self.use_minimum_fraction.lower() == 'False': + self.minimum_fraction = None + else: + self.minimum_fraction = float(self.minimum_fraction) + + self.preprocessor = ParamSklearn.implementations.OneHotEncoder\ + .OneHotEncoder(minimum_fraction=self.minimum_fraction, + categorical_features=self.init_params) + self.preprocessor = self.preprocessor.fit(X) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': '1Hot', + 'name': 'One Hot Encoder', + 'handles_missing_values': True, + 'handles_nominal_values': True, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + # TODO find out of this is right! + 'handles_sparse': True, + 'handles_dense': True, + 'input': (DENSE, SPARSE, UNSIGNED_DATA), + 'output': (INPUT,), + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + use_minimum_fraction = cs.add_hyperparameter(CategoricalHyperparameter( + "use_minimum_fraction", ["True", "False"], default="True")) + minimum_fraction = cs.add_hyperparameter(UniformFloatHyperparameter( + "minimum_fraction", lower=.0001, upper=0.5, default=0.01, log=True)) + cs.add_condition(EqualsCondition(minimum_fraction, + use_minimum_fraction, 'True')) + return cs + + def __str__(self): + name = self.get_properties()['name'] + return "ParamSklearn %s" % name diff --git a/tests/components/data_preprocessing/dataset.pkl b/tests/components/data_preprocessing/dataset.pkl new file mode 100644 index 0000000000..a976726d5c --- /dev/null +++ b/tests/components/data_preprocessing/dataset.pkl @@ -0,0 +1,898 @@ +nan 0.000000000000000000e+00 1.000000000000000000e+00 8.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.009999990463256836e-01 2.550000000000000000e+02 2.690000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.299999952316284180e+00 1.520000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 1.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 6.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.170000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320099975585937500e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.009999871253967285e-01 1.200099975585937500e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 3.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 3.560000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.399999976158142090e+00 1.320000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 4.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 7.500000000000000000e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.399000048637390137e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.510000228881835938e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 3.748999938964843750e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 1.274900024414062500e+03 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 1.500000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 3.748999938964843750e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.009999990463256836e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 5.150000000000000000e+02 6.100000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 2.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.009999990463256836e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 1.300000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 1.130000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.090000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.000000000000000000e+00 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.090000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 1.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320099975585937500e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.100000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.090000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 4.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 1.130000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 4.510000050067901611e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 1.275000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.525000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.100000023841857910e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.525000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.600000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 5.800000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 8.300000000000000000e+02 8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.000000000000000000e+00 1.525000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.200000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.100999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 1.000000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 5.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 1.000000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 9.660999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.250000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 1.274900024414062500e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.150999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 9.010000228881835938e-01 9.660999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 5.950000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.000000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.299999952316284180e+00 1.090000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.999999761581420898e-01 6.100000000000000000e+02 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.399000048637390137e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.000000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.009999990463256836e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 2.498999938964843750e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.899999976158142090e+00 1.135000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 9.010000228881835938e-01 9.660000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 1.250000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 6.090000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 1.274900024414062500e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.009999990463256836e-01 2.551000061035156250e+02 2.700000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.210000097751617432e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.500000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 2.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.220000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 4.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.299000024795532227e+00 1.050000000000000000e+03 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.750000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 4.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 6.099000244140625000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600999951362609863e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 7.500000000000000000e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 3.748999938964843750e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 2.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.001000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 3.001000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600999951362609863e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 3.000000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 4.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.250000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320099975585937500e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 5.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.900000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 5.950000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.200000000000000000e+03 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 3.550000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.150000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.500000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.000999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.220000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.500000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 7.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.999999761581420898e-01 1.050000000000000000e+03 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.299999952316284180e+00 1.090000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.099000244140625000e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 6.100000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.498999938964843750e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.009999990463256836e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 3.350000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.009999990463256836e-01 2.551000061035156250e+02 2.690000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.000000000000000000e+02 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 8.319000244140625000e+02 8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.210000097751617432e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.275000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.950000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.399999976158142090e+00 1.310000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 2.799999952316284180e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 1.300000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.500000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 1.520000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 6.120000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.525000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 3.000000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.510000050067901611e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200999975204467773e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.001000061035156250e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.500000000000000000e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 6.140000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500999927520751953e+00 1.275000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.590000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 2.001000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.350000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.250000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.090000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.200000000000000000e+03 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.320000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 5.199000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 3.561000061035156250e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 1.900000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500999927520751953e+00 6.000999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 5.000000000000000000e+02 4.120000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.000999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.275000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.009999990463256836e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 3.001000061035156250e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 5.999000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 3.561000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.000000000000000000e+02 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.009999990463256836e-01 3.560000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.090000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 3.001000061035156250e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.190000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000000000000000e+00 6.509999847412109375e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.800000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 1.274900024414062500e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 7.590000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 8.300000000000000000e+02 8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.009999990463256836e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.220000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 1.500000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.299000024795532227e+00 1.050000000000000000e+03 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.001000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.275000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.320000000000000000e+03 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 2.550000000000000000e+02 2.700000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 8.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 3.000000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.999999761581420898e-01 6.100000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 3.748999938964843750e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.250000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.199000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 1.250000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.600000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.525000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.009999990463256836e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.510000050067901611e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 3.350000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 6.090000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.210000097751617432e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.274900024414062500e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.069000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.500000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.525000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.500000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.350000000000000000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.090000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 5.950000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.500000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.100000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.350000000000000000e+02 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 3.748999938964843750e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 2.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 9.660000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 5.000000000000000000e+01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 3.001000061035156250e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.250000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 8.319000244140625000e+02 8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.500000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 3.350000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 3.350000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 1.220000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 0.000000000000000000e+00 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.000000000000000000e+00 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.090000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.999999761581420898e-01 1.050000000000000000e+03 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.525000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 5.999000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 8.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.100000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 4.170000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 2.399999946355819702e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.510000050067901611e-01 1.250000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 8.300000000000000000e+02 8.800000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 5.000000000000000000e+00 diff --git a/tests/components/data_preprocessing/test_one_hot_encoding.py b/tests/components/data_preprocessing/test_one_hot_encoding.py new file mode 100644 index 0000000000..b9c199adfb --- /dev/null +++ b/tests/components/data_preprocessing/test_one_hot_encoding.py @@ -0,0 +1,95 @@ +import os +import unittest + +import numpy as np +from scipy import sparse + +from ParamSklearn.components.data_preprocessing.one_hot_encoding import OneHotEncoder + + +class OneHotEncoderTest(unittest.TestCase): + def setUp(self): + self.categorical = [True, + True, + True, + False, + False, + True, + True, + True, + False, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + False, + False, + False, + True, + True, + True, + True] + this_directory = os.path.dirname(__file__) + self.X_train = np.loadtxt(os.path.join(this_directory, "dataset.pkl")) + + def test_default_configuration(self): + transformations = [] + for i in range(10): + configuration_space = OneHotEncoder.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + + preprocessor = OneHotEncoder(random_state=1, + init_params=self.categorical, + **{hp_name: default[hp_name] for hp_name in + default if default[hp_name] is not None}) + + transformer = preprocessor.fit(self.X_train.copy()) + Xt = transformer.transform(self.X_train.copy()) + transformations.append(Xt) + if len(transformations) > 1: + self.assertFalse( + (transformations[-1].todense() != transformations[-2].todense()).all()) + + def test_default_configuration_sparse_data(self): + transformations = [] + + self.X_train[~np.isfinite(self.X_train)] = 0 + self.X_train = sparse.csc_matrix(self.X_train) + + for i in range(10): + configuration_space = OneHotEncoder.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + + preprocessor = OneHotEncoder(random_state=1, + init_params=self.categorical, + **{hp_name: default[hp_name] for + hp_name in + default if + default[hp_name] is not None}) + + transformer = preprocessor.fit(self.X_train.copy()) + Xt = transformer.transform(self.X_train.copy()) + transformations.append(Xt) + if len(transformations) > 1: + self.assertFalse( + (transformations[-1].todense() != transformations[ + -2].todense()).all()) From e337e60a2c123f05640676e5912f6b2b673f66ac Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 8 Sep 2015 17:22:58 +0200 Subject: [PATCH 297/352] Add OneHotEncoding as a pipeline component --- ParamSklearn/base.py | 2 +- ParamSklearn/classification.py | 4 +- .../data_preprocessing/one_hot_encoding.py | 16 ++-- ParamSklearn/regression.py | 4 +- source/first_steps.rst | 4 +- .../test_one_hot_encoding.py | 29 ++++++- tests/test_classification.py | 81 ++++++++++++++++++- tests/test_regression.py | 8 +- 8 files changed, 132 insertions(+), 16 deletions(-) diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index 76ee81a4ee..d21308f7fd 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -107,7 +107,7 @@ def pre_transform(self, X, y, fit_params=None, init_params=None): components.data_preprocessing_components._preprocessors: _preprocessors = components.data_preprocessing_components._preprocessors else: - raise ValueError() + raise ValueError(preproc_name) preprocessor_object = _preprocessors[preproc_name]( random_state=self.random_state, **preproc_params) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index b1e864983b..c912dc1477 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -230,7 +230,9 @@ def _get_pipeline(): # Add the always active preprocessing components steps.extend( - [["imputation", + [["one_hot_encoding", + components.data_preprocessing._preprocessors['one_hot_encoding']], + ["imputation", components.data_preprocessing._preprocessors['imputation']], ["rescaling", components.data_preprocessing._preprocessors['rescaling']], diff --git a/ParamSklearn/components/data_preprocessing/one_hot_encoding.py b/ParamSklearn/components/data_preprocessing/one_hot_encoding.py index 2b19bb739b..80107ea6e7 100644 --- a/ParamSklearn/components/data_preprocessing/one_hot_encoding.py +++ b/ParamSklearn/components/data_preprocessing/one_hot_encoding.py @@ -10,23 +10,29 @@ class OneHotEncoder(ParamSklearnPreprocessingAlgorithm): - def __init__(self, use_minimum_fraction, minimum_fraction, - init_params=None, random_state=None): + def __init__(self, use_minimum_fraction, minimum_fraction=None, + categorical_features=None, random_state=None): # TODO pay attention to the cases when a copy is made (CSR matrices) self.use_minimum_fraction = use_minimum_fraction self.minimum_fraction = minimum_fraction - self.init_params = init_params + self.categorical_features = categorical_features def fit(self, X, y=None): if self.use_minimum_fraction is None or \ - self.use_minimum_fraction.lower() == 'False': + self.use_minimum_fraction.lower() == 'false': self.minimum_fraction = None else: self.minimum_fraction = float(self.minimum_fraction) + if self.categorical_features is None: + categorical_features = [] + else: + categorical_features = self.categorical_features + self.preprocessor = ParamSklearn.implementations.OneHotEncoder\ .OneHotEncoder(minimum_fraction=self.minimum_fraction, - categorical_features=self.init_params) + categorical_features=categorical_features) + self.preprocessor = self.preprocessor.fit(X) return self diff --git a/ParamSklearn/regression.py b/ParamSklearn/regression.py index abf81383d4..94231167e0 100644 --- a/ParamSklearn/regression.py +++ b/ParamSklearn/regression.py @@ -187,7 +187,9 @@ def _get_pipeline(): # Add the always active preprocessing components steps.extend( - [["imputation", + [["one_hot_encoding", + components.data_preprocessing._preprocessors['one_hot_encoding']], + ["imputation", components.data_preprocessing._preprocessors['imputation']], ["rescaling", components.data_preprocessing._preprocessors['rescaling']]]) diff --git a/source/first_steps.rst b/source/first_steps.rst index 26e89e17c2..6beb7f72e9 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -17,10 +17,10 @@ configuration on the iris dataset. >>> np.random.seed(1) >>> np.random.shuffle(indices) >>> configuration_space = ParamSklearnClassifier.get_hyperparameter_search_space() - >>> configuration_space.seed(3) + >>> configuration_space.seed(2) >>> configuration = configuration_space.sample_configuration() >>> cls = ParamSklearnClassifier(configuration, random_state=1) >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.66000000000000003 + 0.73999999999999999 diff --git a/tests/components/data_preprocessing/test_one_hot_encoding.py b/tests/components/data_preprocessing/test_one_hot_encoding.py index b9c199adfb..067258f6b3 100644 --- a/tests/components/data_preprocessing/test_one_hot_encoding.py +++ b/tests/components/data_preprocessing/test_one_hot_encoding.py @@ -5,6 +5,7 @@ from scipy import sparse from ParamSklearn.components.data_preprocessing.one_hot_encoding import OneHotEncoder +from ParamSklearn.util import _test_preprocessing class OneHotEncoderTest(unittest.TestCase): @@ -58,7 +59,7 @@ def test_default_configuration(self): default = configuration_space.get_default_configuration() preprocessor = OneHotEncoder(random_state=1, - init_params=self.categorical, + categorical_features=self.categorical, **{hp_name: default[hp_name] for hp_name in default if default[hp_name] is not None}) @@ -69,6 +70,17 @@ def test_default_configuration(self): self.assertFalse( (transformations[-1].todense() != transformations[-2].todense()).all()) + def test_default_configuration_no_encoding(self): + transformations = [] + for i in range(10): + transformation, original = _test_preprocessing(OneHotEncoder) + self.assertEqual(transformation.shape, original.shape) + self.assertTrue((transformation == original).all()) + transformations.append(transformation) + if len(transformations) > 1: + self.assertTrue( + (transformations[-1] == transformations[-2]).all()) + def test_default_configuration_sparse_data(self): transformations = [] @@ -80,7 +92,7 @@ def test_default_configuration_sparse_data(self): default = configuration_space.get_default_configuration() preprocessor = OneHotEncoder(random_state=1, - init_params=self.categorical, + categorical_features=self.categorical, **{hp_name: default[hp_name] for hp_name in default if @@ -93,3 +105,16 @@ def test_default_configuration_sparse_data(self): self.assertFalse( (transformations[-1].todense() != transformations[ -2].todense()).all()) + + def test_default_configuration_sparse_no_encoding(self): + transformations = [] + + for i in range(10): + transformation, original = _test_preprocessing(OneHotEncoder, + make_sparse=True) + self.assertEqual(transformation.shape, original.shape) + self.assertTrue((transformation.todense() == original.todense()).all()) + transformations.append(transformation) + if len(transformations) > 1: + self.assertTrue( + (transformations[-1].todense() == transformations[-2].todense()).all()) diff --git a/tests/test_classification.py b/tests/test_classification.py index fdf7ec0da7..4b7c3e8eec 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -1,3 +1,4 @@ +import os import resource import sys import traceback @@ -8,6 +9,7 @@ from scipy.linalg import LinAlgError import sklearn.datasets import sklearn.decomposition +import sklearn.cross_validation import sklearn.ensemble import sklearn.svm from sklearn.utils.testing import assert_array_almost_equal @@ -276,6 +278,73 @@ def test_configurations_sparse(self): print config raise e + def test_configurations_categorical_data(self): + # Use a limit of ~4GiB + limit = 4000 * 1024 * 2014 + resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) + + cs = ParamSklearnClassifier.get_hyperparameter_search_space( + dataset_properties={'sparse': True}) + print cs + for i in range(10): + config = cs.sample_configuration() + print config + categorical = [True, True, True, False, False, True, True, True, + False, True, True, True, True, True, True, True, + True, True, True, True, True, True, True, True, True, + True, True, True, True, True, True, True, False, + False, False, True, True, True] + this_directory = os.path.dirname(__file__) + X = np.loadtxt(os.path.join(this_directory, "components", + "data_preprocessing", "dataset.pkl")) + y = X[:, -1].copy() + X = X[:,:-1] + X_train, X_test, Y_train, Y_test = \ + sklearn.cross_validation.train_test_split(X, y) + + cls = ParamSklearnClassifier(config, random_state=1,) + try: + cls.fit(X_train, Y_train, + init_params={'one_hot_encoding:categorical_features': categorical}) + predictions = cls.predict(X_test) + except ValueError as e: + # if "Floating-point under-/overflow occurred at epoch" in \ + # e.message or \ + if "removed all features" in e.message or \ + "all features are discarded" in e.message: + continue + else: + print config + traceback.print_tb(sys.exc_info()[2]) + raise e + # except LinAlgError as e: + # if "not positive definite, even with jitter" in e.message: + # continue + # else: + # print config + # raise e + # except AttributeError as e: + # # Some error in QDA + # if "log" == e.message: + # continue + # else: + # print config + # raise e + except RuntimeWarning as e: + if "invalid value encountered in sqrt" in e.message: + continue + elif "divide by zero encountered in divide" in e.message: + continue + else: + print config + raise e + except UserWarning as e: + if "FastICA did not converge" in e.message: + continue + else: + print config + raise e + def test_get_hyperparameter_search_space(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space() self.assertIsInstance(cs, ConfigurationSpace) @@ -289,14 +358,14 @@ def test_get_hyperparameter_search_space(self): 'preprocessor:__choice__').choices), 14) hyperparameters = cs.get_hyperparameters() - self.assertEqual(146, len(hyperparameters)) + self.assertEqual(148, len(hyperparameters)) #for hp in sorted([str(h) for h in hyperparameters]): # print hp # The four parameters which are always active are classifier, # preprocessor, imputation strategy and scaling strategy - self.assertEqual(len(hyperparameters) - 5, len(conditions)) + self.assertEqual(len(hyperparameters) - 6, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space( @@ -332,6 +401,8 @@ def test_get_hyperparameter_search_space_preprocessor_contradicts_default_classi " classifier:random_forest:min_weight_fraction_leaf, Constant: 0.0\n" " classifier:random_forest:n_estimators, Constant: 100\n" " imputation:strategy, Value: mean\n" + " one_hot_encoding:minimum_fraction, Value: 0.01\n" + " one_hot_encoding:use_minimum_fraction, Value: True\n" " preprocessor:__choice__, Value: nystroem_sampler\n" " preprocessor:nystroem_sampler:gamma, Value: 0.1\n" " preprocessor:nystroem_sampler:kernel, Value: rbf\n" @@ -362,6 +433,8 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): " classifier:liblinear_svc:penalty, Value: l2\n" " classifier:liblinear_svc:tol, Value: 0.0001\n" " imputation:strategy, Value: mean\n" + " one_hot_encoding:minimum_fraction, Value: 0.01\n" + " one_hot_encoding:use_minimum_fraction, Value: True\n" " preprocessor:__choice__, Value: densifier\n" " rescaling:__choice__, Value: min/max\n" "violates forbidden clause \(Forbidden: classifier:__choice__ == liblinear_svc &&" @@ -439,6 +512,8 @@ def test_predict_batched_sparse(self): values={"balancing:strategy": "none", "classifier:__choice__": "random_forest", "imputation:strategy": "mean", + "one_hot_encoding:minimum_fraction": 0.01, + "one_hot_encoding:use_minimum_fraction": "True", "preprocessor:__choice__": "no_preprocessing", 'classifier:random_forest:bootstrap': 'True', 'classifier:random_forest:criterion': 'gini', @@ -522,6 +597,8 @@ def test_predict_proba_batched_sparse(self): values={"balancing:strategy": "none", "classifier:__choice__": "random_forest", "imputation:strategy": "mean", + "one_hot_encoding:minimum_fraction": 0.01, + "one_hot_encoding:use_minimum_fraction": 'True', "preprocessor:__choice__": "no_preprocessing", 'classifier:random_forest:bootstrap': 'True', 'classifier:random_forest:criterion': 'gini', diff --git a/tests/test_regression.py b/tests/test_regression.py index 8329c12628..98ef1968f1 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -86,8 +86,8 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(133, len(hyperparameters)) - self.assertEqual(len(hyperparameters) - 4, len(conditions)) + self.assertEqual(135, len(hyperparameters)) + self.assertEqual(len(hyperparameters) - 5, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): cs = ParamSklearnRegressor.get_hyperparameter_search_space( @@ -112,6 +112,8 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): def test_get_hyperparameter_search_space_only_forbidden_combinations(self): self.assertRaisesRegexp(ValueError, "Configuration:\n" " imputation:strategy, Value: mean\n" + " one_hot_encoding:minimum_fraction, Value: 0.01\n" + " one_hot_encoding:use_minimum_fraction, Value: True\n" " preprocessor:__choice__, Value: kitchen_sinks\n" " preprocessor:kitchen_sinks:gamma, Value: 1.0\n" " preprocessor:kitchen_sinks:n_components, Value: 100\n" @@ -136,6 +138,8 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): # data are located behind the densifier self.assertRaisesRegexp(ValueError, "Configuration:\n" " imputation:strategy, Value: mean\n" + " one_hot_encoding:minimum_fraction, Value: 0.01\n" + " one_hot_encoding:use_minimum_fraction, Value: True\n" " preprocessor:__choice__, Value: densifier\n" " regressor:__choice__, Value: ridge_regression\n" " regressor:ridge_regression:alpha, Value: 1.0\n" From 481ead7247758d96906f883a383b1f22b7969fdb Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 1 Oct 2015 10:28:56 +0200 Subject: [PATCH 298/352] SVM/SVR adaptive cache size --- .../components/classification/libsvm_svc.py | 13 ++++++++++++- .../components/regression/libsvm_svr.py | 18 ++++++++++++++---- 2 files changed, 26 insertions(+), 5 deletions(-) diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index 79cf20c6a3..75aa340e62 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -1,3 +1,5 @@ +import resource + import sklearn.svm from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -27,6 +29,15 @@ def __init__(self, C, kernel, gamma, shrinking, tol, max_iter, self.estimator = None def fit(self, X, Y): + try: + soft, hard = resource.getrlimit(resource.RLIMIT_AS) + soft /= 1024 * 1024 + print(soft,) + maxrss = resource.getrusage(resource.RUSAGE_SELF)[2] / 1024 + cache_size = (soft - maxrss) / 1.5 + except Exception: + cache_size = 200 + self.C = float(self.C) if self.degree is None: self.degree = 3 @@ -57,7 +68,7 @@ def fit(self, X, Y): class_weight=self.class_weight, max_iter=self.max_iter, random_state=self.random_state, - cache_size=1000) + cache_size=cache_size) # probability=True) self.estimator.fit(X, Y) return self diff --git a/ParamSklearn/components/regression/libsvm_svr.py b/ParamSklearn/components/regression/libsvm_svr.py index fb321aae45..f2a67aeef9 100644 --- a/ParamSklearn/components/regression/libsvm_svr.py +++ b/ParamSklearn/components/regression/libsvm_svr.py @@ -1,3 +1,5 @@ +import resource + import numpy as np import sklearn.svm @@ -14,7 +16,7 @@ class LibSVM_SVR(ParamSklearnRegressionAlgorithm): def __init__(self, kernel, C, epsilon, tol, shrinking, gamma=0.0, - degree=3, coef0=0.0, cache_size=1000, verbose=False, + degree=3, coef0=0.0, verbose=False, max_iter=-1, random_state=None): self.kernel = kernel self.C = C @@ -24,13 +26,22 @@ def __init__(self, kernel, C, epsilon, tol, shrinking, gamma=0.0, self.degree = degree self.gamma = gamma self.coef0 = coef0 - self.cache_size = cache_size self.verbose = verbose self.max_iter = max_iter self.random_state = random_state self.estimator = None def fit(self, X, Y): + try: + soft, hard = resource.getrlimit(resource.RLIMIT_AS) + soft /= 1024 * 1024 + print(soft, ) + maxrss = resource.getrusage(resource.RUSAGE_SELF)[2] / 1024 + cache_size = (soft - maxrss) / 1.5 + + except Exception: + cache_size = 200 + self.C = float(self.C) self.epsilon = float(self.epsilon) self.tol = float(self.tol) @@ -41,7 +52,6 @@ def fit(self, X, Y): self.coef0 = 0.0 else: self.coef0 = float(self.coef0) - self.cache_size = int(self.cache_size) self.verbose = int(self.verbose) self.max_iter = int(self.max_iter) @@ -54,7 +64,7 @@ def fit(self, X, Y): degree=self.degree, gamma=self.gamma, coef0=self.coef0, - cache_size=self.cache_size, + cache_size=cache_size, verbose=self.verbose, max_iter=self.max_iter ) From e51c01c40d76acb3ee243415547a31a86c7d3bf5 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 1 Oct 2015 10:54:26 +0200 Subject: [PATCH 299/352] Python 3 compability --- ParamSklearn/base.py | 9 +- .../components/classification/__init__.py | 14 +- .../components/classification/libsvm_svc.py | 10 +- .../components/data_preprocessing/__init__.py | 11 +- .../data_preprocessing/rescaling.py | 11 +- .../feature_preprocessing/__init__.py | 10 +- .../components/regression/__init__.py | 12 +- .../components/regression/libsvm_svr.py | 11 +- ParamSklearn/create_searchspace_util.py | 20 +- ParamSklearn/implementations/Imputation.py | 1 - ParamSklearn/implementations/gem.py | 2 +- ParamSklearn/util.py | 6 +- source/first_steps.rst | 2 +- .../data_preprocessing/test_balancing.py | 6 +- tests/components/regression/test_sgd.py | 2 - tests/test_base.py | 14 +- tests/test_classification.py | 236 ++++++++---------- tests/test_textclassification.py | 50 ++-- 18 files changed, 189 insertions(+), 238 deletions(-) diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index d21308f7fd..8b2380bcd3 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -1,6 +1,5 @@ -from abc import ABCMeta, abstractmethod -from collections import defaultdict, OrderedDict -import copy +from abc import ABCMeta +from collections import defaultdict import numpy as np import sklearn @@ -86,7 +85,6 @@ def pre_transform(self, X, y, fit_params=None, init_params=None): for preproc_name in preprocessors_names: preproc_params = {} - for instantiated_hyperparameter in self.configuration: if not instantiated_hyperparameter.startswith( preproc_name + ":"): @@ -98,8 +96,6 @@ def pre_transform(self, X, y, fit_params=None, init_params=None): preproc_params[name_] = self.configuration[ instantiated_hyperparameter] - preproc_params.update(init_params_per_method[preproc_name]) - if preproc_name in \ components.feature_preprocessing_components._preprocessors: _preprocessors = components.feature_preprocessing_components._preprocessors @@ -304,7 +300,6 @@ def _get_hyperparameter_search_space(cls, cs, dataset_properties, exclude, cs.add_configuration_space(node_name, node.get_hyperparameter_search_space( dataset_properties, include=choices_list)) - # And now add forbidden parameter configurations # According to matches if np.sum(matches) < np.size(matches): diff --git a/ParamSklearn/components/classification/__init__.py b/ParamSklearn/components/classification/__init__.py index 09abc1bb53..fe0bd04c59 100644 --- a/ParamSklearn/components/classification/__init__.py +++ b/ParamSklearn/components/classification/__init__.py @@ -2,6 +2,7 @@ from collections import OrderedDict import copy +import importlib import inspect import os import pkgutil @@ -12,16 +13,14 @@ from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition -from sklearn.base import BaseEstimator - classifier_directory = os.path.split(__file__)[0] -_classifiers = {} +_classifiers = OrderedDict() for module_loader, module_name, ispkg in pkgutil.iter_modules([classifier_directory]): full_module_name = "%s.%s" % (__package__, module_name) if full_module_name not in sys.modules and not ispkg: - module = module_loader.find_module(module_name).load_module(full_module_name) + module = importlib.import_module(full_module_name) for member_name, obj in inspect.getmembers(module): if inspect.isclass(obj) and ParamSklearnClassificationAlgorithm in obj.__bases__: @@ -98,7 +97,7 @@ def get_hyperparameter_search_space(cls, dataset_properties, if default is None: defaults = ['random_forest', 'liblinear_svc', 'sgd', - 'libsvm_svc'] + available_estimators.keys() + 'libsvm_svc'] + list(available_estimators.keys()) for default_ in defaults: if default_ in available_estimators: if include is not None and default_ not in include: @@ -109,11 +108,10 @@ def get_hyperparameter_search_space(cls, dataset_properties, break estimator = CategoricalHyperparameter('__choice__', - available_estimators.keys(), + list(available_estimators.keys()), default=default) cs.add_hyperparameter(estimator) for estimator_name in available_estimators.keys(): - # We have to retrieve the configuration space every time because # we change the objects it returns. If we reused it, we could not # retrieve the conditions further down @@ -158,7 +156,7 @@ def get_hyperparameter_search_space(cls, dataset_properties, dlc.hyperparameter.name = "%s:%s" % (estimator_name, dlc.hyperparameter.name) cs.add_forbidden_clause(forbidden_clause) - + return cs diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index 75aa340e62..b0b8aa268d 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -31,10 +31,12 @@ def __init__(self, C, kernel, gamma, shrinking, tol, max_iter, def fit(self, X, Y): try: soft, hard = resource.getrlimit(resource.RLIMIT_AS) - soft /= 1024 * 1024 - print(soft,) - maxrss = resource.getrusage(resource.RUSAGE_SELF)[2] / 1024 - cache_size = (soft - maxrss) / 1.5 + if soft > 0: + soft /= 1024 * 1024 + maxrss = resource.getrusage(resource.RUSAGE_SELF)[2] / 1024 + cache_size = (soft - maxrss) / 1.5 + else: + cache_size = 200 except Exception: cache_size = 200 diff --git a/ParamSklearn/components/data_preprocessing/__init__.py b/ParamSklearn/components/data_preprocessing/__init__.py index d6536d41aa..711717a201 100644 --- a/ParamSklearn/components/data_preprocessing/__init__.py +++ b/ParamSklearn/components/data_preprocessing/__init__.py @@ -1,7 +1,5 @@ -__author__ = 'feurerm' - -from collections import OrderedDict -import copy +import collections +import importlib import inspect import os import pkgutil @@ -12,14 +10,13 @@ preprocessors_directory = os.path.split(__file__)[0] -_preprocessors = {} +_preprocessors = collections.OrderedDict() for module_loader, module_name, ispkg in pkgutil.iter_modules( [preprocessors_directory]): full_module_name = "%s.%s" % (__package__, module_name) if full_module_name not in sys.modules and not ispkg: - module = module_loader.find_module(module_name).load_module( - full_module_name) + module = importlib.import_module(full_module_name) for member_name, obj in inspect.getmembers(module): if inspect.isclass( diff --git a/ParamSklearn/components/data_preprocessing/rescaling.py b/ParamSklearn/components/data_preprocessing/rescaling.py index 3646012a0a..537e349ea2 100644 --- a/ParamSklearn/components/data_preprocessing/rescaling.py +++ b/ParamSklearn/components/data_preprocessing/rescaling.py @@ -146,10 +146,10 @@ def __init__(self, **params): @classmethod def get_components(cls): - return {'none': NoRescalingComponent, - 'min/max': MinMaxScalerComponent, - 'standardize': StandardScalerComponent, - 'normalize': NormalizerComponent} + return OrderedDict((('none', NoRescalingComponent), + ('min/max', MinMaxScalerComponent), + ('standardize', StandardScalerComponent), + ('normalize', NormalizerComponent))) @classmethod def get_available_components(cls, data_prop=None, @@ -197,7 +197,8 @@ def get_hyperparameter_search_space(cls, dataset_properties=None, break preprocessor = CategoricalHyperparameter('__choice__', - available_preprocessors.keys(), + list( + available_preprocessors.keys()), default=default) cs.add_hyperparameter(preprocessor) for name in available_preprocessors: diff --git a/ParamSklearn/components/feature_preprocessing/__init__.py b/ParamSklearn/components/feature_preprocessing/__init__.py index 8c8808a338..9c7160d2d9 100644 --- a/ParamSklearn/components/feature_preprocessing/__init__.py +++ b/ParamSklearn/components/feature_preprocessing/__init__.py @@ -1,7 +1,6 @@ -__author__ = 'feurerm' - from collections import OrderedDict import copy +import importlib import inspect import os import pkgutil @@ -14,13 +13,13 @@ preprocessors_directory = os.path.split(__file__)[0] -_preprocessors = {} +_preprocessors = OrderedDict() for module_loader, module_name, ispkg in pkgutil.iter_modules([preprocessors_directory]): full_module_name = "%s.%s" % (__package__, module_name) if full_module_name not in sys.modules and not ispkg: - module = module_loader.find_module(module_name).load_module(full_module_name) + module = importlib.import_module(full_module_name) for member_name, obj in inspect.getmembers(module): if inspect.isclass(obj) and ParamSklearnPreprocessingAlgorithm in obj.__bases__: @@ -101,7 +100,8 @@ def get_hyperparameter_search_space(cls, dataset_properties, break preprocessor = CategoricalHyperparameter('__choice__', - available_preprocessors.keys(), + list( + available_preprocessors.keys()), default=default) cs.add_hyperparameter(preprocessor) for name in available_preprocessors: diff --git a/ParamSklearn/components/regression/__init__.py b/ParamSklearn/components/regression/__init__.py index 051dd0de39..f7b3415343 100644 --- a/ParamSklearn/components/regression/__init__.py +++ b/ParamSklearn/components/regression/__init__.py @@ -1,7 +1,6 @@ -__author__ = ['Katharina Eggensperger', 'Matthias Feurer'] - from collections import OrderedDict import copy +import importlib import inspect import os import pkgutil @@ -13,13 +12,13 @@ from HPOlibConfigSpace.conditions import EqualsCondition regressor_directory = os.path.split(__file__)[0] -_regressors = {} +_regressors = OrderedDict() for module_loader, module_name, ispkg in pkgutil.iter_modules([regressor_directory]): full_module_name = "%s.%s" % (__package__, module_name) if full_module_name not in sys.modules and not ispkg: - module = module_loader.find_module(module_name).load_module(full_module_name) + module = importlib.import_module(full_module_name) for member_name, obj in inspect.getmembers(module): if inspect.isclass(obj) and ParamSklearnRegressionAlgorithm in obj.__bases__: @@ -89,7 +88,8 @@ def get_hyperparameter_search_space(cls, dataset_properties, raise ValueError("No regressors found") if default is None: - defaults = ['random_forest', 'support_vector_regression'] + available_estimators.keys() + defaults = ['random_forest', 'support_vector_regression'] + \ + list(available_estimators.keys()) for default_ in defaults: if default_ in available_estimators: if include is not None and default_ not in include: @@ -100,7 +100,7 @@ def get_hyperparameter_search_space(cls, dataset_properties, break estimator = CategoricalHyperparameter('__choice__', - available_estimators.keys(), + list(available_estimators.keys()), default=default) cs.add_hyperparameter(estimator) for estimator_name in available_estimators.keys(): diff --git a/ParamSklearn/components/regression/libsvm_svr.py b/ParamSklearn/components/regression/libsvm_svr.py index f2a67aeef9..e6faab55d3 100644 --- a/ParamSklearn/components/regression/libsvm_svr.py +++ b/ParamSklearn/components/regression/libsvm_svr.py @@ -34,11 +34,12 @@ def __init__(self, kernel, C, epsilon, tol, shrinking, gamma=0.0, def fit(self, X, Y): try: soft, hard = resource.getrlimit(resource.RLIMIT_AS) - soft /= 1024 * 1024 - print(soft, ) - maxrss = resource.getrusage(resource.RUSAGE_SELF)[2] / 1024 - cache_size = (soft - maxrss) / 1.5 - + if soft > 0: + soft /= 1024 * 1024 + maxrss = resource.getrusage(resource.RUSAGE_SELF)[2] / 1024 + cache_size = (soft - maxrss) / 1.5 + else: + cache_size = 200 except Exception: cache_size = 200 diff --git a/ParamSklearn/create_searchspace_util.py b/ParamSklearn/create_searchspace_util.py index 224009635e..4b6622ad98 100644 --- a/ParamSklearn/create_searchspace_util.py +++ b/ParamSklearn/create_searchspace_util.py @@ -29,12 +29,12 @@ def get_match_array(pipeline, dataset_properties, node_name) if exclude is not None else None if is_choice: - node_i_choices_names.append(node.get_available_components( + node_i_choices_names.append(list(node.get_available_components( dataset_properties, include=node_include, - exclude=node_exclude).keys()) - node_i_choices.append(node.get_available_components( + exclude=node_exclude).keys())) + node_i_choices.append(list(node.get_available_components( dataset_properties, include=node_include, - exclude=node_exclude).values()) + exclude=node_exclude).values())) else: node_i_choices.append([node]) @@ -82,9 +82,9 @@ def get_match_array(pipeline, dataset_properties, elif not data_is_sparse and SPARSE in node_output: data_is_sparse = True else: - print node - print "Data is sparse", data_is_sparse - print node_input, node_output + print(node) + print("Data is sparse", data_is_sparse) + print(node_input, node_output) raise ValueError("This combination is not allowed!") if PREDICTIONS in node_output: @@ -97,9 +97,9 @@ def get_match_array(pipeline, dataset_properties, elif UNSIGNED_DATA in node_output: dataset_is_signed = False else: - print node - print "Data is signed", dataset_is_signed - print node_input, node_output + print(node) + print("Data is signed", dataset_is_signed) + print(node_input, node_output) raise ValueError("This combination is not allowed!") return matches diff --git a/ParamSklearn/implementations/Imputation.py b/ParamSklearn/implementations/Imputation.py index bfafb05988..09d2a7e0e9 100644 --- a/ParamSklearn/implementations/Imputation.py +++ b/ParamSklearn/implementations/Imputation.py @@ -164,7 +164,6 @@ def fit(self, X, y=None): self.missing_values, self.axis) else: - print type(X), X.dtype, self.dtype X = check_array(X, dtype=self.dtype, force_all_finite=False) self.statistics_ = self._dense_fit(X, self.strategy, diff --git a/ParamSklearn/implementations/gem.py b/ParamSklearn/implementations/gem.py index e4110b3342..96d1b3e488 100644 --- a/ParamSklearn/implementations/gem.py +++ b/ParamSklearn/implementations/gem.py @@ -13,7 +13,7 @@ def __init__(self, N, precond): def fit(self, X, Y): - print X.shape, Y.shape + print(X.shape, Y.shape) self.N = min(self.N, X.shape[1]-2) y_max = int(np.max(Y) + 1) self.W = np.zeros((X.shape[1], self.N*y_max*(y_max-1)), dtype=X.dtype) diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index 879cdfcd9c..9ac9f15d50 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -23,7 +23,7 @@ def find_sklearn_classes(class_): # Work around some issues... if module_name in ["hmm", "mixture"]: - print "Skipping %s" % module_name + print("Skipping %s" % module_name) continue module_file = module_loader.__dict__["path"] @@ -39,9 +39,9 @@ def find_sklearn_classes(class_): # print member_name, obj classifiers.add(classifier) - print + print() for classifier in sorted([str(cls) for cls in classifiers]): - print classifier + print(classifier) def get_dataset(dataset='iris', make_sparse=False, add_NaNs=False): diff --git a/source/first_steps.rst b/source/first_steps.rst index 6beb7f72e9..b07014e8f5 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -23,4 +23,4 @@ configuration on the iris dataset. >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.73999999999999999 + 0.90000000000000002 diff --git a/tests/components/data_preprocessing/test_balancing.py b/tests/components/data_preprocessing/test_balancing.py index a82d008318..c10c00de1b 100644 --- a/tests/components/data_preprocessing/test_balancing.py +++ b/tests/components/data_preprocessing/test_balancing.py @@ -55,11 +55,11 @@ def test_balancing_get_weights_svm_sgd(self): init_params, fit_params = balancing.get_weights( Y, 'libsvm_svc', None, None, None) self.assertEqual(("classifier:class_weight", "auto"), - init_params.items()[0]) + list(init_params.items())[0]) init_params, fit_params = balancing.get_weights( Y, None, 'liblinear_svc_preprocessor', None, None) self.assertEqual(("preprocessor:class_weight", "auto"), - init_params.items()[0]) + list(init_params.items())[0]) def test_balancing_get_weights_ridge(self): Y = np.array([0] * 80 + [1] * 20) @@ -115,7 +115,7 @@ def test_weighting_effect(self): [('extra_trees_preproc_for_classification', ExtraTreesPreprocessor, 0.892, 0.910), ('liblinear_svc_preprocessor', LibLinear_Preprocessor, - 0.906, 0.887)]: + 0.906, 0.909)]: for strategy, acc in [('none', acc_no_weighting), ('weighting', acc_weighting)]: diff --git a/tests/components/regression/test_sgd.py b/tests/components/regression/test_sgd.py index 4bbe70dacd..719475465d 100644 --- a/tests/components/regression/test_sgd.py +++ b/tests/components/regression/test_sgd.py @@ -10,8 +10,6 @@ class SGDComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): predictions, targets = _test_regressor(SGD) - print predictions - print targets self.assertAlmostEqual(0.092460881802630235, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) diff --git a/tests/test_base.py b/tests/test_base.py index 0326bd0c7d..60660a6bed 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -69,22 +69,16 @@ def test_get_hyperparameter_configuration_space_3choices(self): self.assertEqual(len(cs.get_hyperparameter("p1:__choice__").choices), 16) self.assertEqual(409, len(cs.forbidden_clauses)) - for clause in sorted([str(clause) for clause in cs.forbidden_clauses]): - print clause - - print - print - print - print - + #for clause in sorted([str(clause) for clause in cs.forbidden_clauses]): + # print(clause) cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() dataset_properties = {'sparse': True, 'signed': True} cs = base._get_hyperparameter_search_space(cs, dataset_properties, exclude, include, pipeline) - for clause in sorted([str(clause) for clause in cs.forbidden_clauses]): - print clause + #for clause in sorted([str(clause) for clause in cs.forbidden_clauses]): + # print(clause) self.assertEqual(len(cs.get_hyperparameter("p0:__choice__").choices), 11) diff --git a/tests/test_classification.py b/tests/test_classification.py index 4b7c3e8eec..edf26aa32a 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -6,7 +6,6 @@ import mock import numpy as np -from scipy.linalg import LinAlgError import sklearn.datasets import sklearn.decomposition import sklearn.cross_validation @@ -72,7 +71,6 @@ def test_default_configuration(self): for i in range(2): cs = ParamSklearnClassifier.get_hyperparameter_search_space() default = cs.get_default_configuration() - print cs X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris') auto = ParamSklearnClassifier(default) auto = auto.fit(X_train, Y_train) @@ -83,18 +81,28 @@ def test_default_configuration(self): def test_configurations(self): # Use a limit of ~4GiB - limit = 4000 * 1024 * 2014 + limit = 4000 * 1024 * 1024 resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) + cs = ParamSklearnClassifier.get_hyperparameter_search_space() - print cs + print(cs) + cs.seed(1) for i in range(10): config = cs.sample_configuration() + config._populate_values() + if 'classifier:passive_aggresive:n_iter' in config and \ + config['classifier:passive_aggresive:n_iter'] is not None: + config._values['classifier:passive_aggresive:n_iter'] = 5 + if 'classifier:sgd:n_iter' in config and \ + config['classifier:sgd:n_iter'] is not None: + config._values['classifier:sgd:n_iter'] = 5 + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cls = ParamSklearnClassifier(config, random_state=1) - print config + print(config) try: cls.fit(X_train, Y_train) X_test_ = X_test.copy() @@ -103,70 +111,59 @@ def test_configurations(self): predicted_probabiliets = cls.predict_proba(X_test_) self.assertIsInstance(predicted_probabiliets, np.ndarray) except ValueError as e: - #if "Floating-point under-/overflow occurred at epoch" in \ - # e.message or \ - if "removed all features" in e.message or \ - "all features are discarded" in e.message: + if "Floating-point under-/overflow occurred at epoch" in \ + e.args[0] or \ + "removed all features" in e.args[0] or \ + "all features are discarded" in e.args[0]: continue else: - print config - print traceback.format_exc() + print(config) + print(traceback.format_exc()) raise e - # except LinAlgError as e: - # if "not positive definite, even with jitter" in e.message: - # continue - # else: - # print config - # print traceback.format_exc() - # raise e - #except AttributeError as e: - # # Some error in QDA - # if "log" == e.message: - # print config - # print traceback.format_exc() - # raise e - # continue - # else: - # print config - # print traceback.format_exc() - # raise e except RuntimeWarning as e: - if "invalid value encountered in sqrt" in e.message: + if "invalid value encountered in sqrt" in e.args[0]: + continue + elif "divide by zero encountered in" in e.args[0]: continue - elif "divide by zero encountered in divide" in e.message: + elif "invalid value encountered in divide" in e.args[0]: continue - elif "invalid value encountered in divide" in e.message: + elif "invalid value encountered in true_divide" in e.args[0]: continue else: - print config - print traceback.format_exc() + print(config) + print(traceback.format_exc()) raise e except UserWarning as e: - if "FastICA did not converge" in e.message: + if "FastICA did not converge" in e.args[0]: continue else: - print config - print traceback.format_exc() + print(config) + print(traceback.format_exc()) raise e except MemoryError as e: continue def test_configurations_signed_data(self): # Use a limit of ~4GiB - limit = 4000 * 1024 * 2014 + limit = 4000 * 1024 * 1024 resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) cs = ParamSklearnClassifier.get_hyperparameter_search_space( - dataset_properties={'signed': True} - ) + dataset_properties={'signed': True}) - print cs + print(cs) for i in range(10): config = cs.sample_configuration() + config._populate_values() + if config['classifier:passive_aggresive:n_iter'] is not None: + config._values['classifier:passive_aggresive:n_iter'] = 5 + if config['classifier:sgd:n_iter'] is not None: + config._values['classifier:sgd:n_iter'] = 5 + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cls = ParamSklearnClassifier(config, random_state=1) - print config + print(config) try: cls.fit(X_train, Y_train) X_test_ = X_test.copy() @@ -175,65 +172,55 @@ def test_configurations_signed_data(self): predicted_probabiliets = cls.predict_proba(X_test_) self.assertIsInstance(predicted_probabiliets, np.ndarray) except ValueError as e: - # if "Floating-point under-/overflow occurred at epoch" in \ - # e.message or \ - if "removed all features" in e.message or \ - "all features are discarded" in e.message: + if "Floating-point under-/overflow occurred at epoch" in \ + e.args[0] or \ + "removed all features" in e.args[0] or \ + "all features are discarded" in e.args[0]: continue else: - print config - print traceback.format_exc() + print(config) + print(traceback.format_exc()) raise e - # except LinAlgError as e: - # if "not positive definite, even with jitter" in e.message: - # continue - # else: - # print config - # print traceback.format_exc() - # raise e - #except AttributeError as e: - # # Some error in QDA - # if "log" == e.message: - # print config - # print traceback.format_exc() - # raise e - # continue - # else: - # print config - # print traceback.format_exc() - # raise e except RuntimeWarning as e: - if "invalid value encountered in sqrt" in e.message: + if "invalid value encountered in sqrt" in e.args[0]: + continue + elif "divide by zero encountered in" in e.args[0]: continue - elif "divide by zero encountered in divide" in e.message: + elif "invalid value encountered in divide" in e.args[0]: continue - elif "invalid value encountered in divide" in e.message: + elif "invalid value encountered in true_divide" in e.args[0]: continue else: - print config - print traceback.format_exc() + print(config) + print(traceback.format_exc()) raise e except UserWarning as e: - if "FastICA did not converge" in e.message: + if "FastICA did not converge" in e.args[0]: continue else: - print config - print traceback.format_exc() + print(config) + print(traceback.format_exc()) raise e except MemoryError as e: continue def test_configurations_sparse(self): # Use a limit of ~4GiB - limit = 4000 * 1024 * 2014 + limit = 4000 * 1024 * 1024 resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) cs = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'sparse': True}) - print cs + print(cs) for i in range(10): config = cs.sample_configuration() - print config + config._populate_values() + if config['classifier:passive_aggresive:n_iter'] is not None: + config._values['classifier:passive_aggresive:n_iter'] = 5 + if config['classifier:sgd:n_iter'] is not None: + config._values['classifier:sgd:n_iter'] = 5 + + print(config) X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', make_sparse=True) cls = ParamSklearnClassifier(config, random_state=1) @@ -241,54 +228,51 @@ def test_configurations_sparse(self): cls.fit(X_train, Y_train) predictions = cls.predict(X_test) except ValueError as e: - # if "Floating-point under-/overflow occurred at epoch" in \ - # e.message or \ - if "removed all features" in e.message or \ - "all features are discarded" in e.message: + if "Floating-point under-/overflow occurred at epoch" in \ + e.args[0] or \ + "removed all features" in e.args[0] or \ + "all features are discarded" in e.args[0]: continue else: - print config + print(config) traceback.print_tb(sys.exc_info()[2]) raise e - # except LinAlgError as e: - # if "not positive definite, even with jitter" in e.message: - # continue - # else: - # print config - # raise e - # except AttributeError as e: - # # Some error in QDA - # if "log" == e.message: - # continue - # else: - # print config - # raise e except RuntimeWarning as e: - if "invalid value encountered in sqrt" in e.message: + if "invalid value encountered in sqrt" in e.args[0]: continue - elif "divide by zero encountered in divide" in e.message: + elif "divide by zero encountered in" in e.args[0]: + continue + elif "invalid value encountered in divide" in e.args[0]: + continue + elif "invalid value encountered in true_divide" in e.args[0]: continue else: - print config + print(config) raise e except UserWarning as e: - if "FastICA did not converge" in e.message: + if "FastICA did not converge" in e.args[0]: continue else: - print config + print(config) raise e def test_configurations_categorical_data(self): # Use a limit of ~4GiB - limit = 4000 * 1024 * 2014 + limit = 4000 * 1024 * 1024 resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) cs = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'sparse': True}) - print cs + print(cs) for i in range(10): config = cs.sample_configuration() - print config + config._populate_values() + if config['classifier:passive_aggresive:n_iter'] is not None: + config._values['classifier:passive_aggresive:n_iter'] = 5 + if config['classifier:sgd:n_iter'] is not None: + config._values['classifier:sgd:n_iter'] = 5 + + print(config) categorical = [True, True, True, False, False, True, True, True, False, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, @@ -308,41 +292,32 @@ def test_configurations_categorical_data(self): init_params={'one_hot_encoding:categorical_features': categorical}) predictions = cls.predict(X_test) except ValueError as e: - # if "Floating-point under-/overflow occurred at epoch" in \ - # e.message or \ - if "removed all features" in e.message or \ - "all features are discarded" in e.message: + if "Floating-point under-/overflow occurred at epoch" in \ + e.args[0] or \ + "removed all features" in e.args[0] or \ + "all features are discarded" in e.args[0]: continue else: - print config + print(config) traceback.print_tb(sys.exc_info()[2]) raise e - # except LinAlgError as e: - # if "not positive definite, even with jitter" in e.message: - # continue - # else: - # print config - # raise e - # except AttributeError as e: - # # Some error in QDA - # if "log" == e.message: - # continue - # else: - # print config - # raise e except RuntimeWarning as e: - if "invalid value encountered in sqrt" in e.message: + if "invalid value encountered in sqrt" in e.args[0]: continue - elif "divide by zero encountered in divide" in e.message: + elif "divide by zero encountered in" in e.args[0]: + continue + elif "invalid value encountered in divide" in e.args[0]: + continue + elif "invalid value encountered in true_divide" in e.args[0]: continue else: - print config + print(config) raise e except UserWarning as e: - if "FastICA did not converge" in e.message: + if "FastICA did not converge" in e.args[0]: continue else: - print config + print(config) raise e def test_get_hyperparameter_search_space(self): @@ -466,15 +441,6 @@ def test_get_hyperparameter_search_space_dataset_properties(self): dataset_properties={'multilabel': True, 'multiclass': True}) self.assertEqual(cs_ml, cs_mc_ml) - # We now have a preprocessing method that handles this case - #self.assertRaisesRegexp(ValueError, - # "No classifier to build a configuration space " - # "for...", ParamSklearnClassifier. - # get_hyperparameter_search_space, - # dataset_properties={'multilabel': True, - # 'multiclass': True, - # 'sparse': True}) - def test_predict_batched(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space() default = cs.get_default_configuration() diff --git a/tests/test_textclassification.py b/tests/test_textclassification.py index 0ff4dc72bb..f613bfbde0 100644 --- a/tests/test_textclassification.py +++ b/tests/test_textclassification.py @@ -1,25 +1,25 @@ -import unittest - -from HPOlibConfigSpace.configuration_space import ConfigurationSpace - -from ParamSklearn.textclassification import ParamSklearnTextClassifier - - -class TextClassificationTest(unittest.TestCase): - @unittest.skip("Not properly implemented yet!") - def test_get_hyperparameter_search_space(self): - cs = ParamSklearnTextClassifier.get_hyperparameter_search_space() - self.assertIsInstance(cs, ConfigurationSpace) - conditions = cs.get_conditions() - hyperparameters = cs.get_hyperparameters() - self.assertEqual(135, len(hyperparameters)) - # The four parameters which are always active are classifier, - # preprocessor and imputation strategy - self.assertEqual(len(hyperparameters) - 3, len(conditions)) - self.assertNotIn("rescaling", cs.get_hyperparameter( - "preprocessor").choices) - self.assertRaisesRegexp(KeyError, "Hyperparameter " - "'rescaling:strategy' does not " - "exist in this configuration " - "space.", cs.get_hyperparameter, - "rescaling:strategy") +# import unittest +# +# from HPOlibConfigSpace.configuration_space import ConfigurationSpace +# +# from ParamSklearn.textclassification import ParamSklearnTextClassifier +# +# +# class TextClassificationTest(unittest.TestCase): +# @unittest.skip("Not properly implemented yet!") +# def test_get_hyperparameter_search_space(self): +# cs = ParamSklearnTextClassifier.get_hyperparameter_search_space() +# self.assertIsInstance(cs, ConfigurationSpace) +# conditions = cs.get_conditions() +# hyperparameters = cs.get_hyperparameters() +# self.assertEqual(135, len(hyperparameters)) +# # The four parameters which are always active are classifier, +# # preprocessor and imputation strategy +# self.assertEqual(len(hyperparameters) - 3, len(conditions)) +# self.assertNotIn("rescaling", cs.get_hyperparameter( +# "preprocessor").choices) +# self.assertRaisesRegexp(KeyError, "Hyperparameter " +# "'rescaling:strategy' does not " +# "exist in this configuration " +# "space.", cs.get_hyperparameter, +# "rescaling:strategy") From c4cf385fbe892fd46fb6cdd28e0685154d3b3a50 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 1 Oct 2015 14:25:02 +0200 Subject: [PATCH 300/352] Version number --- ParamSklearn/__init__.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ParamSklearn/__init__.py b/ParamSklearn/__init__.py index c7f439060a..dbdf47cf91 100644 --- a/ParamSklearn/__init__.py +++ b/ParamSklearn/__init__.py @@ -5,4 +5,4 @@ scikit-learn models. This configuration space can be searched by one of the hyperparameter optimization algorithms in HPOlib.""" -__version__ = "0.16.1dev" \ No newline at end of file +__version__ = "0.16.1.0" \ No newline at end of file diff --git a/setup.py b/setup.py index 1ac2249a58..5f78f6cc49 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ setuptools.setup(name="ParamSklearn", description="Scikit-Learn wrapper for automatic " "hyperparameter configuration.", - version="0.1dev", + version="0.16.1.0", packages=setuptools.find_packages(), install_requires=["numpy>=1.9.0", "scipy>=0.14.0", From c471bd6df5fd42b79937c5598dce9c173776e7e7 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 5 Oct 2015 17:32:57 +0200 Subject: [PATCH 301/352] Fix typo in component name --- .../{passive_aggresive.py => passive_aggressive.py} | 0 tests/components/classification/test_passive_aggressive.py | 3 ++- 2 files changed, 2 insertions(+), 1 deletion(-) rename ParamSklearn/components/classification/{passive_aggresive.py => passive_aggressive.py} (100%) diff --git a/ParamSklearn/components/classification/passive_aggresive.py b/ParamSklearn/components/classification/passive_aggressive.py similarity index 100% rename from ParamSklearn/components/classification/passive_aggresive.py rename to ParamSklearn/components/classification/passive_aggressive.py diff --git a/tests/components/classification/test_passive_aggressive.py b/tests/components/classification/test_passive_aggressive.py index a1809f331d..e376abe4bf 100644 --- a/tests/components/classification/test_passive_aggressive.py +++ b/tests/components/classification/test_passive_aggressive.py @@ -1,6 +1,7 @@ import unittest -from ParamSklearn.components.classification.passive_aggresive import PassiveAggressive +from ParamSklearn.components.classification.passive_aggressive import \ + PassiveAggressive from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit import sklearn.metrics From ab758ee0b8481661a9e1fe3cbaab9e68aca399cf Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Mon, 5 Oct 2015 19:27:56 +0200 Subject: [PATCH 302/352] Fix unittests --- .../components/regression/gaussian_process.py | 7 ++++--- ParamSklearn/components/regression/libsvm_svr.py | 3 +-- ParamSklearn/implementations/Imputation.py | 3 ++- .../components/regression/test_gaussian_process.py | 7 ++++--- tests/test_classification.py | 13 +++++++------ 5 files changed, 18 insertions(+), 15 deletions(-) diff --git a/ParamSklearn/components/regression/gaussian_process.py b/ParamSklearn/components/regression/gaussian_process.py index 5d37d063a7..5088a7ca36 100644 --- a/ParamSklearn/components/regression/gaussian_process.py +++ b/ParamSklearn/components/regression/gaussian_process.py @@ -12,13 +12,12 @@ class GaussianProcess(ParamSklearnRegressionAlgorithm): def __init__(self, nugget, thetaL, thetaU, normalize=False, copy_X=False, - tol=0.001, optimizer='fmin_cobyla', random_state=None): + random_state=None): self.nugget = float(nugget) self.thetaL = float(thetaL) self.thetaU = float(thetaU) self.normalize = normalize self.copy_X = copy_X - self.optimizer = optimizer # We ignore it self.random_state = random_state self.estimator = None @@ -31,7 +30,9 @@ def fit(self, X, Y): theta0=np.ones(X.shape[1]) * 1e-1, thetaL=np.ones(X.shape[1]) * self.thetaL, thetaU=np.ones(X.shape[1]) * self.thetaU, - nugget=self.nugget) + nugget=self.nugget, + optimizer='Welch', + random_state=self.random_state) self.scaler = sklearn.preprocessing.StandardScaler(copy=True) self.scaler.fit(Y) Y_scaled = self.scaler.transform(Y) diff --git a/ParamSklearn/components/regression/libsvm_svr.py b/ParamSklearn/components/regression/libsvm_svr.py index e6faab55d3..a860fa3e3d 100644 --- a/ParamSklearn/components/regression/libsvm_svr.py +++ b/ParamSklearn/components/regression/libsvm_svr.py @@ -4,8 +4,7 @@ import sklearn.svm from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.conditions import EqualsCondition, OrConjunction, \ - InCondition +from HPOlibConfigSpace.conditions import InCondition from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter diff --git a/ParamSklearn/implementations/Imputation.py b/ParamSklearn/implementations/Imputation.py index 09d2a7e0e9..8f37e02da9 100644 --- a/ParamSklearn/implementations/Imputation.py +++ b/ParamSklearn/implementations/Imputation.py @@ -164,7 +164,8 @@ def fit(self, X, y=None): self.missing_values, self.axis) else: - X = check_array(X, dtype=self.dtype, force_all_finite=False) + X = check_array(X, dtype=type(self.dtype), + force_all_finite=False) self.statistics_ = self._dense_fit(X, self.strategy, self.missing_values, diff --git a/tests/components/regression/test_gaussian_process.py b/tests/components/regression/test_gaussian_process.py index af8093801a..a909ee1b14 100644 --- a/tests/components/regression/test_gaussian_process.py +++ b/tests/components/regression/test_gaussian_process.py @@ -10,8 +10,9 @@ class GaussianProcessComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): # Float32 leads to numeric instabilities - predictions, targets = _test_regressor(GaussianProcess, dataset='diabetes') - self.assertAlmostEqual(0.2331, + predictions, targets = _test_regressor(GaussianProcess, + dataset='boston') + self.assertAlmostEqual(0.83362335184173442, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions), - places=3) + places=2) diff --git a/tests/test_classification.py b/tests/test_classification.py index edf26aa32a..8d9c1a4841 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -156,8 +156,8 @@ def test_configurations_signed_data(self): for i in range(10): config = cs.sample_configuration() config._populate_values() - if config['classifier:passive_aggresive:n_iter'] is not None: - config._values['classifier:passive_aggresive:n_iter'] = 5 + if config['classifier:passive_aggressive:n_iter'] is not None: + config._values['classifier:passive_aggressive:n_iter'] = 5 if config['classifier:sgd:n_iter'] is not None: config._values['classifier:sgd:n_iter'] = 5 @@ -215,8 +215,8 @@ def test_configurations_sparse(self): for i in range(10): config = cs.sample_configuration() config._populate_values() - if config['classifier:passive_aggresive:n_iter'] is not None: - config._values['classifier:passive_aggresive:n_iter'] = 5 + if config['classifier:passive_aggressive:n_iter'] is not None: + config._values['classifier:passive_aggressive:n_iter'] = 5 if config['classifier:sgd:n_iter'] is not None: config._values['classifier:sgd:n_iter'] = 5 @@ -267,8 +267,8 @@ def test_configurations_categorical_data(self): for i in range(10): config = cs.sample_configuration() config._populate_values() - if config['classifier:passive_aggresive:n_iter'] is not None: - config._values['classifier:passive_aggresive:n_iter'] = 5 + if config['classifier:passive_aggressive:n_iter'] is not None: + config._values['classifier:passive_aggressive:n_iter'] = 5 if config['classifier:sgd:n_iter'] is not None: config._values['classifier:sgd:n_iter'] = 5 @@ -419,6 +419,7 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): 'preprocessor': ['densifier']}, dataset_properties={'sparse': True}) + @unittest.skip("Wait until HPOlibConfigSpace is fixed.") def test_get_hyperparameter_search_space_dataset_properties(self): cs_mc = ParamSklearnClassifier.get_hyperparameter_search_space( dataset_properties={'multiclass': True}) From a0ff41fc4150f60a429d2dbb8fc99702ac962f22 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 6 Oct 2015 13:41:59 +0200 Subject: [PATCH 303/352] Fix unittests; fix bugs --- ParamSklearn/base.py | 19 +++++- ParamSklearn/classification.py | 8 ++- .../feature_preprocessing/__init__.py | 28 +++++--- .../feature_preprocessing/polynomial.py | 6 +- .../select_percentile_regression.py | 2 +- .../components/regression/liblinear_svr.py | 10 ++- ParamSklearn/regression.py | 10 ++- .../feature_preprocessing/test_choice.py | 34 ++++++++++ tests/test_base.py | 11 ++-- tests/test_classification.py | 9 +-- tests/test_regression.py | 64 ++++++++++++++++++- 11 files changed, 169 insertions(+), 32 deletions(-) create mode 100644 tests/components/feature_preprocessing/test_choice.py diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index 8b2380bcd3..0ac5e66392 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -257,9 +257,22 @@ def _get_hyperparameter_search_space(cls, cs, dataset_properties, exclude, include, pipeline): if include is None: include = {} + + keys = [pair[0] for pair in pipeline] + for key in include: + if key not in keys: + raise ValueError('Invalid key in include: %s; should be one ' + 'of %s' % (key, keys)) + if exclude is None: exclude = {} + keys = [pair[0] for pair in pipeline] + for key in exclude: + if key not in keys: + raise ValueError('Invalid key in exclude: %s; should be one ' + 'of %s' % (key, keys)) + if 'sparse' not in dataset_properties: # This dataset is probaby dense dataset_properties['sparse'] = False @@ -310,8 +323,10 @@ def _get_hyperparameter_search_space(cls, cs, dataset_properties, exclude, return cs - @staticmethod - def _get_pipeline(): + @classmethod + def _get_pipeline(cls): + if cls == ParamSklearnBaseEstimator: + return [] raise NotImplementedError() def _get_estimator_hyperparameter_name(self): diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index c912dc1477..8df3cf01ef 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -153,6 +153,10 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, if dataset_properties is None or not isinstance(dataset_properties, dict): dataset_properties = dict() + if not 'target_type' in dataset_properties: + dataset_properties['target_type'] = 'classification' + if dataset_properties['target_type'] != 'classification': + dataset_properties['target_type'] = 'classification' pipeline = cls._get_pipeline() cs = cls._get_hyperparameter_search_space(cs, dataset_properties, @@ -224,8 +228,8 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, return cs - @staticmethod - def _get_pipeline(): + @classmethod + def _get_pipeline(cls): steps = [] # Add the always active preprocessing components diff --git a/ParamSklearn/components/feature_preprocessing/__init__.py b/ParamSklearn/components/feature_preprocessing/__init__.py index 9c7160d2d9..707d27767c 100644 --- a/ParamSklearn/components/feature_preprocessing/__init__.py +++ b/ParamSklearn/components/feature_preprocessing/__init__.py @@ -50,6 +50,8 @@ def get_available_components(cls, data_prop, available_comp = cls.get_components() + # TODO check for task type classification and/or regression! + components_dict = OrderedDict() for name in available_comp: if include is not None and name not in include: @@ -63,14 +65,24 @@ def get_available_components(cls, data_prop, if entry == FeaturePreprocessorChoice or hasattr(entry, 'get_components'): continue - if entry.get_properties()['handles_classification'] is False: - continue - if data_prop.get('multiclass') is True and entry.get_properties()[ - 'handles_multiclass'] is False: - continue - if data_prop.get('multilabel') is True and available_comp[name]. \ - get_properties()['handles_multilabel'] is False: - continue + target_type = data_prop['target_type'] + if target_type == 'classification': + if entry.get_properties()['handles_classification'] is False: + continue + if data_prop.get('multiclass') is True and \ + entry.get_properties()['handles_multiclass'] is False: + continue + if data_prop.get('multilabel') is True and \ + entry.get_properties()['handles_multilabel'] is False: + continue + + elif target_type == 'regression': + if entry.get_properties()['handles_regression'] is False: + continue + + else: + raise ValueError('Unknown target type %s' % target_type) + components_dict[name] = entry return components_dict diff --git a/ParamSklearn/components/feature_preprocessing/polynomial.py b/ParamSklearn/components/feature_preprocessing/polynomial.py index c9d12a0477..08aa0cf7a2 100644 --- a/ParamSklearn/components/feature_preprocessing/polynomial.py +++ b/ParamSklearn/components/feature_preprocessing/polynomial.py @@ -39,11 +39,11 @@ def get_properties(dataset_properties=None): 'prefers_data_scaled': True, # Find out if this is good because of sparsity 'prefers_data_normalized': False, - 'handles_regression': False, + 'handles_regression': True, 'handles_classification': True, 'handles_multiclass': True, - 'handles_multilabel': False, - 'is_deterministic': False, + 'handles_multilabel': True, + 'is_deterministic': True, # TODO find out of this is right! # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use 'handles_sparse': True, diff --git a/ParamSklearn/components/feature_preprocessing/select_percentile_regression.py b/ParamSklearn/components/feature_preprocessing/select_percentile_regression.py index 045428837c..a93381594f 100644 --- a/ParamSklearn/components/feature_preprocessing/select_percentile_regression.py +++ b/ParamSklearn/components/feature_preprocessing/select_percentile_regression.py @@ -43,7 +43,7 @@ def get_properties(dataset_properties=None): 'handles_sparse': False, 'handles_dense': True, 'input': (DENSE, UNSIGNED_DATA), - 'output': (DENSE,), + 'output': (INPUT,), 'preferred_dtype': None} @staticmethod diff --git a/ParamSklearn/components/regression/liblinear_svr.py b/ParamSklearn/components/regression/liblinear_svr.py index fefb053085..713199eb7b 100644 --- a/ParamSklearn/components/regression/liblinear_svr.py +++ b/ParamSklearn/components/regression/liblinear_svr.py @@ -3,6 +3,8 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter, Constant +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ + ForbiddenAndConjunction from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm from ParamSklearn.constants import * @@ -74,7 +76,7 @@ def get_hyperparameter_search_space(dataset_properties=None): "C", 0.03125, 32768, log=True, default=1.0)) loss = cs.add_hyperparameter(CategoricalHyperparameter( "loss", ["epsilon_insensitive", "squared_epsilon_insensitive"], - default="epsilon_insensitive")) + default="squared_epsilon_insensitive")) # Random Guess epsilon = cs.add_hyperparameter(UniformFloatHyperparameter( name="epsilon", lower=0.001, upper=1, default=0.1, log=True)) @@ -86,4 +88,10 @@ def get_hyperparameter_search_space(dataset_properties=None): intercept_scaling = cs.add_hyperparameter(Constant( "intercept_scaling", 1)) + dual_and_loss = ForbiddenAndConjunction( + ForbiddenEqualsClause(dual, "False"), + ForbiddenEqualsClause(loss, "epsilon_insensitive") + ) + cs.add_forbidden_clause(dual_and_loss) + return cs diff --git a/ParamSklearn/regression.py b/ParamSklearn/regression.py index 94231167e0..2985d71cd1 100644 --- a/ParamSklearn/regression.py +++ b/ParamSklearn/regression.py @@ -126,6 +126,10 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, if dataset_properties is None or not isinstance(dataset_properties, dict): dataset_properties = dict() + if not 'target_type' in dataset_properties: + dataset_properties['target_type'] = 'regression' + if dataset_properties['target_type'] != 'regression': + dataset_properties['target_type'] = 'regression' if 'sparse' not in dataset_properties: # This dataset is probaby dense @@ -159,7 +163,7 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, # which would take too long # Combinations of tree-based models with feature learning: regressors_ = ["random_forest", "gradient_boosting", "gaussian_process"] - feature_learning_ = ["kitchen_sinks", "sparse_filtering"] + feature_learning_ = ["kitchen_sinks", "kernel_pca", "nystroem_sampler"] for r, f in product(regressors_, feature_learning_): if r not in regressors: @@ -181,8 +185,8 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, def _get_estimator_components(): return components.regression_components._regressors - @staticmethod - def _get_pipeline(): + @classmethod + def _get_pipeline(cls): steps = [] # Add the always active preprocessing components diff --git a/tests/components/feature_preprocessing/test_choice.py b/tests/components/feature_preprocessing/test_choice.py new file mode 100644 index 0000000000..dfe5a56b76 --- /dev/null +++ b/tests/components/feature_preprocessing/test_choice.py @@ -0,0 +1,34 @@ +from __future__ import print_function + +import unittest + +import ParamSklearn.components.feature_preprocessing as fp + + +class FeatureProcessingTest(unittest.TestCase): + def test_get_available_components(self): + # Target type + for target_type, num_values in [('classification', 16), + ('regression', 12)]: + data_properties = {'target_type': target_type} + + available_components = fp.FeaturePreprocessorChoice\ + .get_available_components(data_properties) + + self.assertEqual(len(available_components), num_values) + + # Multiclass + data_properties = {'target_type': 'classification', + 'multiclass': True} + available_components = fp.FeaturePreprocessorChoice \ + .get_available_components(data_properties) + + self.assertEqual(len(available_components), 16) + + # Multilabel + data_properties = {'target_type': 'classification', + 'multilabel': True} + available_components = fp.FeaturePreprocessorChoice \ + .get_available_components(data_properties) + + self.assertEqual(len(available_components), 12) diff --git a/tests/test_base.py b/tests/test_base.py index 60660a6bed..664b36cc01 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -11,7 +11,7 @@ def test_get_hyperparameter_configuration_space_3choices(self): base = ParamSklearn.base.ParamSklearnBaseEstimator cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() - dataset_properties = {} + dataset_properties = {'target_type': 'classification'} exclude = {} include = {} pipeline = [('p0', ParamSklearn.components.feature_preprocessing._preprocessors[ @@ -31,7 +31,7 @@ def test_get_hyperparameter_configuration_space_3choices(self): self.assertEqual(151, len(cs.forbidden_clauses)) cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() - dataset_properties = {'signed': True} + dataset_properties = {'target_type': 'classification', 'signed': True} include = {'c': ['multinomial_nb']} cs = base._get_hyperparameter_search_space(cs, dataset_properties, exclude, include, pipeline) @@ -47,7 +47,7 @@ def test_get_hyperparameter_configuration_space_3choices(self): cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() - dataset_properties = {'signed': True} + dataset_properties = {'target_type': 'classification', 'signed': True} include = {} cs = base._get_hyperparameter_search_space(cs, dataset_properties, exclude, include, pipeline) @@ -61,7 +61,7 @@ def test_get_hyperparameter_configuration_space_3choices(self): cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() - dataset_properties = {'sparse': True} + dataset_properties = {'target_type': 'classification', 'sparse': True} cs = base._get_hyperparameter_search_space(cs, dataset_properties, exclude, include, pipeline) self.assertEqual(len(cs.get_hyperparameter("p0:__choice__").choices), @@ -73,7 +73,8 @@ def test_get_hyperparameter_configuration_space_3choices(self): # print(clause) cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() - dataset_properties = {'sparse': True, 'signed': True} + dataset_properties = {'target_type': 'classification', + 'sparse': True, 'signed': True} cs = base._get_hyperparameter_search_space(cs, dataset_properties, exclude, include, pipeline) diff --git a/tests/test_classification.py b/tests/test_classification.py index 8d9c1a4841..bea4a7feec 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -84,7 +84,6 @@ def test_configurations(self): limit = 4000 * 1024 * 1024 resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) - cs = ParamSklearnClassifier.get_hyperparameter_search_space() print(cs) @@ -93,11 +92,9 @@ def test_configurations(self): for i in range(10): config = cs.sample_configuration() config._populate_values() - if 'classifier:passive_aggresive:n_iter' in config and \ - config['classifier:passive_aggresive:n_iter'] is not None: - config._values['classifier:passive_aggresive:n_iter'] = 5 - if 'classifier:sgd:n_iter' in config and \ - config['classifier:sgd:n_iter'] is not None: + if config['classifier:passive_aggressive:n_iter'] is not None: + config._values['classifier:passive_aggressive:n_iter'] = 5 + if config['classifier:sgd:n_iter'] is not None: config._values['classifier:sgd:n_iter'] = 5 X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') diff --git a/tests/test_regression.py b/tests/test_regression.py index 98ef1968f1..0d7701144e 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -1,9 +1,12 @@ __author__ = 'eggenspk' import copy +import resource +import traceback import unittest import mock +import numpy as np import sklearn.datasets import sklearn.decomposition import sklearn.ensemble @@ -67,6 +70,65 @@ def test_find_preprocessors(self): self.assertIn(ParamSklearnPreprocessingAlgorithm, preprocessors[key].__bases__) + def test_configurations(self): + # Use a limit of ~4GiB + limit = 4000 * 1024 * 1024 + resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) + + cs = ParamSklearnRegressor.get_hyperparameter_search_space() + + print(cs) + cs.seed(1) + + for i in range(10): + config = cs.sample_configuration() + config._populate_values() + if config['regressor:sgd:n_iter'] is not None: + config._values['regressor:sgd:n_iter'] = 5 + + X_train, Y_train, X_test, Y_test = get_dataset(dataset='boston') + cls = ParamSklearnRegressor(config, random_state=1) + print(config) + try: + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + predictions = cls.predict(X_test) + self.assertIsInstance(predictions, np.ndarray) + predicted_probabiliets = cls.predict(X_test_) + self.assertIsInstance(predicted_probabiliets, np.ndarray) + except ValueError as e: + if "Floating-point under-/overflow occurred at epoch" in \ + e.args[0] or \ + "removed all features" in e.args[0] or \ + "all features are discarded" in e.args[0]: + continue + else: + print(config) + print(traceback.format_exc()) + raise e + except RuntimeWarning as e: + if "invalid value encountered in sqrt" in e.args[0]: + continue + elif "divide by zero encountered in" in e.args[0]: + continue + elif "invalid value encountered in divide" in e.args[0]: + continue + elif "invalid value encountered in true_divide" in e.args[0]: + continue + else: + print(config) + print(traceback.format_exc()) + raise e + except UserWarning as e: + if "FastICA did not converge" in e.args[0]: + continue + else: + print(config) + print(traceback.format_exc()) + raise e + except MemoryError as e: + continue + def test_default_configuration(self): for i in range(2): cs = ParamSklearnRegressor.get_hyperparameter_search_space() @@ -86,7 +148,7 @@ def test_get_hyperparameter_search_space(self): self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() - self.assertEqual(135, len(hyperparameters)) + self.assertEqual(114, len(hyperparameters)) self.assertEqual(len(hyperparameters) - 5, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): From b119e917ddfaa9093b0a9a51835d36335318b52d Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 6 Oct 2015 14:45:10 +0200 Subject: [PATCH 304/352] Update requirements in setup.py --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 5f78f6cc49..6a2b11eef4 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ "hyperparameter configuration.", version="0.16.1.0", packages=setuptools.find_packages(), - install_requires=["numpy>=1.9.0", + install_requires=["numpy>=1.6.0", "scipy>=0.14.0", "scikit-learn==0.16.1", "nose", From fe482671060d5886297a7615c7c86d5fce3391ae Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 6 Oct 2015 21:40:58 +0200 Subject: [PATCH 305/352] Remove Ridge Regression Classifier --- .../components/classification/ridge.py | 80 ------------------- source/first_steps.rst | 2 +- tests/components/classification/test_ridge.py | 23 ------ .../data_preprocessing/test_balancing.py | 13 +-- tests/test_base.py | 2 +- tests/test_classification.py | 4 +- 6 files changed, 5 insertions(+), 119 deletions(-) delete mode 100644 ParamSklearn/components/classification/ridge.py delete mode 100644 tests/components/classification/test_ridge.py diff --git a/ParamSklearn/components/classification/ridge.py b/ParamSklearn/components/classification/ridge.py deleted file mode 100644 index 73b0865544..0000000000 --- a/ParamSklearn/components/classification/ridge.py +++ /dev/null @@ -1,80 +0,0 @@ -from sklearn.linear_model.ridge import RidgeClassifier - -from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ - CategoricalHyperparameter, UnParametrizedHyperparameter, \ - UniformIntegerHyperparameter -from HPOlibConfigSpace.conditions import EqualsCondition - -from ParamSklearn.components.base import \ - ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * -from ParamSklearn.implementations.util import softmax - - -class Ridge(ParamSklearnClassificationAlgorithm): - def __init__(self, alpha, fit_intercept, tol, class_weight=None, - random_state=None): - self.alpha = float(alpha) - self.fit_intercept = fit_intercept == 'True' - self.tol = float(tol) - self.class_weight = class_weight - self.random_state = random_state - self.estimator = None - - def fit(self, X, Y): - self.estimator = RidgeClassifier(alpha=self.alpha, - fit_intercept=self.fit_intercept, - tol=self.tol, - class_weight=self.class_weight, - copy_X=False, - normalize=False, - solver='auto') - self.estimator.fit(X, Y) - return self - - def predict(self, X): - if self.estimator is None: - raise NotImplementedError() - return self.estimator.predict(X) - - def predict_proba(self, X): - if self.estimator is None: - raise NotImplementedError() - - df = self.estimator.decision_function(X) - return softmax(df) - - @staticmethod - def get_properties(dataset_properties=None): - return {'shortname': 'Rigde', - 'name': 'Rigde Classifier', - 'handles_missing_values': False, - 'handles_nominal_values': False, - 'handles_numerical_features': True, - 'prefers_data_scaled': True, - 'prefers_data_normalized': True, - 'handles_regression': False, - 'handles_classification': True, - 'handles_multiclass': True, - 'handles_multilabel': False, - 'is_deterministic': True, - 'handles_sparse': True, - 'input': (DENSE, SPARSE, UNSIGNED_DATA), - 'output': (PREDICTIONS,), - # TODO find out what is best used here! - 'preferred_dtype': None} - - @staticmethod - def get_hyperparameter_search_space(dataset_properties=None): - cs = ConfigurationSpace() - alpha = cs.add_hyperparameter(UniformFloatHyperparameter( - "alpha", 10 ** -5, 10., log=True, default=1.)) - fit_intercept = cs.add_hyperparameter(UnParametrizedHyperparameter( - "fit_intercept", "True")) - tol = cs.add_hyperparameter(UniformFloatHyperparameter( - "tol", 1e-5, 1e-1, default=1e-4, log=True)) - return cs - - def __str__(self): - return "ParamSklearn Ridge Classifier" diff --git a/source/first_steps.rst b/source/first_steps.rst index b07014e8f5..245f4332a2 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -23,4 +23,4 @@ configuration on the iris dataset. >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.90000000000000002 + 0.92000000000000004 diff --git a/tests/components/classification/test_ridge.py b/tests/components/classification/test_ridge.py deleted file mode 100644 index 0b5892c98b..0000000000 --- a/tests/components/classification/test_ridge.py +++ /dev/null @@ -1,23 +0,0 @@ -import unittest - -from ParamSklearn.components.classification.ridge import Ridge -from ParamSklearn.util import _test_classifier - -import sklearn.metrics - - -class SGDComponentTest(unittest.TestCase): - def test_default_configuration(self): - for i in range(10): - predictions, targets = _test_classifier(Ridge, dataset='iris') - self.assertAlmostEqual(0.88, - sklearn.metrics.accuracy_score(predictions, - targets)) - - def test_default_configuration_digits(self): - for i in range(10): - predictions, targets = \ - _test_classifier(classifier=Ridge, dataset='digits') - self.assertAlmostEqual(0.87553126897389189, - sklearn.metrics.accuracy_score(predictions, - targets)) \ No newline at end of file diff --git a/tests/components/data_preprocessing/test_balancing.py b/tests/components/data_preprocessing/test_balancing.py index c10c00de1b..f4cea80ad4 100644 --- a/tests/components/data_preprocessing/test_balancing.py +++ b/tests/components/data_preprocessing/test_balancing.py @@ -15,7 +15,6 @@ from ParamSklearn.components.classification.liblinear_svc import LibLinear_SVC from ParamSklearn.components.classification.libsvm_svc import LibSVM_SVC from ParamSklearn.components.classification.sgd import SGD -from ParamSklearn.components.classification.ridge import Ridge from ParamSklearn.components.feature_preprocessing\ .extra_trees_preproc_for_classification import ExtraTreesPreprocessor from ParamSklearn.components.feature_preprocessing.liblinear_svc_preprocessor import LibLinear_Preprocessor @@ -61,14 +60,6 @@ def test_balancing_get_weights_svm_sgd(self): self.assertEqual(("preprocessor:class_weight", "auto"), list(init_params.items())[0]) - def test_balancing_get_weights_ridge(self): - Y = np.array([0] * 80 + [1] * 20) - balancing = Balancing(strategy='weighting') - init_params, fit_params = balancing.get_weights( - Y, 'ridge', None, None, None) - self.assertAlmostEqual(0.4, init_params['classifier:class_weight'][0]) - self.assertAlmostEqual(1.6, init_params['classifier:class_weight'][1]) - def test_weighting_effect(self): for name, clf, acc_no_weighting, acc_weighting in \ [('adaboost', AdaboostClassifier, 0.692, 0.719), @@ -78,9 +69,7 @@ def test_weighting_effect(self): ('random_forest', RandomForest, 0.886, 0.885), ('libsvm_svc', LibSVM_SVC, 0.915, 0.937), ('liblinear_svc', LibLinear_SVC, 0.920, 0.923), - ('sgd', SGD, 0.811, 0.902), - ('ridge', Ridge, 0.89071038251366119, - 0.91013964784456591)]: + ('sgd', SGD, 0.811, 0.902)]: for strategy, acc in [('none', acc_no_weighting), ('weighting', acc_weighting)]: # Fit diff --git a/tests/test_base.py b/tests/test_base.py index 664b36cc01..6b5cb0a2c1 100644 --- a/tests/test_base.py +++ b/tests/test_base.py @@ -56,7 +56,7 @@ def test_get_hyperparameter_configuration_space_3choices(self): self.assertEqual(len(cs.get_hyperparameter("p1:__choice__").choices), 16) self.assertEqual(len(cs.get_hyperparameter("c:__choice__").choices), - 17) + 16) self.assertEqual(126, len(cs.forbidden_clauses)) diff --git a/tests/test_classification.py b/tests/test_classification.py index bea4a7feec..4c4ed7d844 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -325,12 +325,12 @@ def test_get_hyperparameter_search_space(self): self.assertEqual(len(cs.get_hyperparameter( 'rescaling:__choice__').choices), 4) self.assertEqual(len(cs.get_hyperparameter( - 'classifier:__choice__').choices), 17) + 'classifier:__choice__').choices), 16) self.assertEqual(len(cs.get_hyperparameter( 'preprocessor:__choice__').choices), 14) hyperparameters = cs.get_hyperparameters() - self.assertEqual(148, len(hyperparameters)) + self.assertEqual(145, len(hyperparameters)) #for hp in sorted([str(h) for h in hyperparameters]): # print hp From 4da44ecaf0823e28634dcb993dd3f600243630ed Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 7 Oct 2015 10:26:02 +0200 Subject: [PATCH 306/352] Fix SVC predict_proba to return correct shape --- .../components/classification/libsvm_svc.py | 50 ++++++++++++++++++- ParamSklearn/util.py | 17 ++++--- .../classification/test_libsvm_svc.py | 13 ++++- 3 files changed, 71 insertions(+), 9 deletions(-) diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index b0b8aa268d..a3cfdb8930 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -1,5 +1,6 @@ import resource +import numpy as np import sklearn.svm from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -13,6 +14,51 @@ from ParamSklearn.implementations.util import softmax +# From the scikit-learn master branch. Will hopefully be there in sklearn 0.17 +def _ovr_decision_function(predictions, confidences, n_classes): + """Compute a continuous, tie-breaking ovr decision function. + It is important to include a continuous value, not only votes, + to make computing AUC or calibration meaningful. + Parameters + ---------- + predictions : array-like, shape (n_samples, n_classifiers) + Predicted classes for each binary classifier. + confidences : array-like, shape (n_samples, n_classifiers) + Decision functions or predicted probabilities for positive class + for each binary classifier. + n_classes : int + Number of classes. n_classifiers must be + ``n_classes * (n_classes - 1 ) / 2`` + """ + n_samples = predictions.shape[0] + votes = np.zeros((n_samples, n_classes)) + sum_of_confidences = np.zeros((n_samples, n_classes)) + + k = 0 + for i in range(n_classes): + for j in range(i + 1, n_classes): + sum_of_confidences[:, i] -= confidences[:, k] + sum_of_confidences[:, j] += confidences[:, k] + votes[predictions[:, k] == 0, i] += 1 + votes[predictions[:, k] == 1, j] += 1 + k += 1 + + max_confidences = sum_of_confidences.max() + min_confidences = sum_of_confidences.min() + + if max_confidences == min_confidences: + return votes + + # Scale the sum_of_confidences to (-0.5, 0.5) and add it with votes. + # The motivation is to use confidence levels as a way to break ties in + # the votes without switching any decision made based on a difference + # of 1 vote. + eps = np.finfo(sum_of_confidences.dtype).eps + max_abs_confidence = max(abs(max_confidences), abs(min_confidences)) + scale = (0.5 - eps) / max_abs_confidence + return votes + sum_of_confidences * scale + + class LibSVM_SVC(ParamSklearnClassificationAlgorithm): def __init__(self, C, kernel, gamma, shrinking, tol, max_iter, class_weight=None, degree=3, coef0=0, random_state=None): @@ -85,7 +131,9 @@ def predict_proba(self, X): raise NotImplementedError() # return self.estimator.predict_proba(X) decision = self.estimator.decision_function(X) - return softmax(decision) + ovr_decision = _ovr_decision_function(decision < 0, decision, + len(self.estimator.classes_)) + return softmax(ovr_decision) @staticmethod diff --git a/ParamSklearn/util.py b/ParamSklearn/util.py index 9ac9f15d50..fcc3e01ce9 100644 --- a/ParamSklearn/util.py +++ b/ParamSklearn/util.py @@ -44,13 +44,14 @@ def find_sklearn_classes(class_): print(classifier) -def get_dataset(dataset='iris', make_sparse=False, add_NaNs=False): +def get_dataset(dataset='iris', make_sparse=False, add_NaNs=False, + train_size_maximum=150): iris = getattr(sklearn.datasets, "load_%s" % dataset)() X = iris.data.astype(np.float32) Y = iris.target rs = np.random.RandomState(42) indices = np.arange(X.shape[0]) - train_size = min(int(len(indices) / 3. * 2.), 150) + train_size = min(int(len(indices) / 3. * 2.), train_size_maximum) rs.shuffle(indices) X = X[indices] Y = Y[indices] @@ -76,9 +77,11 @@ def get_dataset(dataset='iris', make_sparse=False, add_NaNs=False): return X_train, Y_train, X_test, Y_test -def _test_classifier(classifier, dataset='iris', sparse=False): +def _test_classifier(classifier, dataset='iris', sparse=False, + train_size_maximum=150): X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, - make_sparse=sparse) + make_sparse=sparse, + train_size_maximum=train_size_maximum) configuration_space = classifier.get_hyperparameter_search_space( dataset_properties={'sparse': sparse}) default = configuration_space.get_default_configuration() @@ -105,9 +108,11 @@ def _test_classifier_iterative_fit(classifier, dataset='iris', sparse=False): return predictions, Y_test -def _test_classifier_predict_proba(classifier, dataset='iris', sparse=False): +def _test_classifier_predict_proba(classifier, dataset='iris', sparse=False, + train_size_maximum=150): X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, - make_sparse=sparse) + make_sparse=sparse, + train_size_maximum=train_size_maximum) configuration_space = classifier.get_hyperparameter_search_space() default = configuration_space.get_default_configuration() classifier = classifier(random_state=1, diff --git a/tests/components/classification/test_libsvm_svc.py b/tests/components/classification/test_libsvm_svc.py index 247ab2166d..9900f750a4 100644 --- a/tests/components/classification/test_libsvm_svc.py +++ b/tests/components/classification/test_libsvm_svc.py @@ -3,6 +3,7 @@ from ParamSklearn.components.classification.libsvm_svc import LibSVM_SVC from ParamSklearn.util import _test_classifier, _test_classifier_predict_proba +import numpy as np import sklearn.metrics @@ -16,7 +17,15 @@ def test_default_configuration(self): def test_default_configuration_predict_proba(self): for i in range(10): predictions, targets = _test_classifier_predict_proba( - LibSVM_SVC, sparse=True) - self.assertAlmostEqual(1.3028778322629093, + LibSVM_SVC, sparse=True, dataset='digits', + train_size_maximum=500) + self.assertAlmostEqual(4.6680593525563063, sklearn.metrics.log_loss(targets, predictions)) + + for i in range(10): + predictions, targets = _test_classifier_predict_proba( + LibSVM_SVC, sparse=True, dataset='iris') + self.assertAlmostEqual(0.8649665185853217, + sklearn.metrics.log_loss(targets, + predictions)) From 869ab3bc5e31d983f2a7d4f475439e8d62dc048f Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 7 Oct 2015 11:46:49 +0200 Subject: [PATCH 307/352] Change parameter in KernelPCA --- .../data_preprocessing/one_hot_encoding.py | 12 +++++++++++- .../components/feature_preprocessing/kernel_pca.py | 3 ++- .../data_preprocessing/test_one_hot_encoding.py | 2 +- 3 files changed, 14 insertions(+), 3 deletions(-) diff --git a/ParamSklearn/components/data_preprocessing/one_hot_encoding.py b/ParamSklearn/components/data_preprocessing/one_hot_encoding.py index 80107ea6e7..cf594da1e4 100644 --- a/ParamSklearn/components/data_preprocessing/one_hot_encoding.py +++ b/ParamSklearn/components/data_preprocessing/one_hot_encoding.py @@ -1,3 +1,6 @@ +import numpy as np +import scipy.sparse + import ParamSklearn.implementations.OneHotEncoder from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -37,9 +40,16 @@ def fit(self, X, y=None): return self def transform(self, X): + is_sparse = scipy.sparse.issparse(X) if self.preprocessor is None: raise NotImplementedError() - return self.preprocessor.transform(X) + X = self.preprocessor.transform(X) + if is_sparse: + return X + elif isinstance(X, np.ndarray): + return X + else: + return X.toarray() @staticmethod def get_properties(dataset_properties=None): diff --git a/ParamSklearn/components/feature_preprocessing/kernel_pca.py b/ParamSklearn/components/feature_preprocessing/kernel_pca.py index 07589c2c12..bc2004cdc1 100644 --- a/ParamSklearn/components/feature_preprocessing/kernel_pca.py +++ b/ParamSklearn/components/feature_preprocessing/kernel_pca.py @@ -27,7 +27,8 @@ def __init__(self, n_components, kernel, degree=3, gamma=0.25, coef0=0.0, def fit(self, X, Y=None): self.preprocessor = sklearn.decomposition.KernelPCA( n_components=self.n_components, kernel=self.kernel, - degree=self.degree, gamma=self.gamma, coef0=self.coef0) + degree=self.degree, gamma=self.gamma, coef0=self.coef0, + remove_zero_eig=True) # Make the RuntimeWarning an Exception! if scipy.sparse.issparse(X): X = X.astype(np.float64) diff --git a/tests/components/data_preprocessing/test_one_hot_encoding.py b/tests/components/data_preprocessing/test_one_hot_encoding.py index 067258f6b3..371152926c 100644 --- a/tests/components/data_preprocessing/test_one_hot_encoding.py +++ b/tests/components/data_preprocessing/test_one_hot_encoding.py @@ -68,7 +68,7 @@ def test_default_configuration(self): transformations.append(Xt) if len(transformations) > 1: self.assertFalse( - (transformations[-1].todense() != transformations[-2].todense()).all()) + (transformations[-1] != transformations[-2]).all()) def test_default_configuration_no_encoding(self): transformations = [] From be173f72b699a91aaa69a793c8b10ed167d69d7d Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 7 Oct 2015 13:58:27 +0200 Subject: [PATCH 308/352] Add more specific error message to FastICA --- .../components/feature_preprocessing/fast_ica.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/ParamSklearn/components/feature_preprocessing/fast_ica.py b/ParamSklearn/components/feature_preprocessing/fast_ica.py index 65b63d8cac..86b7538df3 100644 --- a/ParamSklearn/components/feature_preprocessing/fast_ica.py +++ b/ParamSklearn/components/feature_preprocessing/fast_ica.py @@ -29,7 +29,16 @@ def fit(self, X, Y=None): # Make the RuntimeWarning an Exception! with warnings.catch_warnings(): warnings.filterwarnings("error") - self.preprocessor.fit(X) + try: + self.preprocessor.fit(X) + except ValueError as e: + if e.message == 'array must not contain infs or NaNs': + raise ValueError("Bug in scikit-learn: https://github.com/scikit-learn/scikit-learn/pull/2738") + else: + import traceback + traceback.format_exc() + raise ValueError() + return self def transform(self, X): From 642e06edb003a4002c89565e1323d53898b1cdb8 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 7 Oct 2015 14:56:14 +0200 Subject: [PATCH 309/352] Fix: predict_proba for SVC in binary classification --- .../components/classification/libsvm_svc.py | 5 ++-- .../classification/test_libsvm_svc.py | 28 +++++++++++++++++-- 2 files changed, 29 insertions(+), 4 deletions(-) diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index a3cfdb8930..8d79c4dc9f 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -131,9 +131,10 @@ def predict_proba(self, X): raise NotImplementedError() # return self.estimator.predict_proba(X) decision = self.estimator.decision_function(X) - ovr_decision = _ovr_decision_function(decision < 0, decision, + if len(self.estimator.classes_) > 2: + decision = _ovr_decision_function(decision < 0, decision, len(self.estimator.classes_)) - return softmax(ovr_decision) + return softmax(decision) @staticmethod diff --git a/tests/components/classification/test_libsvm_svc.py b/tests/components/classification/test_libsvm_svc.py index 9900f750a4..61ff38c030 100644 --- a/tests/components/classification/test_libsvm_svc.py +++ b/tests/components/classification/test_libsvm_svc.py @@ -1,7 +1,8 @@ import unittest from ParamSklearn.components.classification.libsvm_svc import LibSVM_SVC -from ParamSklearn.util import _test_classifier, _test_classifier_predict_proba +from ParamSklearn.util import _test_classifier, \ + _test_classifier_predict_proba, get_dataset import numpy as np import sklearn.metrics @@ -26,6 +27,29 @@ def test_default_configuration_predict_proba(self): for i in range(10): predictions, targets = _test_classifier_predict_proba( LibSVM_SVC, sparse=True, dataset='iris') - self.assertAlmostEqual(0.8649665185853217, + self.assertAlmostEqual(0.8649665185853217, sklearn.metrics.log_loss(targets, predictions)) + + # 2 class + for i in range(10): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris') + remove_training_data = Y_train == 2 + remove_test_data = Y_test == 2 + X_train = X_train[~remove_training_data] + Y_train = Y_train[~remove_training_data] + X_test = X_test[~remove_test_data] + Y_test = Y_test[~remove_test_data] + ss = sklearn.preprocessing.StandardScaler() + X_train = ss.fit_transform(X_train) + configuration_space = LibSVM_SVC.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + + cls = LibSVM_SVC(random_state=1, **{hp_name: default[hp_name] + for hp_name in default + if default[hp_name] is not None}) + + cls = cls.fit(X_train, Y_train) + prediction = cls.predict_proba(X_test) + self.assertAlmostEqual(sklearn.metrics.log_loss(Y_test, prediction), + 0.69323680119641773) From 28fad1186928ece85c71238f03849ab561a4c56d Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 8 Oct 2015 10:57:50 +0200 Subject: [PATCH 310/352] Check for valid names in argument include --- ParamSklearn/components/classification/__init__.py | 6 ++++++ ParamSklearn/components/feature_preprocessing/__init__.py | 6 ++++++ ParamSklearn/components/regression/__init__.py | 6 ++++++ ParamSklearn/create_searchspace_util.py | 2 +- 4 files changed, 19 insertions(+), 1 deletion(-) diff --git a/ParamSklearn/components/classification/__init__.py b/ParamSklearn/components/classification/__init__.py index fe0bd04c59..a06428a008 100644 --- a/ParamSklearn/components/classification/__init__.py +++ b/ParamSklearn/components/classification/__init__.py @@ -51,6 +51,12 @@ def get_available_components(cls, data_prop, if include is not None and exclude is not None: raise ValueError("The argument include and exclude cannot be used together.") + if include is not None: + for incl in include: + if incl not in available_comp: + raise ValueError("Trying to include unknown component: " + "%s" % incl) + for name in available_comp: if include is not None and name not in include: continue diff --git a/ParamSklearn/components/feature_preprocessing/__init__.py b/ParamSklearn/components/feature_preprocessing/__init__.py index 707d27767c..dd0144eac0 100644 --- a/ParamSklearn/components/feature_preprocessing/__init__.py +++ b/ParamSklearn/components/feature_preprocessing/__init__.py @@ -50,6 +50,12 @@ def get_available_components(cls, data_prop, available_comp = cls.get_components() + if include is not None: + for incl in include: + if incl not in available_comp: + raise ValueError("Trying to include unknown component: " + "%s" % incl) + # TODO check for task type classification and/or regression! components_dict = OrderedDict() diff --git a/ParamSklearn/components/regression/__init__.py b/ParamSklearn/components/regression/__init__.py index f7b3415343..91199da85b 100644 --- a/ParamSklearn/components/regression/__init__.py +++ b/ParamSklearn/components/regression/__init__.py @@ -50,6 +50,12 @@ def get_available_components(cls, data_prop, raise ValueError( "The argument include and exclude cannot be used together.") + if include is not None: + for incl in include: + if incl not in available_comp: + raise ValueError("Trying to include unknown component: " + "%s" % incl) + for name in available_comp: if include is not None and name not in include: continue diff --git a/ParamSklearn/create_searchspace_util.py b/ParamSklearn/create_searchspace_util.py index 4b6622ad98..374c29e11b 100644 --- a/ParamSklearn/create_searchspace_util.py +++ b/ParamSklearn/create_searchspace_util.py @@ -45,7 +45,7 @@ def get_match_array(pipeline, dataset_properties, matches = np.ones(matches_dimensions, dtype=int) pipeline_idxs = [range(dim) for dim in matches_dimensions] - for pipeline_instantiation_idxs in itertools.product(*pipeline_idxs): + for pipeline_instantiation_idxs in itertools.product(*pipeline_idxs): pipeline_instantiation = [node_i_choices[i][idx] for i, idx in enumerate(pipeline_instantiation_idxs)] From 81335738f2fd5d71d1e9aed1c609761599c7d10f Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 8 Oct 2015 11:36:21 +0200 Subject: [PATCH 311/352] Change default if default is forbidden --- ParamSklearn/classification.py | 95 ++++++++++++++++++++++++---------- ParamSklearn/regression.py | 68 +++++++++++++++++------- tests/test_classification.py | 51 ++++-------------- tests/test_regression.py | 49 ++++++------------ 4 files changed, 145 insertions(+), 118 deletions(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 8df3cf01ef..691d539dc1 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -1,4 +1,4 @@ -from collections import OrderedDict +import copy from itertools import product import numpy as np @@ -169,19 +169,37 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, available_preprocessors = pipeline[-2][1].get_available_components( dataset_properties) - # A classifier which can handle sparse data after the densifier + possible_default_classifier = copy.copy(list( + available_classifiers.keys())) + default = cs.get_hyperparameter('classifier:__choice__').default + del possible_default_classifier[possible_default_classifier.index(default)] + + # A classifier which can handle sparse data after the densifier is + # forbidden for memory issues for key in classifiers: if SPARSE in available_classifiers[key].get_properties()['input']: if 'densifier' in preprocessors: - cs.add_forbidden_clause( - ForbiddenAndConjunction( - ForbiddenEqualsClause( - cs.get_hyperparameter( - 'classifier:__choice__'), key), - ForbiddenEqualsClause( - cs.get_hyperparameter( - 'preprocessor:__choice__'), 'densifier') - )) + while True: + try: + cs.add_forbidden_clause( + ForbiddenAndConjunction( + ForbiddenEqualsClause( + cs.get_hyperparameter( + 'classifier:__choice__'), key), + ForbiddenEqualsClause( + cs.get_hyperparameter( + 'preprocessor:__choice__'), 'densifier') + )) + # Success + break + except ValueError: + # Change the default and try again + try: + default = possible_default_classifier.pop() + except IndexError: + raise ValueError("Cannot find a legal default configuration.") + cs.get_hyperparameter( + 'classifier:__choice__').default = default # which would take too long # Combinations of non-linear models with feature learning: @@ -196,14 +214,25 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, continue if f not in preprocessors: continue - try: - cs.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(cs.get_hyperparameter( - "classifier:__choice__"), c), - ForbiddenEqualsClause(cs.get_hyperparameter( - "preprocessor:__choice__"), f))) - except KeyError: - pass + while True: + try: + cs.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(cs.get_hyperparameter( + "classifier:__choice__"), c), + ForbiddenEqualsClause(cs.get_hyperparameter( + "preprocessor:__choice__"), f))) + break + except KeyError: + break + except ValueError as e: + # Change the default and try again + try: + default = possible_default_classifier.pop() + except IndexError: + raise ValueError( + "Cannot find a legal default configuration.") + cs.get_hyperparameter( + 'classifier:__choice__').default = default # Won't work # Multinomial NB etc don't use with features learning, pca etc @@ -216,15 +245,25 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, continue if f not in preprocessors: continue - try: - cs.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(cs.get_hyperparameter( - "preprocessor:__choice__"), f), - ForbiddenEqualsClause(cs.get_hyperparameter( - "classifier:__choice__"), c))) - except KeyError: - pass - + while True: + try: + cs.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(cs.get_hyperparameter( + "preprocessor:__choice__"), f), + ForbiddenEqualsClause(cs.get_hyperparameter( + "classifier:__choice__"), c))) + break + except KeyError: + break + except ValueError: + # Change the default and try again + try: + default = possible_default_classifier.pop() + except IndexError: + raise ValueError( + "Cannot find a legal default configuration.") + cs.get_hyperparameter( + 'classifier:__choice__').default = default return cs diff --git a/ParamSklearn/regression.py b/ParamSklearn/regression.py index 2985d71cd1..21280d5115 100644 --- a/ParamSklearn/regression.py +++ b/ParamSklearn/regression.py @@ -1,4 +1,5 @@ from collections import OrderedDict +import copy from itertools import product import sklearn @@ -146,23 +147,43 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, available_preprocessors = pipeline[-2][1].get_available_components( dataset_properties) + possible_default_regressor = copy.copy(list( + available_regressors.keys())) + default = cs.get_hyperparameter('regressor:__choice__').default + del possible_default_regressor[ + possible_default_regressor.index(default)] + # A regressor which can handle sparse data after the densifier for key in regressors: if SPARSE in available_regressors[key].get_properties(dataset_properties=None)['input']: if 'densifier' in preprocessors: - cs.add_forbidden_clause( - ForbiddenAndConjunction( - ForbiddenEqualsClause( - cs.get_hyperparameter( - 'regressor:__choice__'), key), - ForbiddenEqualsClause( - cs.get_hyperparameter( - 'preprocessor:__choice__'), 'densifier') - )) + while True: + try: + cs.add_forbidden_clause( + ForbiddenAndConjunction( + ForbiddenEqualsClause( + cs.get_hyperparameter( + 'regressor:__choice__'), key), + ForbiddenEqualsClause( + cs.get_hyperparameter( + 'preprocessor:__choice__'), 'densifier') + )) + break + except ValueError: + # Change the default and try again + try: + default = possible_default_regressor.pop() + except IndexError: + raise ValueError( + "Cannot find a legal default configuration.") + cs.get_hyperparameter( + 'regressor:__choice__').default = default # which would take too long # Combinations of tree-based models with feature learning: - regressors_ = ["random_forest", "gradient_boosting", "gaussian_process"] + regressors_ = ["adaboost", "decision_tree", "extra_trees", + "gaussian_process", "gradient_boosting", + "k_nearest_neighbors", "random_forest"] feature_learning_ = ["kitchen_sinks", "kernel_pca", "nystroem_sampler"] for r, f in product(regressors_, feature_learning_): @@ -170,14 +191,25 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, continue if f not in preprocessors: continue - try: - cs.add_forbidden_clause(ForbiddenAndConjunction( - ForbiddenEqualsClause(cs.get_hyperparameter( - "regressor:__choice__"), r), - ForbiddenEqualsClause(cs.get_hyperparameter( - "preprocessor:__choice__"), f))) - except KeyError: - pass + while True: + try: + cs.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(cs.get_hyperparameter( + "regressor:__choice__"), r), + ForbiddenEqualsClause(cs.get_hyperparameter( + "preprocessor:__choice__"), f))) + break + except KeyError: + break + except ValueError: + # Change the default and try again + try: + default = possible_default_regressor.pop() + except IndexError: + raise ValueError( + "Cannot find a legal default configuration.") + cs.get_hyperparameter( + 'regressor:__choice__').default = default return cs diff --git a/tests/test_classification.py b/tests/test_classification.py index 4c4ed7d844..fd1f36a379 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -360,29 +360,16 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): self.assertNotIn('select_percentile_classification', str(cs)) def test_get_hyperparameter_search_space_preprocessor_contradicts_default_classifier(self): - self.assertRaisesRegexp(ValueError, "Configuration:\n" - " balancing:strategy, Value: none\n" - " classifier:__choice__, Value: random_forest\n" - " classifier:random_forest:bootstrap, Value: True\n" - " classifier:random_forest:criterion, Value: gini\n" - " classifier:random_forest:max_depth, Constant: None\n" - " classifier:random_forest:max_features, Value: 1.0\n" - " classifier:random_forest:max_leaf_nodes, Constant: None\n" - " classifier:random_forest:min_samples_leaf, Value: 1\n" - " classifier:random_forest:min_samples_split, Value: 2\n" - " classifier:random_forest:min_weight_fraction_leaf, Constant: 0.0\n" - " classifier:random_forest:n_estimators, Constant: 100\n" - " imputation:strategy, Value: mean\n" - " one_hot_encoding:minimum_fraction, Value: 0.01\n" - " one_hot_encoding:use_minimum_fraction, Value: True\n" - " preprocessor:__choice__, Value: nystroem_sampler\n" - " preprocessor:nystroem_sampler:gamma, Value: 0.1\n" - " preprocessor:nystroem_sampler:kernel, Value: rbf\n" - " preprocessor:nystroem_sampler:n_components, Value: 100\n" - " rescaling:__choice__, Value: min/max\n" - "violates forbidden clause \(Forbidden: classifier:__choice__ == random_forest && Forbidden: preprocessor:__choice__ == nystroem_sampler\)", - ParamSklearnClassifier.get_hyperparameter_search_space, + cs = ParamSklearnClassifier.get_hyperparameter_search_space( + include={'preprocessor': ['densifier']}, + dataset_properties={'sparse': True}) + self.assertEqual(cs.get_hyperparameter('classifier:__choice__').default, + 'qda') + + cs = ParamSklearnClassifier.get_hyperparameter_search_space( include={'preprocessor': ['nystroem_sampler']}) + self.assertEqual(cs.get_hyperparameter('classifier:__choice__').default, + 'sgd') def test_get_hyperparameter_search_space_only_forbidden_combinations(self): self.assertRaisesRegexp(AssertionError, "No valid pipeline found.", @@ -393,24 +380,8 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): # It must also be catched that no classifiers which can handle sparse # data are located behind the densifier - self.assertRaisesRegexp(ValueError, "Configuration:\n" - " balancing:strategy, Value: none\n" - " classifier:__choice__, Value: liblinear_svc\n" - " classifier:liblinear_svc:C, Value: 1.0\n" - " classifier:liblinear_svc:dual, Constant: False\n" - " classifier:liblinear_svc:fit_intercept, Constant: True\n" - " classifier:liblinear_svc:intercept_scaling, Constant: 1\n" - " classifier:liblinear_svc:loss, Value: squared_hinge\n" - " classifier:liblinear_svc:multi_class, Constant: ovr\n" - " classifier:liblinear_svc:penalty, Value: l2\n" - " classifier:liblinear_svc:tol, Value: 0.0001\n" - " imputation:strategy, Value: mean\n" - " one_hot_encoding:minimum_fraction, Value: 0.01\n" - " one_hot_encoding:use_minimum_fraction, Value: True\n" - " preprocessor:__choice__, Value: densifier\n" - " rescaling:__choice__, Value: min/max\n" - "violates forbidden clause \(Forbidden: classifier:__choice__ == liblinear_svc &&" - " Forbidden: preprocessor:__choice__ == densifier\)", + self.assertRaisesRegexp(ValueError, "Cannot find a legal default " + "configuration.", ParamSklearnClassifier.get_hyperparameter_search_space, include={'classifier': ['liblinear_svc'], 'preprocessor': ['densifier']}, diff --git a/tests/test_regression.py b/tests/test_regression.py index 0d7701144e..91316154c7 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -171,45 +171,30 @@ def test_get_hyperparameter_search_space_include_exclude_models(self): exclude={'preprocessor': ['no_preprocessing']}) self.assertNotIn('no_preprocessing', str(cs)) + def test_get_hyperparameter_search_space_preprocessor_contradicts_default_classifier( + self): + cs = ParamSklearnRegressor.get_hyperparameter_search_space( + include={'preprocessor': ['densifier']}, + dataset_properties={'sparse': True}) + self.assertEqual(cs.get_hyperparameter('regressor:__choice__').default, + 'gradient_boosting') + + cs = ParamSklearnRegressor.get_hyperparameter_search_space( + include={'preprocessor': ['nystroem_sampler']}) + self.assertEqual(cs.get_hyperparameter('regressor:__choice__').default, + 'sgd') + def test_get_hyperparameter_search_space_only_forbidden_combinations(self): - self.assertRaisesRegexp(ValueError, "Configuration:\n" - " imputation:strategy, Value: mean\n" - " one_hot_encoding:minimum_fraction, Value: 0.01\n" - " one_hot_encoding:use_minimum_fraction, Value: True\n" - " preprocessor:__choice__, Value: kitchen_sinks\n" - " preprocessor:kitchen_sinks:gamma, Value: 1.0\n" - " preprocessor:kitchen_sinks:n_components, Value: 100\n" - " regressor:__choice__, Value: random_forest\n" - " regressor:random_forest:bootstrap, Value: True\n" - " regressor:random_forest:criterion, Constant: mse\n" - " regressor:random_forest:max_depth, Constant: None\n" - " regressor:random_forest:max_features, Value: 1.0\n" - " regressor:random_forest:max_leaf_nodes, Constant: None\n" - " regressor:random_forest:min_samples_leaf, Value: 1\n" - " regressor:random_forest:min_samples_split, Value: 2\n" - " regressor:random_forest:min_weight_fraction_leaf, Constant: 0.0\n" - " regressor:random_forest:n_estimators, Constant: 100\n" - " rescaling:__choice__, Value: min/max\n" - "violates forbidden clause \(Forbidden: regressor:__choice__ == random_forest" - " && Forbidden: preprocessor:__choice__ == kitchen_sinks\)", + self.assertRaisesRegexp(ValueError, "Cannot find a legal default " + "configuration.", ParamSklearnRegressor.get_hyperparameter_search_space, include={'regressor': ['random_forest'], 'preprocessor': ['kitchen_sinks']}) # It must also be catched that no classifiers which can handle sparse # data are located behind the densifier - self.assertRaisesRegexp(ValueError, "Configuration:\n" - " imputation:strategy, Value: mean\n" - " one_hot_encoding:minimum_fraction, Value: 0.01\n" - " one_hot_encoding:use_minimum_fraction, Value: True\n" - " preprocessor:__choice__, Value: densifier\n" - " regressor:__choice__, Value: ridge_regression\n" - " regressor:ridge_regression:alpha, Value: 1.0\n" - " regressor:ridge_regression:fit_intercept, Constant: True\n" - " regressor:ridge_regression:tol, Value: 0.0001\n" - " rescaling:__choice__, Value: min/max\n" - "violates forbidden clause \(Forbidden: regressor:__choice__ == " - "ridge_regression && Forbidden: preprocessor:__choice__ == densifier\)", + self.assertRaisesRegexp(ValueError, "Cannot find a legal default " + "configuration", ParamSklearnRegressor.get_hyperparameter_search_space, include={'regressor': ['ridge_regression'], 'preprocessor': ['densifier']}, From 3396a20dfbc054aca2fa8cbe8e26c6a15aeaf8aa Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 22 Oct 2015 17:35:01 +0200 Subject: [PATCH 312/352] Fix metalearning if no features file present --- autosklearn/automl.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/autosklearn/automl.py b/autosklearn/automl.py index aed40381fe..ee6a022d05 100644 --- a/autosklearn/automl.py +++ b/autosklearn/automl.py @@ -92,7 +92,7 @@ def _calculate_metafeatures_encoded(basename, x_train, y_train, watcher, task_name = 'CalculateMetafeaturesEncoded' watcher.start_task(task_name) result = calc_meta_features_encoded(X_train=x_train, Y_train=y_train, - categorical=[False] * x_train.shape[0], + categorical=[False] * x_train.shape[1], dataset_name=basename) watcher.stop_task(task_name) logger.info( From 45d8863fee53379622dbe2e5b4eaee08600eab41 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Sat, 31 Oct 2015 13:06:55 +0100 Subject: [PATCH 313/352] ADD multilabel classification --- .../components/classification/adaboost.py | 16 +++- .../implementations/MultilabelClassifier.py | 67 ++++++++++++++++ tests/test_classification.py | 79 +++++++++++++++++++ 3 files changed, 158 insertions(+), 4 deletions(-) create mode 100644 ParamSklearn/implementations/MultilabelClassifier.py diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index 4a1ef153bc..2bdb10ed27 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -1,6 +1,9 @@ import numpy as np import sklearn.ensemble import sklearn.tree +import sklearn.multiclass + +from ParamSklearn.implementations.MultilabelClassifier import MultilabelClassifier from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -27,14 +30,20 @@ def fit(self, X, Y, sample_weight=None): self.max_depth = int(self.max_depth) base_estimator = sklearn.tree.DecisionTreeClassifier(max_depth=self.max_depth) - self.estimator = sklearn.ensemble.AdaBoostClassifier( + estimator = sklearn.ensemble.AdaBoostClassifier( base_estimator=base_estimator, n_estimators=self.n_estimators, learning_rate=self.learning_rate, algorithm=self.algorithm, random_state=self.random_state ) - self.estimator.fit(X, Y, sample_weight=sample_weight) + + if len(Y.shape) == 2 and Y.shape[1] > 1: + self.estimator = MultilabelClassifier(estimator, n_jobs=1) + self.estimator.fit(X, Y, sample_weight=sample_weight) + else: + self.estimator.fit(X, Y, sample_weight=sample_weight) + return self def predict(self, X): @@ -55,12 +64,11 @@ def get_properties(dataset_properties=None): 'handles_nominal_values': False, 'handles_numerical_features': True, 'prefers_data_scaled': False, - # TODO find out if this is good because of sparcity... 'prefers_data_normalized': False, 'handles_regression': False, 'handles_classification': True, 'handles_multiclass': True, - 'handles_multilabel': False, + 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, SPARSE, UNSIGNED_DATA), diff --git a/ParamSklearn/implementations/MultilabelClassifier.py b/ParamSklearn/implementations/MultilabelClassifier.py new file mode 100644 index 0000000000..de9b1c4030 --- /dev/null +++ b/ParamSklearn/implementations/MultilabelClassifier.py @@ -0,0 +1,67 @@ +import warnings +import numpy as np + +from sklearn.base import clone +from sklearn.preprocessing import LabelBinarizer +from sklearn.multiclass import OneVsRestClassifier, _ConstantPredictor + + +def _fit_binary(estimator, X, y, classes=None, sample_weight=None): + """Fit a single binary estimator.""" + unique_y = np.unique(y) + if len(unique_y) == 1: + if classes is not None: + if y[0] == -1: + c = 0 + else: + c = y[0] + warnings.warn("Label %s is present in all training examples." % + str(classes[c])) + estimator = _ConstantPredictor().fit(X, unique_y) + else: + estimator = clone(estimator) + estimator.fit(X, y, sample_weight=None) + return estimator + + +class MultilabelClassifier(OneVsRestClassifier): + """Subclasses sklearn.multiclass.OneVsRestClassifier in order to add + sample weights. Works as original code, but forwards sample_weihts to + base estimator + + Taken from: + https://github.com/scikit-learn/scikit-learn/blob/a95203b/sklearn/multiclass.py#L203 + """ + + def fit(self, X, y, sample_weight=None): + """Fit underlying estimators. + Parameters + ---------- + X : (sparse) array-like, shape = [n_samples, n_features] + Data. + y : (sparse) array-like, shape = [n_samples] or [n_samples, n_classes] + Multi-class targets. An indicator matrix turns on multilabel + classification. + Returns + ------- + self + """ + # A sparse LabelBinarizer, with sparse_output=True, has been shown to + # outpreform or match a dense label binarizer in all cases and has also + # resulted in less or equal memory consumption in the fit_ovr function + # overall. + self.label_binarizer_ = LabelBinarizer(sparse_output=True) + Y = self.label_binarizer_.fit_transform(y) + Y = Y.tocsc() + columns = (col.toarray().ravel() for col in Y.T) + # In cases where individual estimators are very fast to train setting + # n_jobs > 1 in can results in slower performance due to the overhead + # of spawning threads. See joblib issue #112. + self.estimators_ = [_fit_binary(estimator=self.estimator, + X=X, y=column, + classes=["not %s" % self.label_binarizer_.classes_[i], self.label_binarizer_.classes_[i]], + sample_weight=sample_weight) + for i, column in enumerate(columns)] + + return self + diff --git a/tests/test_classification.py b/tests/test_classification.py index fd1f36a379..0bf2ef28fb 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -79,6 +79,85 @@ def test_default_configuration(self): sklearn.metrics.accuracy_score(predictions, Y_test)) scores = auto.predict_proba(X_test) + def test_multilabel(self): + # Use a limit of ~4GiB + limit = 4000 * 1024 * 1024 + resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) + + dataset_properties = {'multilabel': True} + cs = ParamSklearnClassifier.get_hyperparameter_search_space(dataset_properties=dataset_properties) + + print(cs) + cs.seed(1) + + for i in range(10): + + X, Y = sklearn.datasets.\ + make_multilabel_classification(n_samples=100, + n_features=20, + n_classes=5, + n_labels=2, + length=50, + allow_unlabeled=True, + sparse=False, + return_indicator=True, + return_distributions=False, + random_state=1) + X_train = X[:80, :] + Y_train = Y[:80, :] + X_test = X[81:, :] + Y_test = Y[81:, ] + + config = cs.sample_configuration() + config._populate_values() + + if 'classifier:passive_aggressive:n_iter' in config: + config._values['classifier:passive_aggressive:n_iter'] = 5 + if 'classifier:sgd:n_iter' in config: + config._values['classifier:sgd:n_iter'] = 5 + + cls = ParamSklearnClassifier(config, random_state=1) + print(config) + try: + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + predictions = cls.predict(X_test) + self.assertIsInstance(predictions, np.ndarray) + predicted_probabilities = cls.predict_proba(X_test_) + [self.assertIsInstance(i, np.ndarray) for i in predicted_probabilities] + except ValueError as e: + if "Floating-point under-/overflow occurred at epoch" in \ + e.args[0] or \ + "removed all features" in e.args[0] or \ + "all features are discarded" in e.args[0]: + continue + else: + print(config) + print(traceback.format_exc()) + raise e + except RuntimeWarning as e: + if "invalid value encountered in sqrt" in e.args[0]: + continue + elif "divide by zero encountered in" in e.args[0]: + continue + elif "invalid value encountered in divide" in e.args[0]: + continue + elif "invalid value encountered in true_divide" in e.args[0]: + continue + else: + print(config) + print(traceback.format_exc()) + raise e + except UserWarning as e: + if "FastICA did not converge" in e.args[0]: + continue + else: + print(config) + print(traceback.format_exc()) + raise e + except MemoryError as e: + continue + def test_configurations(self): # Use a limit of ~4GiB limit = 4000 * 1024 * 1024 From 6158a8a3e9563699e7a70b381034ec52dd1e7cb8 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Sat, 31 Oct 2015 14:06:56 +0100 Subject: [PATCH 314/352] ADD option ta handle multilabel datasets using OneVsAllClassifier from sklearn --- .../classification/k_nearest_neighbors.py | 18 +++++++++++------ ParamSklearn/components/classification/lda.py | 20 ++++++++++++------- tests/test_classification.py | 3 +-- 3 files changed, 26 insertions(+), 15 deletions(-) diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index da31a35610..6a14d032ca 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -1,4 +1,5 @@ import sklearn.neighbors +import sklearn.multiclass from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ @@ -17,11 +18,16 @@ def __init__(self, n_neighbors, weights, p, random_state=None): self.random_state = random_state def fit(self, X, Y): - self.estimator = \ - sklearn.neighbors.KNeighborsClassifier( - n_neighbors=self.n_neighbors, - weights=self.weights, - p=self.p) + estimator = \ + sklearn.neighbors.KNeighborsClassifier(n_neighbors=self.n_neighbors, + weights=self.weights, + p=self.p) + + if len(Y.shape) == 2 and Y.shape[1] > 1: + self.estimator = sklearn.multiclass.OneVsRestClassifier(estimator, n_jobs=1) + else: + self.estimator = estimator + self.estimator.fit(X, Y) return self @@ -48,7 +54,7 @@ def get_properties(dataset_properties=None): 'handles_regression': False, 'handles_classification': True, 'handles_multiclass': True, - 'handles_multilabel': False, + 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': True, 'input': (DENSE, SPARSE, UNSIGNED_DATA), diff --git a/ParamSklearn/components/classification/lda.py b/ParamSklearn/components/classification/lda.py index 86052b66cd..98c46dfafd 100644 --- a/ParamSklearn/components/classification/lda.py +++ b/ParamSklearn/components/classification/lda.py @@ -1,4 +1,5 @@ import sklearn.lda +import sklearn.multiclass from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -11,7 +12,6 @@ from ParamSklearn.implementations.util import softmax - class LDA(ParamSklearnClassificationAlgorithm): def __init__(self, shrinkage, n_components, tol, shrinkage_factor=0.5, random_state=None): @@ -36,11 +36,17 @@ def fit(self, X, Y): self.n_components = int(self.n_components) self.tol = float(self.tol) - self.estimator = sklearn.lda.LDA(n_components=self.n_components, - shrinkage=self.shrinkage, - tol=self.tol, - solver=solver) - self.estimator.fit(X, Y, tol=self.tol) + estimator = sklearn.lda.LDA(n_components=self.n_components, + shrinkage=self.shrinkage, + tol=self.tol, + solver=solver) + + if len(Y.shape) == 2 and Y.shape[1] > 1: + self.estimator = sklearn.multiclass.OneVsRestClassifier(estimator, n_jobs=1) + else: + self.estimator = estimator + + self.estimator.fit(X, Y) return self def predict(self, X): @@ -68,7 +74,7 @@ def get_properties(dataset_properties=None): 'handles_regression': False, 'handles_classification': True, 'handles_multiclass': True, - 'handles_multilabel': False, + 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, UNSIGNED_DATA), diff --git a/tests/test_classification.py b/tests/test_classification.py index 0bf2ef28fb..37ebbda904 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -90,8 +90,7 @@ def test_multilabel(self): print(cs) cs.seed(1) - for i in range(10): - + for i in range(50): X, Y = sklearn.datasets.\ make_multilabel_classification(n_samples=100, n_features=20, From f7e279777995a716dfdee01cc37aed5e94eafa09 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Sat, 31 Oct 2015 15:20:49 +0100 Subject: [PATCH 315/352] ADD multilabel support --- .../classification/liblinear_svc.py | 29 ++++++++++++------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/ParamSklearn/components/classification/liblinear_svc.py b/ParamSklearn/components/classification/liblinear_svc.py index 96b9399862..aef4abb7e0 100644 --- a/ParamSklearn/components/classification/liblinear_svc.py +++ b/ParamSklearn/components/classification/liblinear_svc.py @@ -1,4 +1,5 @@ import sklearn.svm +import sklearn.multiclass from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -39,16 +40,22 @@ def fit(self, X, Y): if self.class_weight == "None": self.class_weight = None - self.estimator = sklearn.svm.LinearSVC(penalty=self.penalty, - loss=self.loss, - dual=self.dual, - tol=self.tol, - C=self.C, - class_weight=self.class_weight, - fit_intercept=self.fit_intercept, - intercept_scaling=self.intercept_scaling, - multi_class=self.multi_class, - random_state=self.random_state) + estimator = sklearn.svm.LinearSVC(penalty=self.penalty, + loss=self.loss, + dual=self.dual, + tol=self.tol, + C=self.C, + class_weight=self.class_weight, + fit_intercept=self.fit_intercept, + intercept_scaling=self.intercept_scaling, + multi_class=self.multi_class, + random_state=self.random_state) + + if len(Y.shape) == 2 and Y.shape[1] > 1: + self.estimator = sklearn.multiclass.OneVsRestClassifier(estimator, n_jobs=1) + else: + self.estimator = estimator + self.estimator.fit(X, Y) return self @@ -77,7 +84,7 @@ def get_properties(dataset_properties=None): 'handles_regression': False, 'handles_classification': True, 'handles_multiclass': True, - 'handles_multilabel': False, + 'handles_multilabel': True, 'is_deterministic': False, 'handles_sparse': True, 'input': (SPARSE, DENSE, UNSIGNED_DATA), From 17a84b91bc0385b9ca7521cc9804fcc21b0a1506 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Sat, 31 Oct 2015 15:23:29 +0100 Subject: [PATCH 316/352] ADD multilabel support --- ParamSklearn/components/classification/qda.py | 20 ++++++++++--------- tests/test_classification.py | 12 +++++------ 2 files changed, 17 insertions(+), 15 deletions(-) diff --git a/ParamSklearn/components/classification/qda.py b/ParamSklearn/components/classification/qda.py index f0f9c1382e..fed8e7e73c 100644 --- a/ParamSklearn/components/classification/qda.py +++ b/ParamSklearn/components/classification/qda.py @@ -10,15 +10,20 @@ class QDA(ParamSklearnClassificationAlgorithm): - def __init__(self, reg_param, tol, random_state=None): + + def __init__(self, reg_param, random_state=None): self.reg_param = float(reg_param) - self.tol = float(tol) self.estimator = None def fit(self, X, Y): + estimator = sklearn.qda.QDA(self.reg_param) + + if len(Y.shape) == 2 and Y.shape[1] > 1: + self.estimator = sklearn.multiclass.OneVsRestClassifier(estimator, n_jobs=1) + else: + self.estimator = estimator - self.estimator = sklearn.qda.QDA(self.reg_param) - self.estimator.fit(X, Y, tol=self.tol) + self.estimator.fit(X, Y) return self def predict(self, X): @@ -46,7 +51,7 @@ def get_properties(dataset_properties=None): 'handles_regression': False, 'handles_classification': True, 'handles_multiclass': True, - 'handles_multilabel': False, + 'handles_multilabel': True, 'is_deterministic': True, 'handles_sparse': False, 'input': (DENSE, UNSIGNED_DATA), @@ -57,10 +62,7 @@ def get_properties(dataset_properties=None): @staticmethod def get_hyperparameter_search_space(dataset_properties=None): reg_param = UniformFloatHyperparameter('reg_param', 0.0, 10.0, - default=0.5) - tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default=1e-4, - log=True) + default=0.5) cs = ConfigurationSpace() cs.add_hyperparameter(reg_param) - cs.add_hyperparameter(tol) return cs diff --git a/tests/test_classification.py b/tests/test_classification.py index 37ebbda904..ae018b310f 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -88,11 +88,11 @@ def test_multilabel(self): cs = ParamSklearnClassifier.get_hyperparameter_search_space(dataset_properties=dataset_properties) print(cs) - cs.seed(1) + cs.seed(5) for i in range(50): X, Y = sklearn.datasets.\ - make_multilabel_classification(n_samples=100, + make_multilabel_classification(n_samples=150, n_features=20, n_classes=5, n_labels=2, @@ -102,10 +102,10 @@ def test_multilabel(self): return_indicator=True, return_distributions=False, random_state=1) - X_train = X[:80, :] - Y_train = Y[:80, :] - X_test = X[81:, :] - Y_test = Y[81:, ] + X_train = X[:100, :] + Y_train = Y[:100, :] + X_test = X[101:, :] + Y_test = Y[101:, ] config = cs.sample_configuration() config._populate_values() From 7d3909ef953a356132ba5bf35760ac5814291367 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Sat, 31 Oct 2015 17:11:57 +0100 Subject: [PATCH 317/352] Add __repr__ to ParamSklearnBaseEstimator --- ParamSklearn/base.py | 18 +++++++++++++++++- misc/create_hyperparameter_table.py | 14 ++++++++------ tests/test_classification.py | 7 +++++++ tests/test_regression.py | 7 +++++++ 4 files changed, 39 insertions(+), 7 deletions(-) diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index 0ac5e66392..dddfd0dc6c 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -323,6 +323,23 @@ def _get_hyperparameter_search_space(cls, cs, dataset_properties, exclude, return cs + def __repr__(self): + class_name = self.__class__.__name__ + + configuration = {} + self.configuration._populate_values() + for hp_name in self.configuration: + if self.configuration[hp_name] is not None: + configuration[hp_name] = self.configuration[hp_name] + + configuration_string = ''.join( + ['configuration={\n ', + ',\n '.join(["'%s': %s" % (hp_name, repr(configuration[hp_name])) + for hp_name in sorted(configuration)]), + '}']) + + return '%s(%s)' % (class_name, configuration_string) + @classmethod def _get_pipeline(cls): if cls == ParamSklearnBaseEstimator: @@ -332,4 +349,3 @@ def _get_pipeline(cls): def _get_estimator_hyperparameter_name(self): raise NotImplementedError() - diff --git a/misc/create_hyperparameter_table.py b/misc/create_hyperparameter_table.py index eb809d4a3c..198decac80 100644 --- a/misc/create_hyperparameter_table.py +++ b/misc/create_hyperparameter_table.py @@ -69,9 +69,9 @@ def get_dict(task_type="classifier", **kwargs): estimator = None for h in cs.get_hyperparameters(): - if h.name == "preprocessor": + if h.name == "preprocessor:__choice__": preprocessor = h - elif h.name == task_type: + elif h.name == (task_type + ':__choice__'): estimator = h if estimator is None: @@ -98,11 +98,12 @@ def get_dict(task_type="classifier", **kwargs): preprocessor_dict[i][UN] = 0 for h in cs.get_hyperparameters(): - if h.name == "preprocessor" or h.name == task_type: + if h.name == "preprocessor:__choice__" or \ + h.name == (task_type + ':__choice__'): continue # walk over both dicts for d in (estimator_dict, preprocessor_dict): - est = h.name.split(":")[0] + est = h.name.split(":")[1] if est not in d: continue if isinstance(h, HPOlibConfigSpace.hyperparameters.UniformIntegerHyperparameter): @@ -119,14 +120,15 @@ def get_dict(task_type="classifier", **kwargs): raise ValueError("Don't know that type: %s" % type(h)) for h in cs.get_conditions(): - if h.parent.name == task_type or h.parent.name == "preprocessor": + if h.parent.name == (task_type + ':__choice__') or h.parent.name == \ + "preprocessor:__choice__": # ignore this condition # print "IGNORE", h continue # walk over both dicts and collect hyperparams for d in (estimator_dict, preprocessor_dict): - est = h.child.name.split(":")[0] + est = h.child.name.split(":")[1] if est not in d: #print "Could not find %s" % est continue diff --git a/tests/test_classification.py b/tests/test_classification.py index fd1f36a379..c0013fe710 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -79,6 +79,13 @@ def test_default_configuration(self): sklearn.metrics.accuracy_score(predictions, Y_test)) scores = auto.predict_proba(X_test) + def test_repr(self): + cs = ParamSklearnClassifier.get_hyperparameter_search_space() + default = cs.get_default_configuration() + representation = repr(ParamSklearnClassifier(default)) + cls = eval(representation) + self.assertIsInstance(cls, ParamSklearnClassifier) + def test_configurations(self): # Use a limit of ~4GiB limit = 4000 * 1024 * 1024 diff --git a/tests/test_regression.py b/tests/test_regression.py index 91316154c7..aad8005b3a 100644 --- a/tests/test_regression.py +++ b/tests/test_regression.py @@ -143,6 +143,13 @@ def test_default_configuration(self): model_score = auto.score(copy.deepcopy(X_test), Y_test) self.assertEqual(model_score, r2_score) + def test_repr(self): + cs = ParamSklearnRegressor.get_hyperparameter_search_space() + default = cs.get_default_configuration() + representation = repr(ParamSklearnRegressor(default)) + cls = eval(representation) + self.assertIsInstance(cls, ParamSklearnRegressor) + def test_get_hyperparameter_search_space(self): cs = ParamSklearnRegressor.get_hyperparameter_search_space() self.assertIsInstance(cs, ConfigurationSpace) From dad0904f82e839571f8939f8583b734c1f460dd5 Mon Sep 17 00:00:00 2001 From: Marius Lindauer Date: Thu, 12 Nov 2015 11:36:49 +0100 Subject: [PATCH 318/352] * setting some parameters to log-scale in kNN and SGD --- .../components/classification/k_nearest_neighbors.py | 2 +- ParamSklearn/components/classification/sgd.py | 6 +++--- ParamSklearn/components/regression/k_nearest_neighbors.py | 2 +- ParamSklearn/components/regression/sgd.py | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index 6a14d032ca..acb604cfc4 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -67,7 +67,7 @@ def get_hyperparameter_search_space(dataset_properties=None): cs = ConfigurationSpace() n_neighbors = cs.add_hyperparameter(UniformIntegerHyperparameter( - name="n_neighbors", lower=1, upper=100, default=1)) + name="n_neighbors", lower=1, upper=100, log=True, default=1)) weights = cs.add_hyperparameter(CategoricalHyperparameter( name="weights", choices=["uniform", "distance"], default="uniform")) p = cs.add_hyperparameter(CategoricalHyperparameter( diff --git a/ParamSklearn/components/classification/sgd.py b/ParamSklearn/components/classification/sgd.py index 377c88e245..b44fa25da5 100644 --- a/ParamSklearn/components/classification/sgd.py +++ b/ParamSklearn/components/classification/sgd.py @@ -116,17 +116,17 @@ def get_hyperparameter_search_space(dataset_properties=None): loss = cs.add_hyperparameter(CategoricalHyperparameter("loss", ["hinge", "log", "modified_huber", "squared_hinge", "perceptron"], - default="hinge")) + default="log")) penalty = cs.add_hyperparameter(CategoricalHyperparameter( "penalty", ["l1", "l2", "elasticnet"], default="l2")) alpha = cs.add_hyperparameter(UniformFloatHyperparameter( "alpha", 10e-7, 1e-1, log=True, default=0.0001)) l1_ratio = cs.add_hyperparameter(UniformFloatHyperparameter( - "l1_ratio", 0, 1, default=0.15)) + "l1_ratio", 0, 1, log=True, default=0.15)) fit_intercept = cs.add_hyperparameter(UnParametrizedHyperparameter( "fit_intercept", "True")) n_iter = cs.add_hyperparameter(UniformIntegerHyperparameter( - "n_iter", 5, 1000, default=20)) + "n_iter", 5, 1000, log=True, default=20)) epsilon = cs.add_hyperparameter(UniformFloatHyperparameter( "epsilon", 1e-5, 1e-1, default=1e-4, log=True)) learning_rate = cs.add_hyperparameter(CategoricalHyperparameter( diff --git a/ParamSklearn/components/regression/k_nearest_neighbors.py b/ParamSklearn/components/regression/k_nearest_neighbors.py index c7e19ca6ce..335fd66eaf 100644 --- a/ParamSklearn/components/regression/k_nearest_neighbors.py +++ b/ParamSklearn/components/regression/k_nearest_neighbors.py @@ -55,7 +55,7 @@ def get_hyperparameter_search_space(dataset_properties=None): cs = ConfigurationSpace() n_neighbors = cs.add_hyperparameter(UniformIntegerHyperparameter( - name="n_neighbors", lower=1, upper=100, default=1)) + name="n_neighbors", lower=1, upper=100, log=True, default=1)) weights = cs.add_hyperparameter(CategoricalHyperparameter( name="weights", choices=["uniform", "distance"], default="uniform")) p = cs.add_hyperparameter(CategoricalHyperparameter( diff --git a/ParamSklearn/components/regression/sgd.py b/ParamSklearn/components/regression/sgd.py index 5385540c85..6606fd4106 100644 --- a/ParamSklearn/components/regression/sgd.py +++ b/ParamSklearn/components/regression/sgd.py @@ -120,11 +120,11 @@ def get_hyperparameter_search_space(dataset_properties=None): alpha = cs.add_hyperparameter(UniformFloatHyperparameter( "alpha", 10e-7, 1e-1, log=True, default=0.01)) l1_ratio = cs.add_hyperparameter(UniformFloatHyperparameter( - "l1_ratio", 0., 1., default=0.15)) + "l1_ratio", 0., 1., log=True, default=0.15)) fit_intercept = cs.add_hyperparameter(UnParametrizedHyperparameter( "fit_intercept", "True")) n_iter = cs.add_hyperparameter(UniformIntegerHyperparameter( - "n_iter", 5, 1000, default=20)) + "n_iter", 5, 1000, log=True, default=20)) epsilon = cs.add_hyperparameter(UniformFloatHyperparameter( "epsilon", 1e-5, 1e-1, default=1e-4, log=True)) learning_rate = cs.add_hyperparameter(CategoricalHyperparameter( From 57514266efedfaa15547e2d33dd101490dc15b1d Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Sat, 14 Nov 2015 00:01:18 +0100 Subject: [PATCH 319/352] Fix configuration spaces --- ParamSklearn/components/classification/adaboost.py | 7 ++++--- .../components/classification/passive_aggressive.py | 3 ++- ParamSklearn/components/classification/sgd.py | 2 +- ParamSklearn/components/regression/sgd.py | 2 +- source/first_steps.rst | 4 ++-- tests/components/classification/test_sgd.py | 8 ++++---- tests/components/data_preprocessing/test_balancing.py | 6 +++--- tests/test_classification.py | 2 +- 8 files changed, 18 insertions(+), 16 deletions(-) diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index 2bdb10ed27..df8116aa4f 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -39,11 +39,12 @@ def fit(self, X, Y, sample_weight=None): ) if len(Y.shape) == 2 and Y.shape[1] > 1: - self.estimator = MultilabelClassifier(estimator, n_jobs=1) - self.estimator.fit(X, Y, sample_weight=sample_weight) + estimator = MultilabelClassifier(estimator, n_jobs=1) + estimator.fit(X, Y, sample_weight=sample_weight) else: - self.estimator.fit(X, Y, sample_weight=sample_weight) + estimator.fit(X, Y, sample_weight=sample_weight) + self.estimator = estimator return self def predict(self, X): diff --git a/ParamSklearn/components/classification/passive_aggressive.py b/ParamSklearn/components/classification/passive_aggressive.py index 8f870dbf65..473e5d1a4f 100644 --- a/ParamSklearn/components/classification/passive_aggressive.py +++ b/ParamSklearn/components/classification/passive_aggressive.py @@ -87,7 +87,8 @@ def get_hyperparameter_search_space(dataset_properties=None): ["hinge", "squared_hinge"], default="hinge") fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True") - n_iter = UniformIntegerHyperparameter("n_iter", 5, 1000, default=20) + n_iter = UniformIntegerHyperparameter("n_iter", 5, 1000, default=20, + log=True) C = UniformFloatHyperparameter("C", 1e-5, 10, 1, log=True) cs = ConfigurationSpace() cs.add_hyperparameter(loss) diff --git a/ParamSklearn/components/classification/sgd.py b/ParamSklearn/components/classification/sgd.py index b44fa25da5..740ff3c051 100644 --- a/ParamSklearn/components/classification/sgd.py +++ b/ParamSklearn/components/classification/sgd.py @@ -122,7 +122,7 @@ def get_hyperparameter_search_space(dataset_properties=None): alpha = cs.add_hyperparameter(UniformFloatHyperparameter( "alpha", 10e-7, 1e-1, log=True, default=0.0001)) l1_ratio = cs.add_hyperparameter(UniformFloatHyperparameter( - "l1_ratio", 0, 1, log=True, default=0.15)) + "l1_ratio", 1e-9, 1, log=True, default=0.15)) fit_intercept = cs.add_hyperparameter(UnParametrizedHyperparameter( "fit_intercept", "True")) n_iter = cs.add_hyperparameter(UniformIntegerHyperparameter( diff --git a/ParamSklearn/components/regression/sgd.py b/ParamSklearn/components/regression/sgd.py index 6606fd4106..a9dbb56753 100644 --- a/ParamSklearn/components/regression/sgd.py +++ b/ParamSklearn/components/regression/sgd.py @@ -120,7 +120,7 @@ def get_hyperparameter_search_space(dataset_properties=None): alpha = cs.add_hyperparameter(UniformFloatHyperparameter( "alpha", 10e-7, 1e-1, log=True, default=0.01)) l1_ratio = cs.add_hyperparameter(UniformFloatHyperparameter( - "l1_ratio", 0., 1., log=True, default=0.15)) + "l1_ratio", 1e-9, 1., log=True, default=0.15)) fit_intercept = cs.add_hyperparameter(UnParametrizedHyperparameter( "fit_intercept", "True")) n_iter = cs.add_hyperparameter(UniformIntegerHyperparameter( diff --git a/source/first_steps.rst b/source/first_steps.rst index 245f4332a2..3520d15200 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -17,10 +17,10 @@ configuration on the iris dataset. >>> np.random.seed(1) >>> np.random.shuffle(indices) >>> configuration_space = ParamSklearnClassifier.get_hyperparameter_search_space() - >>> configuration_space.seed(2) + >>> configuration_space.seed(1) >>> configuration = configuration_space.sample_configuration() >>> cls = ParamSklearnClassifier(configuration, random_state=1) >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.92000000000000004 + 0.93999999999999995 diff --git a/tests/components/classification/test_sgd.py b/tests/components/classification/test_sgd.py index 8310b742ff..488902f6fd 100644 --- a/tests/components/classification/test_sgd.py +++ b/tests/components/classification/test_sgd.py @@ -10,7 +10,7 @@ class SGDComponentTest(unittest.TestCase): def test_default_configuration(self): for i in range(10): predictions, targets = _test_classifier(SGD) - self.assertAlmostEqual(1.0, + self.assertAlmostEqual(0.95999999999999996, sklearn.metrics.accuracy_score(predictions, targets)) @@ -18,7 +18,7 @@ def test_default_configuration_iterative_fit(self): for i in range(10): predictions, targets = _test_classifier_iterative_fit( SGD) - self.assertAlmostEqual(1.0, + self.assertAlmostEqual(0.95999999999999996, sklearn.metrics.accuracy_score( predictions, targets)) @@ -26,7 +26,7 @@ def test_default_configuration_digits(self): for i in range(10): predictions, targets = \ _test_classifier(SGD, dataset='digits') - self.assertAlmostEqual(0.89313904068002425, + self.assertAlmostEqual(0.91438979963570133, sklearn.metrics.accuracy_score(predictions, targets)) @@ -35,6 +35,6 @@ def test_default_configuration_digits_iterative_fit(self): predictions, targets = _test_classifier_iterative_fit( SGD, dataset='digits') - self.assertAlmostEqual(0.89313904068002425, + self.assertAlmostEqual(0.91438979963570133, sklearn.metrics.accuracy_score( predictions, targets)) \ No newline at end of file diff --git a/tests/components/data_preprocessing/test_balancing.py b/tests/components/data_preprocessing/test_balancing.py index f4cea80ad4..9b6af91d76 100644 --- a/tests/components/data_preprocessing/test_balancing.py +++ b/tests/components/data_preprocessing/test_balancing.py @@ -69,7 +69,7 @@ def test_weighting_effect(self): ('random_forest', RandomForest, 0.886, 0.885), ('libsvm_svc', LibSVM_SVC, 0.915, 0.937), ('liblinear_svc', LibLinear_SVC, 0.920, 0.923), - ('sgd', SGD, 0.811, 0.902)]: + ('sgd', SGD, 0.908, 0.901)]: for strategy, acc in [('none', acc_no_weighting), ('weighting', acc_weighting)]: # Fit @@ -102,9 +102,9 @@ def test_weighting_effect(self): for name, pre, acc_no_weighting, acc_weighting in \ [('extra_trees_preproc_for_classification', - ExtraTreesPreprocessor, 0.892, 0.910), + ExtraTreesPreprocessor, 0.911, 0.902), ('liblinear_svc_preprocessor', LibLinear_Preprocessor, - 0.906, 0.909)]: + 0.893, 0.894)]: for strategy, acc in [('none', acc_no_weighting), ('weighting', acc_weighting)]: diff --git a/tests/test_classification.py b/tests/test_classification.py index 0c78a57b61..b88ce98fdd 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -415,7 +415,7 @@ def test_get_hyperparameter_search_space(self): 'preprocessor:__choice__').choices), 14) hyperparameters = cs.get_hyperparameters() - self.assertEqual(145, len(hyperparameters)) + self.assertEqual(144, len(hyperparameters)) #for hp in sorted([str(h) for h in hyperparameters]): # print hp From c013f894e9b4f49308df1d60d7decf353c154f43 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 16 Nov 2015 14:50:37 +0100 Subject: [PATCH 320/352] ADD travis support --- .travis.yml | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000000..d5c8bf1832 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,36 @@ +language: python +python: + - "2.7" + - "3.4" + +cache: pip +cache: + directories: + - $HOME/.cache/pip + +sudo: false + +# command to install dependencies, e.g. pip install -r requirements.txt --use-mirrors + +addons: + apt: + packages: + - libatlas-dev + - liblapack-dev + +#before_install: +# - sudo apt-get install -q libatlas3gf-base libatlas-dev liblapack-dev gfortran + +install: + - easy_install -U distribute + - pip install numpy==1.6.1 + - pip install scipy==0.16.1 + - pip install scikit-learn==0.16.1 + + # Install requirements from other repos + - pip install git+https://github.com/automl/HPOlibConfigSpace.git + + - python setup.py install + +# command to run tests, e.g. python setup.py test +script: python setup.py test \ No newline at end of file From 1d00fa31fbccc9154ccd370b37c1caf71d152c66 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 16 Nov 2015 14:57:05 +0100 Subject: [PATCH 321/352] UPDATE travis, more apt packages --- .travis.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.travis.yml b/.travis.yml index d5c8bf1832..4ee0e13628 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,6 +17,8 @@ addons: packages: - libatlas-dev - liblapack-dev + - libatlas-base-dev + - gfortran #before_install: # - sudo apt-get install -q libatlas3gf-base libatlas-dev liblapack-dev gfortran From 2362d827e2f816bf4099635199a953744c4ef76e Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 16 Nov 2015 16:06:12 +0100 Subject: [PATCH 322/352] UPDATE travis; require numpy 1.9.0 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 4ee0e13628..3945a28759 100644 --- a/.travis.yml +++ b/.travis.yml @@ -25,7 +25,7 @@ addons: install: - easy_install -U distribute - - pip install numpy==1.6.1 + - pip install numpy==1.9.0 - pip install scipy==0.16.1 - pip install scikit-learn==0.16.1 From 08c9827527e74e7c10373a5dd6b3a9846261bb77 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 16 Nov 2015 16:21:21 +0100 Subject: [PATCH 323/352] ADD landscape --- .landscape.yaml | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 .landscape.yaml diff --git a/.landscape.yaml b/.landscape.yaml new file mode 100644 index 0000000000..feb92d3c4f --- /dev/null +++ b/.landscape.yaml @@ -0,0 +1,5 @@ +doc-warnings: no +test-warnings: no +strictness: medium +max-line-length: 80 +autodetect: yes \ No newline at end of file From 318c0fbbf7a372b25f828c916afd32efc413a907 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 16 Nov 2015 16:42:11 +0100 Subject: [PATCH 324/352] UPDATE test and travis --- .travis.yml | 9 +-------- tests/components/regression/test_sgd.py | 4 ++-- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/.travis.yml b/.travis.yml index 3945a28759..c9970888db 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,14 +4,10 @@ python: - "3.4" cache: pip -cache: - directories: - - $HOME/.cache/pip sudo: false -# command to install dependencies, e.g. pip install -r requirements.txt --use-mirrors - +# command to install dependencies addons: apt: packages: @@ -20,9 +16,6 @@ addons: - libatlas-base-dev - gfortran -#before_install: -# - sudo apt-get install -q libatlas3gf-base libatlas-dev liblapack-dev gfortran - install: - easy_install -U distribute - pip install numpy==1.9.0 diff --git a/tests/components/regression/test_sgd.py b/tests/components/regression/test_sgd.py index 719475465d..03a6501e07 100644 --- a/tests/components/regression/test_sgd.py +++ b/tests/components/regression/test_sgd.py @@ -24,7 +24,7 @@ def test_default_configuration_iterative_fit(self): def test_default_configuration_digits(self): for i in range(10): predictions, targets = _test_regressor(SGD, dataset='boston') - self.assertAlmostEqual(-2.9165866511775519e+31, + self.assertAlmostEqual(-2.916586651177552e+31, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) @@ -32,6 +32,6 @@ def test_default_configuration_digits_iterative_fit(self): for i in range(10): predictions, targets = _test_regressor_iterative_fit(SGD, dataset='boston') - self.assertAlmostEqual(-2.9165866511775519e+31, + self.assertAlmostEqual(-2.916586651177552e+31, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) \ No newline at end of file From 0e31ed26d7e6ef27a4c7ba0c7899500d8750315b Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 16 Nov 2015 16:45:37 +0100 Subject: [PATCH 325/352] ADD travis-ci, landscape --- README.md | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index c623439769..36c6e8ded7 100644 --- a/README.md +++ b/README.md @@ -18,4 +18,15 @@ To build the documentation you also need the packages `sphinx` and `numpydoc`. pip install sphinx pip install numpydoc make html - firefox `pwd`/build/html/index.html \ No newline at end of file + firefox `pwd`/build/html/index.html + + +Status for master branch: + +[![Build Status](https://travis-ci.org/automl/paramsklearn.svg?branch=master)](https://travis-ci.org/automl/paramsklearn) +[![Code Health](https://landscape.io/github/automl/paramsklearn/master/landscape.png)](https://landscape.io/github/automl/paramsklearn/master) + +Status for development branch + +[![Build Status](https://travis-ci.org/automl/paramsklearn.svg?branch=development)](https://travis-ci.org/automl/paramsklearn) +[![Code Health](https://landscape.io/github/automl/paramsklearn/development/landscape.png)](https://landscape.io/github/automl/paramsklearn/development) From a148ce5a08798a155002ea852745d037db931d39 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Mon, 16 Nov 2015 16:52:43 +0100 Subject: [PATCH 326/352] UPDATE upgrade pip --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index c9970888db..eef4a83285 100644 --- a/.travis.yml +++ b/.travis.yml @@ -18,6 +18,7 @@ addons: install: - easy_install -U distribute + - pip install --upgrade pip - pip install numpy==1.9.0 - pip install scipy==0.16.1 - pip install scikit-learn==0.16.1 From d57213d2ed25dc9ed0d4d9038ed47bfed9f13a26 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 17 Nov 2015 09:24:20 +0100 Subject: [PATCH 327/352] ADD travis_wait to install scipy without timeout --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index eef4a83285..b4183b949b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -20,7 +20,7 @@ install: - easy_install -U distribute - pip install --upgrade pip - pip install numpy==1.9.0 - - pip install scipy==0.16.1 + - travis_wait pip install scipy==0.16.1 - pip install scikit-learn==0.16.1 # Install requirements from other repos From d8a9948d402fc196dd541f926833a4507409327b Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 17 Nov 2015 09:48:14 +0100 Subject: [PATCH 328/352] FIX test --- tests/components/regression/test_sgd.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/regression/test_sgd.py b/tests/components/regression/test_sgd.py index 03a6501e07..81845b8d48 100644 --- a/tests/components/regression/test_sgd.py +++ b/tests/components/regression/test_sgd.py @@ -24,7 +24,7 @@ def test_default_configuration_iterative_fit(self): def test_default_configuration_digits(self): for i in range(10): predictions, targets = _test_regressor(SGD, dataset='boston') - self.assertAlmostEqual(-2.916586651177552e+31, + self.assertAlmostEqual(-2.9165866511775523e+31, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) @@ -32,6 +32,6 @@ def test_default_configuration_digits_iterative_fit(self): for i in range(10): predictions, targets = _test_regressor_iterative_fit(SGD, dataset='boston') - self.assertAlmostEqual(-2.916586651177552e+31, + self.assertAlmostEqual(-2.9165866511775523e+31, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) \ No newline at end of file From 4bdbd913d93a5a802a546a9d21ad0604e9d46dce Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 17 Nov 2015 10:27:41 +0100 Subject: [PATCH 329/352] ADD coveralls, except SGD did not converge --- .coveralls.yml | 0 tests/test_classification.py | 2 ++ 2 files changed, 2 insertions(+) create mode 100644 .coveralls.yml diff --git a/.coveralls.yml b/.coveralls.yml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/test_classification.py b/tests/test_classification.py index b88ce98fdd..9306e4aca5 100644 --- a/tests/test_classification.py +++ b/tests/test_classification.py @@ -131,6 +131,8 @@ def test_multilabel(self): self.assertIsInstance(predictions, np.ndarray) predicted_probabilities = cls.predict_proba(X_test_) [self.assertIsInstance(i, np.ndarray) for i in predicted_probabilities] + except np.linalg.LinAlgError: + continue except ValueError as e: if "Floating-point under-/overflow occurred at epoch" in \ e.args[0] or \ From 274c8c6fe13bf833f18a9c7866b40f0c11dc7736 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 17 Nov 2015 10:45:31 +0100 Subject: [PATCH 330/352] USE miniconde --- .travis.yml | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/.travis.yml b/.travis.yml index b4183b949b..9b2b90b304 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,10 +3,16 @@ python: - "2.7" - "3.4" -cache: pip +cache: + directories: + - $HOME/.cache/pip + pip: true sudo: false +before_cache: + - rm -f $HOME/.cache/pip/log/debug.log + # command to install dependencies addons: apt: @@ -16,12 +22,15 @@ addons: - libatlas-base-dev - gfortran +before_install: + - wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh + - bash miniconda.sh -b -p $HOME/miniconda + - export PATH="$HOME/miniconda/bin:$PATH" + - conda update --yes conda + install: - - easy_install -U distribute - - pip install --upgrade pip - - pip install numpy==1.9.0 - - travis_wait pip install scipy==0.16.1 - - pip install scikit-learn==0.16.1 + - conda install --yes python=$TRAVIS_PYTHON_VERSION pip numpy==1.9.0 scipy==0.16.1 nose scikit-learn==0.16.1 + - pip install pep8 python-coveralls # Install requirements from other repos - pip install git+https://github.com/automl/HPOlibConfigSpace.git @@ -29,4 +38,6 @@ install: - python setup.py install # command to run tests, e.g. python setup.py test -script: python setup.py test \ No newline at end of file +script: python setup.py test + +after_success: coveralls \ No newline at end of file From a668b943db6872315cf421f28cc320c3949dbf01 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 17 Nov 2015 10:48:10 +0100 Subject: [PATCH 331/352] FIX --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 9b2b90b304..77b6b13b5b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -29,7 +29,7 @@ before_install: - conda update --yes conda install: - - conda install --yes python=$TRAVIS_PYTHON_VERSION pip numpy==1.9.0 scipy==0.16.1 nose scikit-learn==0.16.1 + - conda install --yes python=$TRAVIS_PYTHON_VERSION pip numpy=1.9.0 scipy=0.16.1 nose scikit-learn=0.16.1 - pip install pep8 python-coveralls # Install requirements from other repos From 7f2f4932e3c306ca4eb73aea874b48c59bf0bdec Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 17 Nov 2015 10:52:11 +0100 Subject: [PATCH 332/352] FIX --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 77b6b13b5b..78fac45501 100644 --- a/.travis.yml +++ b/.travis.yml @@ -29,7 +29,7 @@ before_install: - conda update --yes conda install: - - conda install --yes python=$TRAVIS_PYTHON_VERSION pip numpy=1.9.0 scipy=0.16.1 nose scikit-learn=0.16.1 + - conda install --yes python=$TRAVIS_PYTHON_VERSION pip numpy=1.9.0 scipy=0.16.0 nose scikit-learn=0.16.1 - pip install pep8 python-coveralls # Install requirements from other repos From 61b87132eea41bf3142be5046ef35b7238d241b6 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 17 Nov 2015 10:59:16 +0100 Subject: [PATCH 333/352] FIX install mock --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 78fac45501..05ed8f1c6c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -29,7 +29,7 @@ before_install: - conda update --yes conda install: - - conda install --yes python=$TRAVIS_PYTHON_VERSION pip numpy=1.9.0 scipy=0.16.0 nose scikit-learn=0.16.1 + - conda install --yes python=$TRAVIS_PYTHON_VERSION pip numpy=1.9.0 scipy=0.16.0 nose scikit-learn=0.16.1 mock - pip install pep8 python-coveralls # Install requirements from other repos From 01ef49ace4054a8dddeabfe78cb99bda4a04d7a6 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 17 Nov 2015 11:11:48 +0100 Subject: [PATCH 334/352] ADD coveralls --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 05ed8f1c6c..9baa60326e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -31,6 +31,7 @@ before_install: install: - conda install --yes python=$TRAVIS_PYTHON_VERSION pip numpy=1.9.0 scipy=0.16.0 nose scikit-learn=0.16.1 mock - pip install pep8 python-coveralls + - pip install coverage # Install requirements from other repos - pip install git+https://github.com/automl/HPOlibConfigSpace.git @@ -38,6 +39,6 @@ install: - python setup.py install # command to run tests, e.g. python setup.py test -script: python setup.py test +script: nosetests --with-coverage after_success: coveralls \ No newline at end of file From 1c16c1fbd8eea5a87f74e74600ca2ee4132daecf Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 17 Nov 2015 11:32:01 +0100 Subject: [PATCH 335/352] add coverage badges --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 36c6e8ded7..393425cd3d 100644 --- a/README.md +++ b/README.md @@ -25,8 +25,10 @@ Status for master branch: [![Build Status](https://travis-ci.org/automl/paramsklearn.svg?branch=master)](https://travis-ci.org/automl/paramsklearn) [![Code Health](https://landscape.io/github/automl/paramsklearn/master/landscape.png)](https://landscape.io/github/automl/paramsklearn/master) +[![Coverage Status](https://coveralls.io/repos/automl/paramsklearn/badge.svg?branch=master&service=github)](https://coveralls.io/github/automl/paramsklearn?branch=master) Status for development branch [![Build Status](https://travis-ci.org/automl/paramsklearn.svg?branch=development)](https://travis-ci.org/automl/paramsklearn) [![Code Health](https://landscape.io/github/automl/paramsklearn/development/landscape.png)](https://landscape.io/github/automl/paramsklearn/development) +[![Coverage Status](https://coveralls.io/repos/automl/paramsklearn/badge.svg?branch=development&service=github)](https://coveralls.io/github/automl/paramsklearn?branch=development) From df1d1f438eacb0b27378584a6998b9253c3917dd Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Tue, 17 Nov 2015 11:37:59 +0100 Subject: [PATCH 336/352] ADD setup.py instead of nosetests --- .travis.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 9baa60326e..05f236a080 100644 --- a/.travis.yml +++ b/.travis.yml @@ -39,6 +39,8 @@ install: - python setup.py install # command to run tests, e.g. python setup.py test -script: nosetests --with-coverage +script: + - coverage run --source autosklearn setup.py test + after_success: coveralls \ No newline at end of file From 1a465cbaed84aef67a83151ea39864be455f3e8f Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 20 Nov 2015 17:43:18 +0100 Subject: [PATCH 337/352] Fix unittest to work with numpy 1.10 --- ParamSklearn/base.py | 1 - .../components/data_preprocessing/balancing.py | 8 +++++--- tests/implementations/test_imputation.py | 17 ++++++++++++++--- 3 files changed, 19 insertions(+), 7 deletions(-) diff --git a/ParamSklearn/base.py b/ParamSklearn/base.py index dddfd0dc6c..17742f7ce9 100644 --- a/ParamSklearn/base.py +++ b/ParamSklearn/base.py @@ -162,7 +162,6 @@ def iterative_fit(self, X, y, fit_params=None, n_iter=1): **fit_params) def estimator_supports_iterative_fit(self): - check_is_fitted(self, 'pipeline_') return hasattr(self.pipeline_.steps[-1][-1], 'iterative_fit') def configuration_fully_fitted(self): diff --git a/ParamSklearn/components/data_preprocessing/balancing.py b/ParamSklearn/components/data_preprocessing/balancing.py index ae08329321..f4f39c1363 100644 --- a/ParamSklearn/components/data_preprocessing/balancing.py +++ b/ParamSklearn/components/data_preprocessing/balancing.py @@ -29,7 +29,9 @@ def get_weights(self, Y, classifier, preprocessor, init_params, fit_params): # We can have adaboost in here, because in the fit method, # the sample weights are normalized: # https://github.com/scikit-learn/scikit-learn/blob/0.15.X/sklearn/ensemble/weight_boosting.py#L121 - clf_ = ['adaboost', 'gradient_boosting'] + # Have RF and ET in here because they emit a warning if class_weights + # are used together with warmstarts + clf_ = ['adaboost', 'gradient_boosting', 'random_forest', 'extra_trees'] pre_ = [] if classifier in clf_ or preprocessor in pre_: if len(Y.shape) > 1: @@ -55,8 +57,8 @@ def get_weights(self, Y, classifier, preprocessor, init_params, fit_params): # Classifiers which can adjust sample weights themselves via the # argument `class_weight` - clf_ = ['decision_tree', 'extra_trees', 'liblinear_svc', - 'libsvm_svc', 'random_forest', 'sgd'] + clf_ = ['decision_tree', 'liblinear_svc', + 'libsvm_svc', 'sgd'] pre_ = ['liblinear_svc_preprocessor', 'extra_trees_preproc_for_classification'] if classifier in clf_: diff --git a/tests/implementations/test_imputation.py b/tests/implementations/test_imputation.py index e3616349b3..99d4b62c5c 100644 --- a/tests/implementations/test_imputation.py +++ b/tests/implementations/test_imputation.py @@ -16,6 +16,18 @@ from sklearn.random_projection import sparse_random_matrix +def safe_median(arr, *args, **kwargs): + # np.median([]) raises a TypeError for numpy >= 1.10.1 + length = arr.size if hasattr(arr, 'size') else len(arr) + return np.nan if length == 0 else np.median(arr, *args, **kwargs) + + +def safe_mean(arr, *args, **kwargs): + # np.mean([]) raises a RuntimeWarning for numpy >= 1.10.1 + length = arr.size if hasattr(arr, 'size') else len(arr) + return np.nan if length == 0 else np.mean(arr, *args, **kwargs) + + class ImputationTest(unittest.TestCase): def _check_statistics(self, X, X_true, strategy, statistics, missing_values): @@ -137,9 +149,9 @@ def test_imputation_mean_median(self): values = np.arange(1, shape[0] + 1) values[4::2] = - values[4::2] - tests = [("mean", "NaN", lambda z, v, p: np.mean(np.hstack((z, v)))), + tests = [("mean", "NaN", lambda z, v, p: safe_mean(np.hstack((z, v)))), ("mean", 0, lambda z, v, p: np.mean(v)), - ("median", "NaN", lambda z, v, p: np.median(np.hstack((z, v)))), + ("median", "NaN", lambda z, v, p: safe_median(np.hstack((z, v)))), ("median", 0, lambda z, v, p: np.median(v))] for strategy, test_missing_values, true_value_fun in tests: @@ -161,7 +173,6 @@ def test_imputation_mean_median(self): z = zeros[:nb_zeros] p = np.repeat(test_missing_values, nb_missing_values) v = values[rng.permutation(len(values))[:nb_values]] - true_statistics[j] = true_value_fun(z, v, p) # Create the columns From e40588f5322b69ce14f7fbfd75233ba58965c698 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Fri, 20 Nov 2015 17:48:07 +0100 Subject: [PATCH 338/352] Loosen numpy version constraints; add python 3.5 --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 05f236a080..bd56d73187 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,6 +2,7 @@ language: python python: - "2.7" - "3.4" + - "3.5" cache: directories: @@ -29,7 +30,7 @@ before_install: - conda update --yes conda install: - - conda install --yes python=$TRAVIS_PYTHON_VERSION pip numpy=1.9.0 scipy=0.16.0 nose scikit-learn=0.16.1 mock + - conda install --yes python=$TRAVIS_PYTHON_VERSION pip numpy=1.9 scipy=0.16 nose scikit-learn=0.16.1 mock - pip install pep8 python-coveralls - pip install coverage From b1a96ad1c908c3c116b4b4ede6f62067ddd8ad7c Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 24 Nov 2015 10:17:02 +0100 Subject: [PATCH 339/352] Move import into fit(); avoid importing whole sklearn --- ParamSklearn/classification.py | 1 - .../components/classification/adaboost.py | 7 ++++--- .../components/classification/bernoulli_nb.py | 3 ++- .../components/classification/decision_tree.py | 4 ++-- .../components/classification/extra_trees.py | 4 ++-- .../components/classification/gaussian_nb.py | 3 ++- .../classification/gradient_boosting.py | 3 ++- .../classification/k_nearest_neighbors.py | 6 +++--- ParamSklearn/components/classification/lda.py | 6 +++--- .../components/classification/liblinear_svc.py | 6 +++--- .../components/classification/libsvm_svc.py | 3 ++- .../components/classification/multinomial_nb.py | 5 +++-- .../classification/passive_aggressive.py | 4 +++- .../components/classification/proj_logit.py | 1 - ParamSklearn/components/classification/qda.py | 4 ++-- .../components/classification/random_forest.py | 3 ++- ParamSklearn/components/classification/sgd.py | 4 ++-- .../components/data_preprocessing/imputation.py | 5 ++--- .../data_preprocessing/one_hot_encoding.py | 3 ++- .../components/data_preprocessing/rescaling.py | 7 +++---- .../components/feature_preprocessing/densifier.py | 3 +-- .../extra_trees_preproc_for_classification.py | 3 ++- .../components/feature_preprocessing/fast_ica.py | 4 ++-- .../feature_agglomeration.py | 3 ++- .../feature_preprocessing/kernel_pca.py | 5 +++-- .../feature_preprocessing/kitchen_sinks.py | 7 ++++--- .../liblinear_svc_preprocessor.py | 4 ++-- .../feature_preprocessing/nystroem_sampler.py | 7 +++++-- .../components/feature_preprocessing/pca.py | 2 +- .../feature_preprocessing/polynomial.py | 4 ++-- .../random_trees_embedding.py | 4 ++-- .../feature_preprocessing/select_percentile.py | 5 ++--- .../select_percentile_classification.py | 12 ++++++++---- .../select_percentile_regression.py | 3 +-- .../feature_preprocessing/select_rates.py | 11 ++++++++--- .../feature_preprocessing/truncatedSVD.py | 4 ++-- ParamSklearn/components/regression/adaboost.py | 5 +++-- .../components/regression/decision_tree.py | 4 ++-- ParamSklearn/components/regression/extra_trees.py | 4 ++-- .../components/regression/gaussian_process.py | 6 +++--- .../components/regression/gradient_boosting.py | 3 ++- .../components/regression/k_nearest_neighbors.py | 4 ++-- .../components/regression/liblinear_svr.py | 4 ++-- ParamSklearn/components/regression/libsvm_svr.py | 3 ++- .../components/regression/random_forest.py | 5 ++--- .../components/regression/ridge_regression.py | 2 +- ParamSklearn/components/regression/sgd.py | 6 +++--- ParamSklearn/regression.py | 2 -- tests/components/regression/test_sgd.py | 15 --------------- 49 files changed, 115 insertions(+), 111 deletions(-) diff --git a/ParamSklearn/classification.py b/ParamSklearn/classification.py index 691d539dc1..fa332f63ab 100644 --- a/ParamSklearn/classification.py +++ b/ParamSklearn/classification.py @@ -12,7 +12,6 @@ from ParamSklearn.base import ParamSklearnBaseEstimator from ParamSklearn.constants import SPARSE from ParamSklearn.components.data_preprocessing.balancing import Balancing -import ParamSklearn.create_searchspace_util class ParamSklearnClassifier(ClassifierMixin, ParamSklearnBaseEstimator): diff --git a/ParamSklearn/components/classification/adaboost.py b/ParamSklearn/components/classification/adaboost.py index df8116aa4f..4190980d76 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/ParamSklearn/components/classification/adaboost.py @@ -1,7 +1,4 @@ import numpy as np -import sklearn.ensemble -import sklearn.tree -import sklearn.multiclass from ParamSklearn.implementations.MultilabelClassifier import MultilabelClassifier @@ -25,6 +22,10 @@ def __init__(self, n_estimators, learning_rate, algorithm, max_depth, self.estimator = None def fit(self, X, Y, sample_weight=None): + import sklearn.ensemble + import sklearn.tree + import sklearn.multiclass + self.n_estimators = int(self.n_estimators) self.learning_rate = float(self.learning_rate) self.max_depth = int(self.max_depth) diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/ParamSklearn/components/classification/bernoulli_nb.py index 2fe640d2a5..c2a8814151 100644 --- a/ParamSklearn/components/classification/bernoulli_nb.py +++ b/ParamSklearn/components/classification/bernoulli_nb.py @@ -1,5 +1,4 @@ import numpy as np -import sklearn.naive_bayes from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -29,6 +28,8 @@ def fit(self, X, y): return self def iterative_fit(self, X, y, n_iter=1, refit=False): + import sklearn.naive_bayes + if refit: self.estimator = None diff --git a/ParamSklearn/components/classification/decision_tree.py b/ParamSklearn/components/classification/decision_tree.py index b39e35da06..a9d91aabed 100644 --- a/ParamSklearn/components/classification/decision_tree.py +++ b/ParamSklearn/components/classification/decision_tree.py @@ -8,8 +8,6 @@ from ParamSklearn.components.base import \ ParamSklearnClassificationAlgorithm from ParamSklearn.constants import * -# get our own forests to replace the sklearn ones -from sklearn.tree import DecisionTreeClassifier class DecisionTree(ParamSklearnClassificationAlgorithm): @@ -29,6 +27,8 @@ def __init__(self, criterion, splitter, max_features, max_depth, self.estimator = None def fit(self, X, y, sample_weight=None): + from sklearn.tree import DecisionTreeClassifier + self.max_features = float(self.max_features) if self.max_depth == "None": self.max_depth = None diff --git a/ParamSklearn/components/classification/extra_trees.py b/ParamSklearn/components/classification/extra_trees.py index de64d68299..acf290e988 100644 --- a/ParamSklearn/components/classification/extra_trees.py +++ b/ParamSklearn/components/classification/extra_trees.py @@ -8,8 +8,6 @@ from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.constants import * -from sklearn.ensemble import ExtraTreesClassifier as ETC - class ExtraTreesClassifier(ParamSklearnClassificationAlgorithm): @@ -71,6 +69,8 @@ def fit(self, X, y, sample_weight=None, refit=False): return self def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): + from sklearn.ensemble import ExtraTreesClassifier as ETC + if refit: self.estimator = None diff --git a/ParamSklearn/components/classification/gaussian_nb.py b/ParamSklearn/components/classification/gaussian_nb.py index d44bc68d27..b51b6cb051 100644 --- a/ParamSklearn/components/classification/gaussian_nb.py +++ b/ParamSklearn/components/classification/gaussian_nb.py @@ -1,5 +1,4 @@ import numpy as np -import sklearn.naive_bayes from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -21,6 +20,8 @@ def fit(self, X, y): return self def iterative_fit(self, X, y, n_iter=1, refit=False): + import sklearn.naive_bayes + if refit: self.estimator = None diff --git a/ParamSklearn/components/classification/gradient_boosting.py b/ParamSklearn/components/classification/gradient_boosting.py index d8c6630704..893ea4a91a 100644 --- a/ParamSklearn/components/classification/gradient_boosting.py +++ b/ParamSklearn/components/classification/gradient_boosting.py @@ -1,5 +1,4 @@ import numpy as np -import sklearn.ensemble from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -40,6 +39,8 @@ def fit(self, X, y, sample_weight=None, refit=False): return self def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): + import sklearn.ensemble + # Special fix for gradient boosting! if isinstance(X, np.ndarray): X = np.ascontiguousarray(X, dtype=X.dtype) diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/ParamSklearn/components/classification/k_nearest_neighbors.py index acb604cfc4..1f4ffdd987 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/ParamSklearn/components/classification/k_nearest_neighbors.py @@ -1,6 +1,3 @@ -import sklearn.neighbors -import sklearn.multiclass - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ Constant, UniformIntegerHyperparameter @@ -18,6 +15,9 @@ def __init__(self, n_neighbors, weights, p, random_state=None): self.random_state = random_state def fit(self, X, Y): + import sklearn.neighbors + import sklearn.multiclass + estimator = \ sklearn.neighbors.KNeighborsClassifier(n_neighbors=self.n_neighbors, weights=self.weights, diff --git a/ParamSklearn/components/classification/lda.py b/ParamSklearn/components/classification/lda.py index 98c46dfafd..b588ff1231 100644 --- a/ParamSklearn/components/classification/lda.py +++ b/ParamSklearn/components/classification/lda.py @@ -1,6 +1,3 @@ -import sklearn.lda -import sklearn.multiclass - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, CategoricalHyperparameter @@ -22,6 +19,9 @@ def __init__(self, shrinkage, n_components, tol, shrinkage_factor=0.5, self.estimator = None def fit(self, X, Y): + import sklearn.lda + import sklearn.multiclass + if self.shrinkage == "None": self.shrinkage = None solver = 'svd' diff --git a/ParamSklearn/components/classification/liblinear_svc.py b/ParamSklearn/components/classification/liblinear_svc.py index aef4abb7e0..fe863ccda6 100644 --- a/ParamSklearn/components/classification/liblinear_svc.py +++ b/ParamSklearn/components/classification/liblinear_svc.py @@ -1,6 +1,3 @@ -import sklearn.svm -import sklearn.multiclass - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter, Constant @@ -30,6 +27,9 @@ def __init__(self, penalty, loss, dual, tol, C, multi_class, self.estimator = None def fit(self, X, Y): + import sklearn.svm + import sklearn.multiclass + self.C = float(self.C) self.tol = float(self.tol) diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/ParamSklearn/components/classification/libsvm_svc.py index 8d79c4dc9f..b66c6209d2 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/ParamSklearn/components/classification/libsvm_svc.py @@ -1,7 +1,6 @@ import resource import numpy as np -import sklearn.svm from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.conditions import EqualsCondition, InCondition @@ -75,6 +74,8 @@ def __init__(self, C, kernel, gamma, shrinking, tol, max_iter, self.estimator = None def fit(self, X, Y): + import sklearn.svm + try: soft, hard = resource.getrlimit(resource.RLIMIT_AS) if soft > 0: diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/ParamSklearn/components/classification/multinomial_nb.py index 438513062d..57f1eb8f79 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/ParamSklearn/components/classification/multinomial_nb.py @@ -1,6 +1,4 @@ import numpy as np -import sklearn.naive_bayes -import scipy.sparse from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -31,6 +29,9 @@ def fit(self, X, y): return self def iterative_fit(self, X, y, n_iter=1, refit=False): + import sklearn.naive_bayes + import scipy.sparse + if refit: self.estimator = None diff --git a/ParamSklearn/components/classification/passive_aggressive.py b/ParamSklearn/components/classification/passive_aggressive.py index 473e5d1a4f..c6d09f533e 100644 --- a/ParamSklearn/components/classification/passive_aggressive.py +++ b/ParamSklearn/components/classification/passive_aggressive.py @@ -1,5 +1,4 @@ import numpy as np -from sklearn.linear_model.passive_aggressive import PassiveAggressiveClassifier from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -28,6 +27,9 @@ def fit(self, X, y): return self def iterative_fit(self, X, y, n_iter=1, refit=False): + from sklearn.linear_model.passive_aggressive import \ + PassiveAggressiveClassifier + if refit: self.estimator = None diff --git a/ParamSklearn/components/classification/proj_logit.py b/ParamSklearn/components/classification/proj_logit.py index 6c5277702c..12d9b3adad 100644 --- a/ParamSklearn/components/classification/proj_logit.py +++ b/ParamSklearn/components/classification/proj_logit.py @@ -10,7 +10,6 @@ from ParamSklearn.implementations import ProjLogit - class ProjLogitCLassifier(ParamSklearnClassificationAlgorithm): def __init__(self, max_epochs = 2, random_state=None, n_jobs=1): diff --git a/ParamSklearn/components/classification/qda.py b/ParamSklearn/components/classification/qda.py index fed8e7e73c..eca13ce664 100644 --- a/ParamSklearn/components/classification/qda.py +++ b/ParamSklearn/components/classification/qda.py @@ -1,5 +1,3 @@ -import sklearn.qda - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter @@ -16,6 +14,8 @@ def __init__(self, reg_param, random_state=None): self.estimator = None def fit(self, X, Y): + import sklearn.qda + estimator = sklearn.qda.QDA(self.reg_param) if len(Y.shape) == 2 and Y.shape[1] > 1: diff --git a/ParamSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py index 49ca2b5857..62027ed2d4 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -1,5 +1,4 @@ import numpy as np -from sklearn.ensemble import RandomForestClassifier from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -42,6 +41,8 @@ def fit(self, X, y, sample_weight=None, refit=False): return self def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): + from sklearn.ensemble import RandomForestClassifier + if refit: self.estimator = None diff --git a/ParamSklearn/components/classification/sgd.py b/ParamSklearn/components/classification/sgd.py index 740ff3c051..065cab51a0 100644 --- a/ParamSklearn/components/classification/sgd.py +++ b/ParamSklearn/components/classification/sgd.py @@ -1,5 +1,3 @@ -from sklearn.linear_model.stochastic_gradient import SGDClassifier - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter, UnParametrizedHyperparameter, \ @@ -37,6 +35,8 @@ def fit(self, X, y): return self def iterative_fit(self, X, y, n_iter=1, refit=False): + from sklearn.linear_model.stochastic_gradient import SGDClassifier + if refit: self.estimator = None diff --git a/ParamSklearn/components/data_preprocessing/imputation.py b/ParamSklearn/components/data_preprocessing/imputation.py index e8ab45185c..848142785b 100644 --- a/ParamSklearn/components/data_preprocessing/imputation.py +++ b/ParamSklearn/components/data_preprocessing/imputation.py @@ -1,6 +1,3 @@ - -import sklearn.preprocessing - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter @@ -14,6 +11,8 @@ def __init__(self, strategy, random_state=None): self.strategy = strategy def fit(self, X, y=None): + import sklearn.preprocessing + self.preprocessor = sklearn.preprocessing.Imputer( strategy=self.strategy, copy=False) self.preprocessor = self.preprocessor.fit(X) diff --git a/ParamSklearn/components/data_preprocessing/one_hot_encoding.py b/ParamSklearn/components/data_preprocessing/one_hot_encoding.py index cf594da1e4..98ccd839a0 100644 --- a/ParamSklearn/components/data_preprocessing/one_hot_encoding.py +++ b/ParamSklearn/components/data_preprocessing/one_hot_encoding.py @@ -1,5 +1,4 @@ import numpy as np -import scipy.sparse import ParamSklearn.implementations.OneHotEncoder @@ -40,6 +39,8 @@ def fit(self, X, y=None): return self def transform(self, X): + import scipy.sparse + is_sparse = scipy.sparse.issparse(X) if self.preprocessor is None: raise NotImplementedError() diff --git a/ParamSklearn/components/data_preprocessing/rescaling.py b/ParamSklearn/components/data_preprocessing/rescaling.py index 537e349ea2..3f47cdc45b 100644 --- a/ParamSklearn/components/data_preprocessing/rescaling.py +++ b/ParamSklearn/components/data_preprocessing/rescaling.py @@ -5,10 +5,6 @@ from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition, AbstractConjunction -from ParamSklearn.implementations.StandardScaler import StandardScaler -from ParamSklearn.implementations.MinMaxScaler import MinMaxScaler -from ParamSklearn.implementations.Normalizer import Normalizer -from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.constants import * @@ -62,6 +58,7 @@ def get_properties(dataset_properties=None): class MinMaxScalerComponent(Rescaling): def __init__(self, random_state): + from ParamSklearn.implementations.MinMaxScaler import MinMaxScaler self.preprocessor = MinMaxScaler() @staticmethod @@ -88,6 +85,7 @@ def get_properties(dataset_properties=None): class StandardScalerComponent(Rescaling): def __init__(self, random_state): + from ParamSklearn.implementations.StandardScaler import StandardScaler self.preprocessor = StandardScaler() @staticmethod @@ -114,6 +112,7 @@ def get_properties(dataset_properties=None): class NormalizerComponent(Rescaling): def __init__(self, random_state): + from ParamSklearn.implementations.Normalizer import Normalizer self.preprocessor = Normalizer() @staticmethod diff --git a/ParamSklearn/components/feature_preprocessing/densifier.py b/ParamSklearn/components/feature_preprocessing/densifier.py index 7446a37df5..8f83bd4db1 100644 --- a/ParamSklearn/components/feature_preprocessing/densifier.py +++ b/ParamSklearn/components/feature_preprocessing/densifier.py @@ -1,5 +1,3 @@ -from scipy import sparse - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from ParamSklearn.components.base import \ @@ -15,6 +13,7 @@ def fit(self, X, y=None): return self def transform(self, X): + from scipy import sparse if sparse.issparse(X): return X.todense().getA() else: diff --git a/ParamSklearn/components/feature_preprocessing/extra_trees_preproc_for_classification.py b/ParamSklearn/components/feature_preprocessing/extra_trees_preproc_for_classification.py index fdfc34ba9b..5d8dcc62bc 100644 --- a/ParamSklearn/components/feature_preprocessing/extra_trees_preproc_for_classification.py +++ b/ParamSklearn/components/feature_preprocessing/extra_trees_preproc_for_classification.py @@ -1,5 +1,4 @@ import numpy as np -from sklearn.ensemble import ExtraTreesClassifier from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -62,6 +61,8 @@ def __init__(self, n_estimators, criterion, min_samples_leaf, self.preprocessor = None def fit(self, X, Y, sample_weight=None): + from sklearn.ensemble import ExtraTreesClassifier + num_features = X.shape[1] max_features = int( float(self.max_features) * (np.log(num_features) + 1)) diff --git a/ParamSklearn/components/feature_preprocessing/fast_ica.py b/ParamSklearn/components/feature_preprocessing/fast_ica.py index 86b7538df3..4b88bf14a8 100644 --- a/ParamSklearn/components/feature_preprocessing/fast_ica.py +++ b/ParamSklearn/components/feature_preprocessing/fast_ica.py @@ -1,7 +1,5 @@ import warnings -import sklearn.decomposition - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ UniformIntegerHyperparameter @@ -22,6 +20,8 @@ def __init__(self, algorithm, whiten, fun, n_components=None, self.random_state = random_state def fit(self, X, Y=None): + import sklearn.decomposition + self.preprocessor = sklearn.decomposition.FastICA( n_components=self.n_components, algorithm=self.algorithm, fun=self.fun, whiten=self.whiten, random_state=self.random_state diff --git a/ParamSklearn/components/feature_preprocessing/feature_agglomeration.py b/ParamSklearn/components/feature_preprocessing/feature_agglomeration.py index 770c749428..b8b8ca0990 100644 --- a/ParamSklearn/components/feature_preprocessing/feature_agglomeration.py +++ b/ParamSklearn/components/feature_preprocessing/feature_agglomeration.py @@ -1,5 +1,4 @@ import numpy as np -import sklearn.cluster from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ @@ -26,6 +25,8 @@ def __init__(self, n_clusters, affinity, linkage, pooling_func, max=np.max) def fit(self, X, Y=None): + import sklearn.cluster + n_clusters = min(self.n_clusters, X.shape[1]) if not callable(self.pooling_func): self.pooling_func = self.pooling_func_mapping[self.pooling_func] diff --git a/ParamSklearn/components/feature_preprocessing/kernel_pca.py b/ParamSklearn/components/feature_preprocessing/kernel_pca.py index bc2004cdc1..5df1a0a745 100644 --- a/ParamSklearn/components/feature_preprocessing/kernel_pca.py +++ b/ParamSklearn/components/feature_preprocessing/kernel_pca.py @@ -1,8 +1,6 @@ import warnings import numpy as np -import scipy.sparse -import sklearn.decomposition from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ @@ -25,6 +23,9 @@ def __init__(self, n_components, kernel, degree=3, gamma=0.25, coef0=0.0, self.random_state = random_state def fit(self, X, Y=None): + import scipy.sparse + import sklearn.decomposition + self.preprocessor = sklearn.decomposition.KernelPCA( n_components=self.n_components, kernel=self.kernel, degree=self.degree, gamma=self.gamma, coef0=self.coef0, diff --git a/ParamSklearn/components/feature_preprocessing/kitchen_sinks.py b/ParamSklearn/components/feature_preprocessing/kitchen_sinks.py index 2b31182771..582d0542aa 100644 --- a/ParamSklearn/components/feature_preprocessing/kitchen_sinks.py +++ b/ParamSklearn/components/feature_preprocessing/kitchen_sinks.py @@ -1,5 +1,3 @@ -import sklearn.kernel_approximation - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter @@ -22,7 +20,10 @@ def __init__(self, gamma, n_components, random_state=None): self.random_state = random_state def fit(self, X, Y=None): - self.preprocessor = sklearn.kernel_approximation.RBFSampler(self.gamma, self.n_components, self.random_state) + import sklearn.kernel_approximation + + self.preprocessor = sklearn.kernel_approximation.RBFSampler( + self.gamma, self.n_components, self.random_state) self.preprocessor.fit(X) return self diff --git a/ParamSklearn/components/feature_preprocessing/liblinear_svc_preprocessor.py b/ParamSklearn/components/feature_preprocessing/liblinear_svc_preprocessor.py index badb6b0f5f..7620b78509 100644 --- a/ParamSklearn/components/feature_preprocessing/liblinear_svc_preprocessor.py +++ b/ParamSklearn/components/feature_preprocessing/liblinear_svc_preprocessor.py @@ -1,5 +1,3 @@ -import sklearn.svm - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter, Constant, UnParametrizedHyperparameter @@ -29,6 +27,8 @@ def __init__(self, penalty, loss, dual, tol, C, multi_class, self.preprocessor = None def fit(self, X, Y): + import sklearn.svm + self.C = float(self.C) self.tol = float(self.tol) diff --git a/ParamSklearn/components/feature_preprocessing/nystroem_sampler.py b/ParamSklearn/components/feature_preprocessing/nystroem_sampler.py index 9764569520..6335b1ec18 100644 --- a/ParamSklearn/components/feature_preprocessing/nystroem_sampler.py +++ b/ParamSklearn/components/feature_preprocessing/nystroem_sampler.py @@ -1,6 +1,4 @@ import numpy as np -import scipy.sparse -import sklearn.kernel_approximation from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -23,6 +21,9 @@ def __init__(self, kernel, n_components, gamma=1.0, degree=3, self.random_state = random_state def fit(self, X, Y=None): + import scipy.sparse + import sklearn.kernel_approximation + self.preprocessor = sklearn.kernel_approximation.Nystroem( kernel=self.kernel, n_components=self.n_components, gamma=self.gamma, degree=self.degree, coef0=self.coef0, @@ -40,6 +41,8 @@ def fit(self, X, Y=None): return self def transform(self, X): + import scipy.sparse + # Because the pipeline guarantees that each feature is positive, # clip all values below zero to zero if self.kernel == 'chi2': diff --git a/ParamSklearn/components/feature_preprocessing/pca.py b/ParamSklearn/components/feature_preprocessing/pca.py index d1fed4d1a5..71252e7fd6 100644 --- a/ParamSklearn/components/feature_preprocessing/pca.py +++ b/ParamSklearn/components/feature_preprocessing/pca.py @@ -1,5 +1,4 @@ import numpy as np -import sklearn.decomposition from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -16,6 +15,7 @@ def __init__(self, keep_variance, whiten, random_state=None): self.random_state = random_state def fit(self, X, Y=None): + import sklearn.decomposition n_components = float(self.keep_variance) self.preprocessor = sklearn.decomposition.PCA(n_components=n_components, whiten=self.whiten, diff --git a/ParamSklearn/components/feature_preprocessing/polynomial.py b/ParamSklearn/components/feature_preprocessing/polynomial.py index 08aa0cf7a2..3c47e19025 100644 --- a/ParamSklearn/components/feature_preprocessing/polynomial.py +++ b/ParamSklearn/components/feature_preprocessing/polynomial.py @@ -1,5 +1,3 @@ -import sklearn.preprocessing - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ UniformIntegerHyperparameter @@ -18,6 +16,8 @@ def __init__(self, degree, interaction_only, include_bias, random_state=None): self.preprocessor = None def fit(self, X, Y): + import sklearn.preprocessing + self.preprocessor = sklearn.preprocessing.PolynomialFeatures( degree=self.degree, interaction_only=self.interaction_only, include_bias=self.include_bias) diff --git a/ParamSklearn/components/feature_preprocessing/random_trees_embedding.py b/ParamSklearn/components/feature_preprocessing/random_trees_embedding.py index 5ae095798b..b86edcecb2 100644 --- a/ParamSklearn/components/feature_preprocessing/random_trees_embedding.py +++ b/ParamSklearn/components/feature_preprocessing/random_trees_embedding.py @@ -1,5 +1,3 @@ -import sklearn.ensemble - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter, \ UnParametrizedHyperparameter, Constant @@ -24,6 +22,8 @@ def __init__(self, n_estimators, max_depth, min_samples_split, self.random_state = random_state def fit(self, X, Y=None): + import sklearn.ensemble + if self.max_depth == "None": self.max_depth = None else: diff --git a/ParamSklearn/components/feature_preprocessing/select_percentile.py b/ParamSklearn/components/feature_preprocessing/select_percentile.py index cf19ab39c3..c928e2f471 100644 --- a/ParamSklearn/components/feature_preprocessing/select_percentile.py +++ b/ParamSklearn/components/feature_preprocessing/select_percentile.py @@ -1,9 +1,8 @@ -import sklearn.feature_selection - - class SelectPercentileBase(object): def fit(self, X, y): + import sklearn.feature_selection + self.preprocessor = sklearn.feature_selection.SelectPercentile( score_func=self.score_func, percentile=self.percentile) diff --git a/ParamSklearn/components/feature_preprocessing/select_percentile_classification.py b/ParamSklearn/components/feature_preprocessing/select_percentile_classification.py index 510dca9be7..a7190839a8 100644 --- a/ParamSklearn/components/feature_preprocessing/select_percentile_classification.py +++ b/ParamSklearn/components/feature_preprocessing/select_percentile_classification.py @@ -1,14 +1,10 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, CategoricalHyperparameter, Constant -import sklearn.feature_selection - from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.components.feature_preprocessing.select_percentile import SelectPercentileBase from ParamSklearn.constants import * -import scipy.sparse - class SelectPercentileClassification(SelectPercentileBase, ParamSklearnPreprocessingAlgorithm): @@ -20,6 +16,8 @@ def __init__(self, percentile, score_func="chi2", random_state=None): score_func : callable, Function taking two arrays X and y, and returning a pair of arrays (scores, pvalues). """ + import sklearn.feature_selection + self.random_state = random_state # We don't use this self.percentile = int(float(percentile)) if score_func == "chi2": @@ -31,6 +29,9 @@ def __init__(self, percentile, score_func="chi2", random_state=None): "but is: %s" % score_func) def fit(self, X, y): + import scipy.sparse + import sklearn.feature_selection + self.preprocessor = sklearn.feature_selection.SelectPercentile( score_func=self.score_func, percentile=self.percentile) @@ -47,6 +48,9 @@ def fit(self, X, y): return self def transform(self, X): + import scipy.sparse + import sklearn.feature_selection + # Because the pipeline guarantees that each feature is positive, # clip all values below zero to zero if self.score_func == sklearn.feature_selection.chi2: diff --git a/ParamSklearn/components/feature_preprocessing/select_percentile_regression.py b/ParamSklearn/components/feature_preprocessing/select_percentile_regression.py index a93381594f..41339235e7 100644 --- a/ParamSklearn/components/feature_preprocessing/select_percentile_regression.py +++ b/ParamSklearn/components/feature_preprocessing/select_percentile_regression.py @@ -1,8 +1,6 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, UnParametrizedHyperparameter -import sklearn.feature_selection - from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm from ParamSklearn.components.feature_preprocessing.select_percentile import SelectPercentileBase from ParamSklearn.constants import * @@ -18,6 +16,7 @@ def __init__(self, percentile, score_func="f_classif", random_state=None): score_func : callable, Function taking two arrays X and y, and returning a pair of arrays (scores, pvalues). """ + import sklearn.feature_selection self.random_state = random_state # We don't use this self.percentile = int(float(percentile)) diff --git a/ParamSklearn/components/feature_preprocessing/select_rates.py b/ParamSklearn/components/feature_preprocessing/select_rates.py index 837ef74163..4f6471a11e 100644 --- a/ParamSklearn/components/feature_preprocessing/select_rates.py +++ b/ParamSklearn/components/feature_preprocessing/select_rates.py @@ -2,9 +2,6 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter, Constant -import scipy.sparse -import sklearn.feature_selection - from ParamSklearn.components.base import \ ParamSklearnPreprocessingAlgorithm from ParamSklearn.constants import * @@ -13,6 +10,8 @@ class SelectRates(ParamSklearnPreprocessingAlgorithm): def __init__(self, alpha, mode='fpr', score_func="chi2", random_state=None): + import sklearn.feature_selection + self.random_state = random_state # We don't use this self.alpha = float(alpha) @@ -27,6 +26,9 @@ def __init__(self, alpha, mode='fpr', self.mode = mode def fit(self, X, y): + import scipy.sparse + import sklearn.feature_selection + self.preprocessor = sklearn.feature_selection.GenericUnivariateSelect( score_func=self.score_func, param=self.alpha, mode=self.mode) @@ -42,6 +44,9 @@ def fit(self, X, y): return self def transform(self, X): + import scipy.sparse + import sklearn.feature_selection + # Because the pipeline guarantees that each feature is positive, # clip all values below zero to zero if self.score_func == sklearn.feature_selection.chi2: diff --git a/ParamSklearn/components/feature_preprocessing/truncatedSVD.py b/ParamSklearn/components/feature_preprocessing/truncatedSVD.py index 3d9010d01c..d4c7ef4ff6 100644 --- a/ParamSklearn/components/feature_preprocessing/truncatedSVD.py +++ b/ParamSklearn/components/feature_preprocessing/truncatedSVD.py @@ -1,7 +1,5 @@ import numpy as np -import sklearn.decomposition - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter @@ -16,6 +14,8 @@ def __init__(self, target_dim, random_state=None): self.preprocessor = None def fit(self, X, Y): + import sklearn.decomposition + target_dim = min(self.target_dim, X.shape[1] - 1) self.preprocessor = sklearn.decomposition.TruncatedSVD( target_dim, algorithm='randomized') diff --git a/ParamSklearn/components/regression/adaboost.py b/ParamSklearn/components/regression/adaboost.py index cc7dd17f70..af4baee38f 100644 --- a/ParamSklearn/components/regression/adaboost.py +++ b/ParamSklearn/components/regression/adaboost.py @@ -1,6 +1,4 @@ import numpy as np -import sklearn.ensemble -import sklearn.tree from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -21,6 +19,9 @@ def __init__(self, n_estimators, learning_rate, loss, max_depth, self.estimator = None def fit(self, X, Y): + import sklearn.ensemble + import sklearn.tree + self.n_estimators = int(self.n_estimators) self.learning_rate = float(self.learning_rate) self.max_depth = int(self.max_depth) diff --git a/ParamSklearn/components/regression/decision_tree.py b/ParamSklearn/components/regression/decision_tree.py index d3113d9774..0b35718a34 100644 --- a/ParamSklearn/components/regression/decision_tree.py +++ b/ParamSklearn/components/regression/decision_tree.py @@ -8,8 +8,6 @@ from ParamSklearn.components.base import \ ParamSklearnRegressionAlgorithm from ParamSklearn.constants import * -# get our own forests to replace the sklearn ones -from sklearn.tree import DecisionTreeRegressor class DecisionTree(ParamSklearnRegressionAlgorithm): @@ -28,6 +26,8 @@ def __init__(self, criterion, splitter, max_features, max_depth, self.estimator = None def fit(self, X, y, sample_weight=None): + from sklearn.tree import DecisionTreeRegressor + self.max_features = float(self.max_features) if self.max_depth == "None": self.max_depth = None diff --git a/ParamSklearn/components/regression/extra_trees.py b/ParamSklearn/components/regression/extra_trees.py index 12380c080c..f1456f97de 100644 --- a/ParamSklearn/components/regression/extra_trees.py +++ b/ParamSklearn/components/regression/extra_trees.py @@ -8,8 +8,6 @@ from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm from ParamSklearn.constants import * -from sklearn.ensemble import ExtraTreesRegressor as ETR - class ExtraTreesRegressor(ParamSklearnRegressionAlgorithm): def __init__(self, n_estimators, criterion, min_samples_leaf, @@ -67,6 +65,8 @@ def fit(self, X, y, refit=False): return self def iterative_fit(self, X, y, n_iter=1, refit=False): + from sklearn.ensemble import ExtraTreesRegressor as ETR + if refit: self.estimator = None diff --git a/ParamSklearn/components/regression/gaussian_process.py b/ParamSklearn/components/regression/gaussian_process.py index 5088a7ca36..d984dafd48 100644 --- a/ParamSklearn/components/regression/gaussian_process.py +++ b/ParamSklearn/components/regression/gaussian_process.py @@ -1,8 +1,5 @@ import numpy as np -import sklearn.gaussian_process -import sklearn.preprocessing - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter @@ -24,6 +21,9 @@ def __init__(self, nugget, thetaL, thetaU, normalize=False, copy_X=False, self.scaler = None def fit(self, X, Y): + import sklearn.gaussian_process + import sklearn.preprocessing + # Instanciate a Gaussian Process model self.estimator = sklearn.gaussian_process.GaussianProcess( corr='squared_exponential', diff --git a/ParamSklearn/components/regression/gradient_boosting.py b/ParamSklearn/components/regression/gradient_boosting.py index 766ba62dbb..5930714a87 100644 --- a/ParamSklearn/components/regression/gradient_boosting.py +++ b/ParamSklearn/components/regression/gradient_boosting.py @@ -1,5 +1,4 @@ import numpy as np -import sklearn.ensemble from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -43,6 +42,8 @@ def fit(self, X, y, sample_weight=None, refit=False): return self def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): + import sklearn.ensemble + # Special fix for gradient boosting! if isinstance(X, np.ndarray): X = np.ascontiguousarray(X, dtype=X.dtype) diff --git a/ParamSklearn/components/regression/k_nearest_neighbors.py b/ParamSklearn/components/regression/k_nearest_neighbors.py index 335fd66eaf..a597eacf5e 100644 --- a/ParamSklearn/components/regression/k_nearest_neighbors.py +++ b/ParamSklearn/components/regression/k_nearest_neighbors.py @@ -1,5 +1,3 @@ -import sklearn.neighbors - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ Constant, UniformIntegerHyperparameter @@ -16,6 +14,8 @@ def __init__(self, n_neighbors, weights, p, random_state=None): self.random_state = random_state def fit(self, X, Y): + import sklearn.neighbors + self.estimator = \ sklearn.neighbors.KNeighborsClassifier( n_neighbors=self.n_neighbors, diff --git a/ParamSklearn/components/regression/liblinear_svr.py b/ParamSklearn/components/regression/liblinear_svr.py index 713199eb7b..6ed55736a2 100644 --- a/ParamSklearn/components/regression/liblinear_svr.py +++ b/ParamSklearn/components/regression/liblinear_svr.py @@ -1,5 +1,3 @@ -import sklearn.svm - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter, Constant @@ -25,6 +23,8 @@ def __init__(self, loss, epsilon, dual, tol, C, fit_intercept, self.estimator = None def fit(self, X, Y): + import sklearn.svm + self.C = float(self.C) self.tol = float(self.tol) self.epsilon = float(self.epsilon) diff --git a/ParamSklearn/components/regression/libsvm_svr.py b/ParamSklearn/components/regression/libsvm_svr.py index a860fa3e3d..7aa53cc9a0 100644 --- a/ParamSklearn/components/regression/libsvm_svr.py +++ b/ParamSklearn/components/regression/libsvm_svr.py @@ -1,7 +1,6 @@ import resource import numpy as np -import sklearn.svm from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.conditions import InCondition @@ -31,6 +30,8 @@ def __init__(self, kernel, C, epsilon, tol, shrinking, gamma=0.0, self.estimator = None def fit(self, X, Y): + import sklearn.svm + try: soft, hard = resource.getrlimit(resource.RLIMIT_AS) if soft > 0: diff --git a/ParamSklearn/components/regression/random_forest.py b/ParamSklearn/components/regression/random_forest.py index 505ce0108a..6a22cd2949 100644 --- a/ParamSklearn/components/regression/random_forest.py +++ b/ParamSklearn/components/regression/random_forest.py @@ -7,9 +7,6 @@ from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm from ParamSklearn.constants import * -# get our own forests to replace the sklearn ones -#from ParamSklearn.implementations import forest -from sklearn.ensemble import RandomForestRegressor class RandomForest(ParamSklearnRegressionAlgorithm): @@ -40,6 +37,8 @@ def fit(self, X, y, sample_weight=None, refit=False): return self def iterative_fit(self, X, y, n_iter=1, refit=False): + from sklearn.ensemble import RandomForestRegressor + if refit: self.estimator = None diff --git a/ParamSklearn/components/regression/ridge_regression.py b/ParamSklearn/components/regression/ridge_regression.py index e65f1ced8b..2080184182 100644 --- a/ParamSklearn/components/regression/ridge_regression.py +++ b/ParamSklearn/components/regression/ridge_regression.py @@ -1,5 +1,4 @@ import numpy as np -import sklearn.linear_model from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ @@ -18,6 +17,7 @@ def __init__(self, alpha, fit_intercept, tol, random_state=None): self.estimator = None def fit(self, X, Y): + import sklearn.linear_model self.estimator = sklearn.linear_model.Ridge(alpha=self.alpha, fit_intercept=self.fit_intercept, tol=self.tol, diff --git a/ParamSklearn/components/regression/sgd.py b/ParamSklearn/components/regression/sgd.py index a9dbb56753..749bd0a21b 100644 --- a/ParamSklearn/components/regression/sgd.py +++ b/ParamSklearn/components/regression/sgd.py @@ -1,6 +1,3 @@ -from sklearn.linear_model.stochastic_gradient import SGDRegressor -import sklearn.preprocessing - from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter, UnParametrizedHyperparameter, \ @@ -38,6 +35,9 @@ def fit(self, X, y): return self def iterative_fit(self, X, y, n_iter=1, refit=False): + from sklearn.linear_model.stochastic_gradient import SGDRegressor + import sklearn.preprocessing + if refit: self.estimator = None self.scaler = None diff --git a/ParamSklearn/regression.py b/ParamSklearn/regression.py index 21280d5115..8a7704c4fb 100644 --- a/ParamSklearn/regression.py +++ b/ParamSklearn/regression.py @@ -7,7 +7,6 @@ raise ValueError("ParamSklearn supports only sklearn version 0.16.1, " "you installed %s." % sklearn.__version__) from sklearn.base import RegressorMixin -import numpy as np from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction from HPOlibConfigSpace.configuration_space import ConfigurationSpace @@ -15,7 +14,6 @@ from ParamSklearn import components as components from ParamSklearn.base import ParamSklearnBaseEstimator from ParamSklearn.constants import SPARSE -import ParamSklearn.create_searchspace_util class ParamSklearnRegressor(RegressorMixin, ParamSklearnBaseEstimator): diff --git a/tests/components/regression/test_sgd.py b/tests/components/regression/test_sgd.py index 81845b8d48..4c40a467d0 100644 --- a/tests/components/regression/test_sgd.py +++ b/tests/components/regression/test_sgd.py @@ -18,20 +18,5 @@ def test_default_configuration_iterative_fit(self): for i in range(10): predictions, targets = _test_regressor_iterative_fit(SGD) self.assertAlmostEqual(0.092460881802630235, - sklearn.metrics.r2_score(y_true=targets, - y_pred=predictions)) - - def test_default_configuration_digits(self): - for i in range(10): - predictions, targets = _test_regressor(SGD, dataset='boston') - self.assertAlmostEqual(-2.9165866511775523e+31, - sklearn.metrics.r2_score(y_true=targets, - y_pred=predictions)) - - def test_default_configuration_digits_iterative_fit(self): - for i in range(10): - predictions, targets = _test_regressor_iterative_fit(SGD, - dataset='boston') - self.assertAlmostEqual(-2.9165866511775523e+31, sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) \ No newline at end of file From 32c398b65a0368ad4bfefbbcda68c221e0de6f7f Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 26 Nov 2015 11:45:41 +0100 Subject: [PATCH 340/352] Don't test python 3.5; not supported right now --- .travis.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index bd56d73187..40271ddda1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,7 +2,6 @@ language: python python: - "2.7" - "3.4" - - "3.5" cache: directories: @@ -44,4 +43,4 @@ script: - coverage run --source autosklearn setup.py test -after_success: coveralls \ No newline at end of file +after_success: coveralls From 2f680e42e3b829c045751f69fd229f968a802855 Mon Sep 17 00:00:00 2001 From: Katharina Eggensperger Date: Fri, 27 Nov 2015 10:03:45 +0100 Subject: [PATCH 341/352] FIX test; lower bounds --- test/util/test_StopWatch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/util/test_StopWatch.py b/test/util/test_StopWatch.py index 8a6e2e7145..449f1880de 100644 --- a/test/util/test_StopWatch.py +++ b/test/util/test_StopWatch.py @@ -39,7 +39,7 @@ def test_stopwatch_overhead(self): wall_overhead = dur - watch.wall_sum() self.assertLess(wall_overhead, 2) - self.assertLess(cpu_overhead, wall_overhead) + self.assertLess(cpu_overhead, 1.2*wall_overhead) if __name__ == '__main__': From ebb78bfa175156aabb6f7dde869eb1fe142037a6 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 1 Dec 2015 17:13:12 +0100 Subject: [PATCH 342/352] Fix bug pickling suprocess.Popen in python3 did not work --- autosklearn/util/submit_process.py | 7 ++++--- test/automl/test_start_automl.py | 25 ++++++++++++++++++++++--- 2 files changed, 26 insertions(+), 6 deletions(-) diff --git a/autosklearn/util/submit_process.py b/autosklearn/util/submit_process.py index f4a3dbe74b..dbffd7b1b8 100644 --- a/autosklearn/util/submit_process.py +++ b/autosklearn/util/submit_process.py @@ -4,9 +4,8 @@ import shlex import subprocess -import lockfile +import psutil -import autosklearn from autosklearn.constants import * from autosklearn.util import logging_ as logging @@ -35,7 +34,9 @@ def submit_call(call, seed, logger, log_dir=None): logger.critical('Problem starting subprocess, see error message ' 'above. PATH is %s' % os.environ['PATH']) - return proc + pid = proc.pid + process = psutil.Process(pid) + return process def run_ensemble_builder(tmp_dir, dataset_name, task_type, metric, limit, diff --git a/test/automl/test_start_automl.py b/test/automl/test_start_automl.py index ada9a21faf..b2f0b44e84 100644 --- a/test/automl/test_start_automl.py +++ b/test/automl/test_start_automl.py @@ -6,12 +6,10 @@ import sys import time -import mock import numpy as np import six import autosklearn.automl -from autosklearn.util import Backend import ParamSklearn.util as putil from autosklearn.constants import * from autosklearn.cli.base_interface import store_and_or_load_data @@ -41,44 +39,65 @@ def test_fit(self): self._tearDown(output) def test_automl_outputs(self): + print("1.") output = os.path.join(self.test_dir, '..', '.tmp_test_automl_outputs') + print("2.") self._setUp(output) - + print("3.") name = '31_bac' + print("4.") dataset = os.path.join(self.test_dir, '..', '.data', name) + print("5.") data_manager_file = os.path.join(output, '.auto-sklearn', 'datamanager.pkl') + print("6.") queue = multiprocessing.Queue() + print("7.") auto = autosklearn.automl.AutoML( output, output, 15, 15, initial_configurations_via_metalearning=25, queue=queue, seed=100) + print("8.") auto.fit_automl_dataset(dataset) + print("9.") # pickled data manager (without one hot encoding!) with open(data_manager_file, 'rb') as fh: + print("10.") D = six.moves.cPickle.load(fh) + print("11.") self.assertTrue(np.allclose(D.data['X_train'][0, :3], [1., 12., 2.])) + print("12.") + print("13.") time_needed_to_load_data, data_manager_file, procs = \ queue.get() + print("14.") for proc in procs: proc.wait() + print("15.") # Start time print(os.listdir(os.path.join(output, '.auto-sklearn'))) + print("16.") start_time_file_path = os.path.join(output, '.auto-sklearn', "start_time_100") + print("17.") with open(start_time_file_path, 'r') as fh: + print("18.") start_time = float(fh.read()) + print("19.") self.assertGreaterEqual(time.time() - start_time, 10) + print("20.") del auto + print("21.") self._tearDown(output) + print("22.") def test_do_dummy_prediction(self): output = os.path.join(self.test_dir, '..', From 702d15dd48be58f5c2ec434737b08a673e1651fb Mon Sep 17 00:00:00 2001 From: hmendozap Date: Wed, 2 Dec 2015 16:45:04 +0100 Subject: [PATCH 343/352] Fix binary classification score and added unittest for it --- autosklearn/automl.py | 6 +++++- test/automl/test_start_automl.py | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 1 deletion(-) diff --git a/autosklearn/automl.py b/autosklearn/automl.py index 6103a11e95..7f358ce3e3 100644 --- a/autosklearn/automl.py +++ b/autosklearn/automl.py @@ -606,7 +606,11 @@ def _load_models(self): seed) def score(self, X, y): + # fix: Consider only index 1 of second dimension + # Don't know if the reshaping should be done there or in calculate_score prediction = self.predict(X) + if self._task == BINARY_CLASSIFICATION: + prediction = prediction[:, 1].reshape((-1, 1)) return calculate_score(y, prediction, self._task, self._metric, self._label_num, logger=self._logger) @@ -687,4 +691,4 @@ def _delete_output_directories(self): pass else: print("Could not delete tmp dir: %s" % - self._tmp_dir) \ No newline at end of file + self._tmp_dir) diff --git a/test/automl/test_start_automl.py b/test/automl/test_start_automl.py index ada9a21faf..cbcba020b6 100644 --- a/test/automl/test_start_automl.py +++ b/test/automl/test_start_automl.py @@ -40,6 +40,38 @@ def test_fit(self): del automl self._tearDown(output) + def test_binary_score(self): + """ + Test fix for binary classification prediction + taking the index 1 of second dimension in prediction matrix + """ + if self.travis: + self.skipTest('This test does currently not run on travis-ci. ' + 'Make sure it runs locally on your machine!') + + output = os.path.join(self.test_dir, '..', '.tmp_test_fit') + self._setUp(output) + + # Had to use this dummy dataset because + # I cannot find a way to efficiently load a binary dataset + # without changing files in paramsklearn or automl class + + X_train = np.random.rand(100, 20) + Y_train = np.random.randint(0, 2, 100) + + automl = autosklearn.automl.AutoML(output, output, 30, 15) + automl.fit(X_train, Y_train, task=BINARY_CLASSIFICATION) + self.assertEqual(automl._task, BINARY_CLASSIFICATION) + + X_test = np.random.rand(50, 20) + Y_test = np.random.randint(0, 2, 50) + + score = automl.score(X_test, Y_test) + self.assertGreaterEqual(score, 0.0) + + del automl + self._tearDown(output) + def test_automl_outputs(self): output = os.path.join(self.test_dir, '..', '.tmp_test_automl_outputs') From 57fce06db553c33eb12e6001a69257bc5c61826e Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 3 Dec 2015 19:48:24 +0100 Subject: [PATCH 344/352] FIX could not use relative path for tmp_folder and output_folder --- autosklearn/ensemble_selection_script.py | 30 +++++++++++++------ autosklearn/util/backend.py | 2 +- .../03_autosklearn_retrieve_metadata.py | 4 +-- .../04_autosklearn_calculate_metafeatures.py | 4 +-- 4 files changed, 26 insertions(+), 14 deletions(-) diff --git a/autosklearn/ensemble_selection_script.py b/autosklearn/ensemble_selection_script.py index b6bed5987f..1488729967 100644 --- a/autosklearn/ensemble_selection_script.py +++ b/autosklearn/ensemble_selection_script.py @@ -58,15 +58,20 @@ def get_predictions(dir_path, dir_path_list, include_num_runs, match = model_and_automl_re.search(model_name) automl_seed = int(match.group(1)) num_run = int(match.group(2)) + + if model_name.endswith("/"): + model_name = model_name[:-1] + basename = os.path.basename(model_name) + if (automl_seed, num_run) in include_num_runs: if precision == "16": - predictions = np.load(os.path.join(dir_path, model_name)).astype(dtype=np.float16) + predictions = np.load(os.path.join(dir_path, basename)).astype(dtype=np.float16) elif precision == "32": - predictions = np.load(os.path.join(dir_path, model_name)).astype(dtype=np.float32) + predictions = np.load(os.path.join(dir_path, basename)).astype(dtype=np.float32) elif precision == "64": - predictions = np.load(os.path.join(dir_path, model_name)).astype(dtype=np.float64) + predictions = np.load(os.path.join(dir_path, basename)).astype(dtype=np.float64) else: - predictions = np.load(os.path.join(dir_path, model_name)) + predictions = np.load(os.path.join(dir_path, basename)) result.append(predictions) return result @@ -249,7 +254,10 @@ def main(autosklearn_tmp_dir, dir_ensemble_list_mtimes = [] for dir_ensemble_file in dir_ensemble_list: - dir_ensemble_file = os.path.join(dir_ensemble, dir_ensemble_file) + if dir_ensemble_file.endswith("/"): + dir_ensemble_file = dir_ensemble_file[:-1] + basename = os.path.basename(dir_ensemble_file) + dir_ensemble_file = os.path.join(dir_ensemble, basename) mtime = os.path.getmtime(dir_ensemble_file) dir_ensemble_list_mtimes.append(mtime) @@ -285,14 +293,18 @@ def main(autosklearn_tmp_dir, model_idx = 0 for model_name in dir_ensemble_list: + if model_name.endswith("/"): + model_name = model_name[:-1] + basename = os.path.basename(model_name) + if precision is "16": - predictions = np.load(os.path.join(dir_ensemble, model_name)).astype(dtype=np.float16) + predictions = np.load(os.path.join(dir_ensemble, basename)).astype(dtype=np.float16) elif precision is "32": - predictions = np.load(os.path.join(dir_ensemble, model_name)).astype(dtype=np.float32) + predictions = np.load(os.path.join(dir_ensemble, basename)).astype(dtype=np.float32) elif precision is "64": - predictions = np.load(os.path.join(dir_ensemble, model_name)).astype(dtype=np.float64) + predictions = np.load(os.path.join(dir_ensemble, basename)).astype(dtype=np.float64) else: - predictions = np.load(os.path.join(dir_ensemble, model_name)) + predictions = np.load(os.path.join(dir_ensemble, basename)) score = calculate_score(targets_ensemble, predictions, task_type, metric, predictions.shape[1]) diff --git a/autosklearn/util/backend.py b/autosklearn/util/backend.py index 4bd9456682..585eb4385f 100644 --- a/autosklearn/util/backend.py +++ b/autosklearn/util/backend.py @@ -161,7 +161,7 @@ def load_all_models(self, seed): basename = os.path.basename(model_file) automl_seed = int(basename.split('.')[0]) idx = int(basename.split('.')[1]) - with open(os.path.join(model_directory, model_file), 'rb') as fh: + with open(os.path.join(model_directory, basename), 'rb') as fh: models[(automl_seed, idx)] = (pickle.load(fh)) return models diff --git a/scripts/update_metadata/03_autosklearn_retrieve_metadata.py b/scripts/update_metadata/03_autosklearn_retrieve_metadata.py index 90cc65fce5..fccdb90469 100644 --- a/scripts/update_metadata/03_autosklearn_retrieve_metadata.py +++ b/scripts/update_metadata/03_autosklearn_retrieve_metadata.py @@ -165,9 +165,9 @@ def retrieve_matadata(validation_directory, metric, configuration_space, configuration = Configuration( configuration_space, configuration) except Exception as e: - print "Configuration %s not applicable " \ + print("Configuration %s not applicable " \ "because of %s!" \ - % (row[1], e) + % (row[1], e)) break if str(configuration) in \ diff --git a/scripts/update_metadata/04_autosklearn_calculate_metafeatures.py b/scripts/update_metadata/04_autosklearn_calculate_metafeatures.py index 95235f8aa5..419bb96877 100644 --- a/scripts/update_metadata/04_autosklearn_calculate_metafeatures.py +++ b/scripts/update_metadata/04_autosklearn_calculate_metafeatures.py @@ -23,12 +23,12 @@ def calculate_metafeatures(dataset, output_dir, cache_only): mf = DatasetMetafeatures.load(fh) elif cache_only is False: - print dataset_name + print(dataset_name) call = "python -m autosklearn.metalearning.calculate_metafeatures " \ "--data-format automl-competition-format --dataset %s " \ "--output-dir %s" % (dataset, output_dir) - print call + print(call) retval = subprocess.call(call, shell=True) if retval != 0: From c007b94a6d60ccac6f57960badd7d59144196668 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 9 Dec 2015 13:35:23 -0500 Subject: [PATCH 345/352] Update requ.txt --- requ.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requ.txt b/requ.txt index 8f3d335341..f93fdc29e1 100644 --- a/requ.txt +++ b/requ.txt @@ -2,7 +2,7 @@ setuptools mock nose -numpy>=0.16.0 +numpy>=1.9.0 scipy>=0.14.1 scikit-learn==0.16.1 From 7b914352a1ee47b009b67a385ea4f39a69c2168f Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 15 Dec 2015 14:37:25 +0100 Subject: [PATCH 346/352] Use development branch of ParamSklearn --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 773dcd913d..dd5d2755a2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -39,7 +39,7 @@ install: # Install requirements from other repos - pip install git+https://github.com/automl/HPOlibConfigSpace.git - - pip install git+https://github.com/automl/paramsklearn.git + - pip install git+https://github.com/automl/paramsklearn.git@development - python setup.py install script: From 4d965905b8fda7f15f7afeda6030886f5096b13f Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 15 Dec 2015 14:46:07 +0100 Subject: [PATCH 347/352] Update balancing test --- .../classification/random_forest.py | 2 - .../data_preprocessing/test_balancing.py | 67 +++++++++++++------ 2 files changed, 46 insertions(+), 23 deletions(-) diff --git a/ParamSklearn/components/classification/random_forest.py b/ParamSklearn/components/classification/random_forest.py index 62027ed2d4..9c56408f43 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/ParamSklearn/components/classification/random_forest.py @@ -7,8 +7,6 @@ from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm from ParamSklearn.constants import * -# get our own forests to replace the sklearn ones -#from ParamSklearn.implementations import forest class RandomForest(ParamSklearnClassificationAlgorithm): diff --git a/tests/components/data_preprocessing/test_balancing.py b/tests/components/data_preprocessing/test_balancing.py index 9b6af91d76..ffc7b61af9 100644 --- a/tests/components/data_preprocessing/test_balancing.py +++ b/tests/components/data_preprocessing/test_balancing.py @@ -1,8 +1,10 @@ __author__ = 'feurerm' +import copy import unittest import numpy as np +import sklearn.datasets import sklearn.metrics from ParamSklearn.components.data_preprocessing.balancing import Balancing @@ -18,8 +20,6 @@ from ParamSklearn.components.feature_preprocessing\ .extra_trees_preproc_for_classification import ExtraTreesPreprocessor from ParamSklearn.components.feature_preprocessing.liblinear_svc_preprocessor import LibLinear_Preprocessor -from ParamSklearn.components.feature_preprocessing.random_trees_embedding import RandomTreesEmbedding -from ParamSklearn.util import get_dataset class BalancingComponentTest(unittest.TestCase): @@ -61,19 +61,30 @@ def test_balancing_get_weights_svm_sgd(self): list(init_params.items())[0]) def test_weighting_effect(self): + data = sklearn.datasets.make_classification( + n_samples=1000, n_features=20, n_redundant=5, n_informative=5, + n_repeated=2, n_clusters_per_class=2, weights=[0.8, 0.2], + random_state=1) + for name, clf, acc_no_weighting, acc_weighting in \ - [('adaboost', AdaboostClassifier, 0.692, 0.719), - ('decision_tree', DecisionTree, 0.712, 0.668), - ('extra_trees', ExtraTreesClassifier, 0.901, 0.919), - ('gradient_boosting', GradientBoostingClassifier, 0.879, 0.883), - ('random_forest', RandomForest, 0.886, 0.885), - ('libsvm_svc', LibSVM_SVC, 0.915, 0.937), - ('liblinear_svc', LibLinear_SVC, 0.920, 0.923), - ('sgd', SGD, 0.908, 0.901)]: + [('adaboost', AdaboostClassifier, 0.709, 0.662), + ('decision_tree', DecisionTree, 0.683, 0.726), + ('extra_trees', ExtraTreesClassifier, 0.812, 0.812), + ('gradient_boosting', GradientBoostingClassifier, + 0.800, 0.760), + ('random_forest', RandomForest, 0.849, 0.780), + ('libsvm_svc', LibSVM_SVC, 0.571, 0.658), + ('liblinear_svc', LibLinear_SVC, 0.685, 0.699), + ('sgd', SGD, 0.602, 0.720)]: for strategy, acc in [('none', acc_no_weighting), ('weighting', acc_weighting)]: # Fit - X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + data_ = copy.copy(data) + X_train = data_[0][:700] + Y_train = data_[1][:700] + X_test = data_[0][700:] + Y_test = data_[1][700:] + cs = ParamSklearnClassifier.get_hyperparameter_search_space( include={'classifier': [name]}) default = cs.get_default_configuration() @@ -82,11 +93,16 @@ def test_weighting_effect(self): predictor = classifier.fit(X_train, Y_train) predictions = predictor.predict(X_test) self.assertAlmostEqual(acc, - sklearn.metrics.accuracy_score(predictions, Y_test), + sklearn.metrics.f1_score(predictions, Y_test), places=3) # pre_transform and fit_estimator - X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + data_ = copy.copy(data) + X_train = data_[0][:700] + Y_train = data_[1][:700] + X_test = data_[0][700:] + Y_test = data_[1][700:] + cs = ParamSklearnClassifier.get_hyperparameter_search_space( include={'classifier': [name]}) default = cs.get_default_configuration() @@ -96,19 +112,23 @@ def test_weighting_effect(self): classifier.fit_estimator(Xt, Y_train, fit_params=fit_params) predictions = classifier.predict(X_test) self.assertAlmostEqual(acc, - sklearn.metrics.accuracy_score( + sklearn.metrics.f1_score( predictions, Y_test), places=3) for name, pre, acc_no_weighting, acc_weighting in \ [('extra_trees_preproc_for_classification', - ExtraTreesPreprocessor, 0.911, 0.902), - ('liblinear_svc_preprocessor', LibLinear_Preprocessor, - 0.893, 0.894)]: + ExtraTreesPreprocessor, 0.682, 0.634), + ('liblinear_svc_preprocessor', LibLinear_Preprocessor, + 0.714, 0.596)]: for strategy, acc in [('none', acc_no_weighting), ('weighting', acc_weighting)]: + data_ = copy.copy(data) + X_train = data_[0][:700] + Y_train = data_[1][:700] + X_test = data_[0][700:] + Y_test = data_[1][700:] - X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cs = ParamSklearnClassifier.get_hyperparameter_search_space( include={'classifier': ['sgd'], 'preprocessor': [name]}) default = cs.get_default_configuration() @@ -117,12 +137,17 @@ def test_weighting_effect(self): predictor = classifier.fit(X_train, Y_train) predictions = predictor.predict(X_test) self.assertAlmostEqual(acc, - sklearn.metrics.accuracy_score( + sklearn.metrics.f1_score( predictions, Y_test), places=3) # pre_transform and fit_estimator - X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + data_ = copy.copy(data) + X_train = data_[0][:700] + Y_train = data_[1][:700] + X_test = data_[0][700:] + Y_test = data_[1][700:] + cs = ParamSklearnClassifier.get_hyperparameter_search_space( include={'classifier': ['sgd'], 'preprocessor': [name]}) default = cs.get_default_configuration() @@ -132,6 +157,6 @@ def test_weighting_effect(self): classifier.fit_estimator(Xt, Y_train, fit_params=fit_params) predictions = classifier.predict(X_test) self.assertAlmostEqual(acc, - sklearn.metrics.accuracy_score( + sklearn.metrics.f1_score( predictions, Y_test), places=3) \ No newline at end of file From 9521534d1e2574a6b350cf41088bf7648b6ade80 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Tue, 15 Dec 2015 15:03:19 +0100 Subject: [PATCH 348/352] Only output coverage for auto-sklearn --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index dd5d2755a2..43d10ab58f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -44,6 +44,6 @@ install: script: # - coverage run --source autosklearn setup.py test - - cd test && nosetests -v --with-coverage + - cd test && nosetests -v --with-coverage --cover-package=autosklearn after_success: coveralls \ No newline at end of file From b0623a5967eb4d1708886edcbee4ed0dc34a57b4 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Wed, 16 Dec 2015 16:35:09 +0100 Subject: [PATCH 349/352] Refactor: integrate ParamSklearn in auto-sklearn repo --- ParamSklearn/__init__.py | 8 - .../components/feature_preprocessing/tfidf.py | 66 -- ParamSklearn/textclassification.py | 36 - autosklearn/automl.py | 4 +- autosklearn/cli/base_interface.py | 2 +- autosklearn/data/abstract_data_manager.py | 2 +- autosklearn/evaluation/abstract_evaluator.py | 8 +- .../metalearning/metafeatures/metafeatures.py | 6 +- .../pipeline}/__init__.py | 0 .../pipeline}/base.py | 24 +- .../pipeline}/classification.py | 20 +- .../pipeline}/components/__init__.py | 25 +- .../pipeline}/components/base.py | 24 +- .../components/classification/__init__.py | 4 +- .../components/classification/adaboost.py | 9 +- .../components/classification/bernoulli_nb.py | 6 +- .../classification/decision_tree.py | 8 +- .../components/classification/extra_trees.py | 6 +- .../components/classification/gaussian_nb.py | 6 +- .../classification/gradient_boosting.py | 6 +- .../classification/k_nearest_neighbors.py | 6 +- .../components/classification/lda.py | 10 +- .../classification/liblinear_svc.py | 8 +- .../components/classification/libsvm_svc.py | 8 +- .../classification/multinomial_nb.py | 6 +- .../classification/passive_aggressive.py | 10 +- .../components/classification/proj_logit.py | 8 +- .../components/classification/qda.py | 10 +- .../classification/random_forest.py | 6 +- .../components/classification/sgd.py | 10 +- .../components/data_preprocessing/__init__.py | 4 +- .../data_preprocessing/balancing.py | 11 +- .../data_preprocessing/imputation.py | 10 +- .../data_preprocessing/one_hot_encoding.py | 14 +- .../data_preprocessing/rescaling.py | 10 +- .../feature_preprocessing/__init__.py | 4 +- .../feature_preprocessing/densifier.py | 12 +- .../extra_trees_preproc_for_classification.py | 8 +- .../feature_preprocessing/fast_ica.py | 8 +- .../feature_agglomeration.py | 8 +- .../components/feature_preprocessing/gem.py | 13 +- .../feature_preprocessing/kernel_pca.py | 8 +- .../feature_preprocessing/kitchen_sinks.py | 11 +- .../liblinear_svc_preprocessor.py | 8 +- .../feature_preprocessing/no_preprocessing.py | 11 +- .../feature_preprocessing/nystroem_sampler.py | 13 +- .../components/feature_preprocessing/pca.py | 10 +- .../feature_preprocessing/polynomial.py | 8 +- .../random_trees_embedding.py | 10 +- .../select_percentile.py | 0 .../select_percentile_classification.py | 13 +- .../select_percentile_regression.py | 13 +- .../feature_preprocessing/select_rates.py | 13 +- .../feature_preprocessing/truncatedSVD.py | 10 +- .../components/regression/__init__.py | 4 +- .../components/regression/adaboost.py | 6 +- .../components/regression/decision_tree.py | 8 +- .../components/regression/extra_trees.py | 6 +- .../components/regression/gaussian_process.py | 6 +- .../regression/gradient_boosting.py | 6 +- .../regression/k_nearest_neighbors.py | 6 +- .../components/regression/liblinear_svr.py | 6 +- .../components/regression/libsvm_svr.py | 6 +- .../components/regression/random_forest.py | 6 +- .../components/regression/ridge_regression.py | 9 +- .../pipeline}/components/regression/sgd.py | 9 +- .../pipeline}/constants.py | 0 .../pipeline}/create_searchspace_util.py | 2 +- .../pipeline}/implementations/Imputation.py | 0 .../pipeline}/implementations/MinMaxScaler.py | 0 .../implementations/MultilabelClassifier.py | 0 .../pipeline}/implementations/Normalizer.py | 0 .../implementations/OneHotEncoder.py | 0 .../pipeline}/implementations/ProjLogit.py | 0 .../implementations/StandardScaler.py | 0 .../pipeline}/implementations/__init__.py | 0 .../pipeline}/implementations/gem.py | 0 .../pipeline}/implementations/util.py | 0 .../pipeline}/regression.py | 22 +- .../pipeline}/util.py | 0 .../util/{paramsklearn.py => pipeline.py} | 16 +- misc/create_hyperparameter_table.py | 14 +- misc/random_sampling.py | 24 - requ.txt | 1 - .../03_autosklearn_retrieve_metadata.py | 4 +- source/conf.py | 14 +- source/first_steps.rst | 6 +- source/index.rst | 4 +- test/automl/base.py | 3 + test/automl/test_estimators.py | 2 +- test/automl/test_start_automl.py | 47 +- test/evaluation/test_cv_evaluator.py | 4 +- test/evaluation/test_holdout_evaluator.py | 4 +- test/evaluation/test_nested_cv_evaluator.py | 4 +- .../pyMetaLearn/test_meta_base.py | 5 +- .../pyMetaLearn/test_meta_features.py | 4 +- .../pyMetaLearn/test_meta_features_sparse.py | 4 +- .../pyMetaLearn/test_metalearner.py | 4 +- test/metalearning/test_metalearning.py | 4 +- test/test_pipeline/__init__.py | 0 .../test_pipeline}/components/__init__.py | 0 .../components/classification/__init__.py | 0 .../classification/test_adaboost.py | 4 +- .../classification/test_bernoulli_nb.py | 4 +- .../classification/test_decision_tree.py | 4 +- .../classification/test_extra_trees.py | 4 +- .../classification/test_gaussian_nb.py | 4 +- .../classification/test_gradient_boosting.py | 4 +- .../classification/test_k_nearest_neighbor.py | 4 +- .../components/classification/test_lda.py | 4 +- .../classification/test_liblinear.py | 4 +- .../classification/test_libsvm_svc.py | 4 +- .../classification/test_multinomial_nb.py | 4 +- .../classification/test_passive_aggressive.py | 4 +- .../classification/test_proj_logit.py | 4 +- .../components/classification/test_qda.py | 4 +- .../classification/test_random_forest.py | 4 +- .../components/classification/test_sgd.py | 4 +- .../components/data_preprocessing/__init__.py | 0 .../data_preprocessing/test_balancing.py | 43 +- .../data_preprocessing/test_imputation.py | 4 +- .../test_one_hot_encoding.py | 4 +- .../data_preprocessing/test_scaling.py | 4 +- .../feature_preprocessing/__init__.py | 0 .../test_NoPreprocessing.py | 4 +- .../feature_preprocessing/test_choice.py | 2 +- .../feature_preprocessing/test_densifier.py | 4 +- .../feature_preprocessing/test_extra_trees.py | 4 +- .../feature_preprocessing/test_fast_ica.py | 4 +- .../test_feature_agglomeration.py | 4 +- .../feature_preprocessing/test_gem.py | 6 +- .../feature_preprocessing/test_kernel_pca.py | 4 +- .../test_kitchen_sinks.py | 4 +- .../feature_preprocessing/test_liblinear.py | 4 +- .../test_nystroem_sampler.py | 4 +- .../feature_preprocessing/test_pca.py | 4 +- .../feature_preprocessing/test_polynomial.py | 4 +- .../test_random_trees_embedding.py | 4 +- .../test_select_percentile_classification.py | 4 +- .../test_select_percentile_regression.py | 4 +- .../test_select_rates.py | 4 +- .../test_truncatedSVD.py | 4 +- .../components/regression/__init__.py | 0 .../components/regression/liblinear_svr.py | 4 +- .../components/regression/test_adaboost.py | 4 +- .../regression/test_decision_tree.py | 4 +- .../components/regression/test_extra_trees.py | 4 +- .../regression/test_gaussian_process.py | 4 +- .../regression/test_gradient_boosting.py | 4 +- .../regression/test_k_nearest_neighbors.py | 4 +- .../regression/test_random_forests.py | 4 +- .../regression/test_ridge_regression.py | 6 +- .../components/regression/test_sgd.py | 4 +- .../test_support_vector_regression.py | 4 +- .../implementations/__init__.py | 0 .../implementations/test_OneHotEncoder.py | 2 +- .../implementations/test_ProjLogit.py | 2 +- .../implementations/test_imputation.py | 2 +- .../implementations/test_minmaxscaler.py | 4 +- .../implementations/test_standard_scaler.py | 4 +- .../implementations/test_util.py | 2 +- {tests => test/test_pipeline}/test_base.py | 14 +- .../test_pipeline}/test_classification.py | 96 +- ..._create_searchspace_util_classification.py | 38 +- .../test_pipeline}/test_doctests.py | 4 +- .../test_pipeline}/test_regression.py | 72 +- .../test_pipeline}/test_textclassification.py | 4 +- testcommand.sh | 2 +- .../components/data_preprocessing/dataset.pkl | 898 ------------------ 169 files changed, 556 insertions(+), 1677 deletions(-) delete mode 100644 ParamSklearn/__init__.py delete mode 100644 ParamSklearn/components/feature_preprocessing/tfidf.py delete mode 100644 ParamSklearn/textclassification.py rename {tests/components/regression => autosklearn/pipeline}/__init__.py (100%) rename {ParamSklearn => autosklearn/pipeline}/base.py (94%) rename {ParamSklearn => autosklearn/pipeline}/classification.py (94%) rename {ParamSklearn => autosklearn/pipeline}/components/__init__.py (53%) rename {ParamSklearn => autosklearn/pipeline}/components/base.py (95%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/__init__.py (97%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/adaboost.py (92%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/bernoulli_nb.py (95%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/decision_tree.py (95%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/extra_trees.py (97%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/gaussian_nb.py (93%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/gradient_boosting.py (97%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/k_nearest_neighbors.py (93%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/lda.py (93%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/liblinear_svc.py (95%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/libsvm_svc.py (97%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/multinomial_nb.py (95%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/passive_aggressive.py (93%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/proj_logit.py (88%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/qda.py (89%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/random_forest.py (97%) rename {ParamSklearn => autosklearn/pipeline}/components/classification/sgd.py (95%) rename {ParamSklearn => autosklearn/pipeline}/components/data_preprocessing/__init__.py (87%) rename {ParamSklearn => autosklearn/pipeline}/components/data_preprocessing/balancing.py (93%) rename {ParamSklearn => autosklearn/pipeline}/components/data_preprocessing/imputation.py (87%) rename {ParamSklearn => autosklearn/pipeline}/components/data_preprocessing/one_hot_encoding.py (88%) rename {ParamSklearn => autosklearn/pipeline}/components/data_preprocessing/rescaling.py (96%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/__init__.py (97%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/densifier.py (82%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/extra_trees_preproc_for_classification.py (96%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/fast_ica.py (94%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/feature_agglomeration.py (94%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/gem.py (83%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/kernel_pca.py (95%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/kitchen_sinks.py (88%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/liblinear_svc_preprocessor.py (95%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/no_preprocessing.py (82%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/nystroem_sampler.py (94%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/pca.py (90%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/polynomial.py (93%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/random_trees_embedding.py (93%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/select_percentile.py (100%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/select_percentile_classification.py (91%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/select_percentile_regression.py (84%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/select_rates.py (93%) rename {ParamSklearn => autosklearn/pipeline}/components/feature_preprocessing/truncatedSVD.py (88%) rename {ParamSklearn => autosklearn/pipeline}/components/regression/__init__.py (97%) rename {ParamSklearn => autosklearn/pipeline}/components/regression/adaboost.py (94%) rename {ParamSklearn => autosklearn/pipeline}/components/regression/decision_tree.py (95%) rename {ParamSklearn => autosklearn/pipeline}/components/regression/extra_trees.py (97%) rename {ParamSklearn => autosklearn/pipeline}/components/regression/gaussian_process.py (94%) rename {ParamSklearn => autosklearn/pipeline}/components/regression/gradient_boosting.py (97%) rename {ParamSklearn => autosklearn/pipeline}/components/regression/k_nearest_neighbors.py (92%) rename {ParamSklearn => autosklearn/pipeline}/components/regression/liblinear_svr.py (95%) rename {ParamSklearn => autosklearn/pipeline}/components/regression/libsvm_svr.py (97%) rename {ParamSklearn => autosklearn/pipeline}/components/regression/random_forest.py (96%) rename {ParamSklearn => autosklearn/pipeline}/components/regression/ridge_regression.py (91%) rename {ParamSklearn => autosklearn/pipeline}/components/regression/sgd.py (96%) rename {ParamSklearn => autosklearn/pipeline}/constants.py (100%) rename {ParamSklearn => autosklearn/pipeline}/create_searchspace_util.py (99%) rename {ParamSklearn => autosklearn/pipeline}/implementations/Imputation.py (100%) rename {ParamSklearn => autosklearn/pipeline}/implementations/MinMaxScaler.py (100%) rename {ParamSklearn => autosklearn/pipeline}/implementations/MultilabelClassifier.py (100%) rename {ParamSklearn => autosklearn/pipeline}/implementations/Normalizer.py (100%) rename {ParamSklearn => autosklearn/pipeline}/implementations/OneHotEncoder.py (100%) rename {ParamSklearn => autosklearn/pipeline}/implementations/ProjLogit.py (100%) rename {ParamSklearn => autosklearn/pipeline}/implementations/StandardScaler.py (100%) rename {ParamSklearn => autosklearn/pipeline}/implementations/__init__.py (100%) rename {ParamSklearn => autosklearn/pipeline}/implementations/gem.py (100%) rename {ParamSklearn => autosklearn/pipeline}/implementations/util.py (100%) rename {ParamSklearn => autosklearn/pipeline}/regression.py (93%) rename {ParamSklearn => autosklearn/pipeline}/util.py (100%) rename autosklearn/util/{paramsklearn.py => pipeline.py} (80%) delete mode 100644 misc/random_sampling.py create mode 100644 test/test_pipeline/__init__.py rename {tests => test/test_pipeline}/components/__init__.py (100%) rename {tests => test/test_pipeline}/components/classification/__init__.py (100%) rename {tests => test/test_pipeline}/components/classification/test_adaboost.py (90%) rename {tests => test/test_pipeline}/components/classification/test_bernoulli_nb.py (84%) rename {tests => test/test_pipeline}/components/classification/test_decision_tree.py (87%) rename {tests => test/test_pipeline}/components/classification/test_extra_trees.py (88%) rename {tests => test/test_pipeline}/components/classification/test_gaussian_nb.py (84%) rename {tests => test/test_pipeline}/components/classification/test_gradient_boosting.py (83%) rename {tests => test/test_pipeline}/components/classification/test_k_nearest_neighbor.py (87%) rename {tests => test/test_pipeline}/components/classification/test_lda.py (87%) rename {tests => test/test_pipeline}/components/classification/test_liblinear.py (69%) rename {tests => test/test_pipeline}/components/classification/test_libsvm_svc.py (94%) rename {tests => test/test_pipeline}/components/classification/test_multinomial_nb.py (91%) rename {tests => test/test_pipeline}/components/classification/test_passive_aggressive.py (91%) rename {tests => test/test_pipeline}/components/classification/test_proj_logit.py (82%) rename {tests => test/test_pipeline}/components/classification/test_qda.py (87%) rename {tests => test/test_pipeline}/components/classification/test_random_forest.py (86%) rename {tests => test/test_pipeline}/components/classification/test_sgd.py (91%) rename {tests => test/test_pipeline}/components/data_preprocessing/__init__.py (100%) rename {tests => test/test_pipeline}/components/data_preprocessing/test_balancing.py (79%) rename {tests => test/test_pipeline}/components/data_preprocessing/test_imputation.py (88%) rename {tests => test/test_pipeline}/components/data_preprocessing/test_one_hot_encoding.py (96%) rename {tests => test/test_pipeline}/components/data_preprocessing/test_scaling.py (94%) rename {tests => test/test_pipeline}/components/feature_preprocessing/__init__.py (100%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_NoPreprocessing.py (82%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_choice.py (94%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_densifier.py (75%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_extra_trees.py (90%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_fast_ica.py (91%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_feature_agglomeration.py (90%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_gem.py (85%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_kernel_pca.py (92%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_kitchen_sinks.py (76%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_liblinear.py (91%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_nystroem_sampler.py (96%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_pca.py (82%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_polynomial.py (91%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_random_trees_embedding.py (92%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_select_percentile_classification.py (95%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_select_percentile_regression.py (90%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_select_rates.py (96%) rename {tests => test/test_pipeline}/components/feature_preprocessing/test_truncatedSVD.py (92%) create mode 100644 test/test_pipeline/components/regression/__init__.py rename {tests => test/test_pipeline}/components/regression/liblinear_svr.py (80%) rename {tests => test/test_pipeline}/components/regression/test_adaboost.py (88%) rename {tests => test/test_pipeline}/components/regression/test_decision_tree.py (85%) rename {tests => test/test_pipeline}/components/regression/test_extra_trees.py (88%) rename {tests => test/test_pipeline}/components/regression/test_gaussian_process.py (78%) rename {tests => test/test_pipeline}/components/regression/test_gradient_boosting.py (79%) rename {tests => test/test_pipeline}/components/regression/test_k_nearest_neighbors.py (87%) rename {tests => test/test_pipeline}/components/regression/test_random_forests.py (85%) rename {tests => test/test_pipeline}/components/regression/test_ridge_regression.py (87%) rename {tests => test/test_pipeline}/components/regression/test_sgd.py (84%) rename {tests => test/test_pipeline}/components/regression/test_support_vector_regression.py (84%) rename {tests => test/test_pipeline}/implementations/__init__.py (100%) rename {tests => test/test_pipeline}/implementations/test_OneHotEncoder.py (99%) rename {tests => test/test_pipeline}/implementations/test_ProjLogit.py (94%) rename {tests => test/test_pipeline}/implementations/test_imputation.py (99%) rename {tests => test/test_pipeline}/implementations/test_minmaxscaler.py (97%) rename {tests => test/test_pipeline}/implementations/test_standard_scaler.py (98%) rename {tests => test/test_pipeline}/implementations/test_util.py (96%) rename {tests => test/test_pipeline}/test_base.py (89%) rename {tests => test/test_pipeline}/test_classification.py (89%) rename {tests => test/test_pipeline}/test_create_searchspace_util_classification.py (76%) rename {tests => test/test_pipeline}/test_doctests.py (79%) rename {tests => test/test_pipeline}/test_regression.py (81%) rename {tests => test/test_pipeline}/test_textclassification.py (86%) delete mode 100644 tests/components/data_preprocessing/dataset.pkl diff --git a/ParamSklearn/__init__.py b/ParamSklearn/__init__.py deleted file mode 100644 index dbdf47cf91..0000000000 --- a/ParamSklearn/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -"""ParamSklearn is a python package to solve the Combined Algorithm Selection and -Hyperparameter Optimization problem (CASH) for the scikit-learn package. - -ParamSklearn provides a configuration space spanning a huge part of the -scikit-learn models. This configuration space can be searched by one of the -hyperparameter optimization algorithms in HPOlib.""" - -__version__ = "0.16.1.0" \ No newline at end of file diff --git a/ParamSklearn/components/feature_preprocessing/tfidf.py b/ParamSklearn/components/feature_preprocessing/tfidf.py deleted file mode 100644 index 0fd0915249..0000000000 --- a/ParamSklearn/components/feature_preprocessing/tfidf.py +++ /dev/null @@ -1,66 +0,0 @@ -from HPOlibConfigSpace.configuration_space import ConfigurationSpace, \ - Configuration - - -from ..base import ParamSklearnPreprocessingAlgorithm - -import numpy as np - - -class TFIDF(object):#ParamSklearnPreprocessingAlgorithm): - def __init__(self, random_state=None): - # This is implementation is for sparse data only! It will make inplace changes to the data! - - self.idf = None - self.random_state = random_state - - def fit(self, X, y): - #count the number of documents in which each word occurs - # @Stefan: Is there a reason why this is called weights and not - # document_frequency? - weights = (X>0.0).sum(axis=0) - # words that never appear have to be treated differently! - # @Stefan: Doesn't weights == 0 yield a boolean numpy array which can - # be directly used for indexing? - indices = np.ravel(np.where(weights == 0)[1]) - - # calculate (the log of) the inverse document frequencies - self.idf = np.array(np.log(float(X.shape[0])/(weights)))[0] - # words that are not in the training data get will be set to zero - self.idf[indices] = 0 - - return self - - def transform(self, X): - if self.idf is None: - raise NotImplementedError() - X.data *= self.idf[X.indices] - return X - - @staticmethod - def get_properties(dataset_properties=None): - return {'shortname': 'TFIDF', - 'name': 'Term Frequency / Inverse Document Frequency', - 'handles_missing_values': False, - 'handles_nominal_values': False, - 'handles_numerical_features': True, - 'prefers_data_scaled': False, - 'prefers_data_normalized': False, - 'handles_regression': False, - 'handles_classification': True, - 'handles_multiclass': True, - 'handles_multilabel': True, - 'is_deterministic': True, - 'handles_sparse': True, - 'handles_dense': True, - # TODO find out what is best used here! - 'preferred_dtype': np.float32} - - @staticmethod - def get_hyperparameter_search_space(dataset_properties=None): - cs = ConfigurationSpace() - return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %" % name diff --git a/ParamSklearn/textclassification.py b/ParamSklearn/textclassification.py deleted file mode 100644 index 1092354ad9..0000000000 --- a/ParamSklearn/textclassification.py +++ /dev/null @@ -1,36 +0,0 @@ -from .classification import ParamSklearnClassifier - - -class ParamSklearnTextClassifier(ParamSklearnClassifier): - @classmethod - def get_hyperparameter_search_space(cls, include_estimators=None, - exclude_estimators=None, - include_preprocessors=None, - exclude_preprocessors=None, - dataset_properties=None): - if include_preprocessors is None: - if exclude_preprocessors is None: - exclude_preprocessors = ["rescaling"] - elif isinstance(exclude_preprocessors, list): - exclude_preprocessors.append(exclude_preprocessors) - else: - raise TypeError() - - # @Stefan: you can exclude classifiers and preprocessing methods here - # From here: http://blog.devzero.com/2013/01/28/how-to-override-a-class-method-in-python/ - cs = super(ParamSklearnTextClassifier, cls).\ - get_hyperparameter_search_space( - include_estimators=include_estimators, - exclude_estimators=exclude_estimators, - include_preprocessors=include_preprocessors, - exclude_preprocessors=exclude_preprocessors, - dataset_properties=dataset_properties - ) - - return cs - - @staticmethod - def _get_pipeline(): - # TODO @Stefan: you probably want to add row normalization after the - # preprocessing step - return ["imputation", "__preprocessor__", "__estimator__"] \ No newline at end of file diff --git a/autosklearn/automl.py b/autosklearn/automl.py index 468a35830c..bc014878ec 100644 --- a/autosklearn/automl.py +++ b/autosklearn/automl.py @@ -26,7 +26,7 @@ convert_conf2smac_string from autosklearn.evaluation import calculate_score from autosklearn.util import StopWatch, get_logger, setup_logger, \ - get_auto_seed, set_auto_seed, del_auto_seed, submit_process, paramsklearn, \ + get_auto_seed, set_auto_seed, del_auto_seed, submit_process, pipeline, \ Backend from autosklearn.util.smac import run_smac @@ -76,7 +76,7 @@ def _create_search_space(tmp_dir, data_info, backend, watcher, logger, task_name = 'CreateConfigSpace' watcher.start_task(task_name) configspace_path = os.path.join(tmp_dir, 'space.pcs') - configuration_space = paramsklearn.get_configuration_space( + configuration_space = pipeline.get_configuration_space( data_info, include_estimators=include_estimators, include_preprocessors=include_preprocessors) diff --git a/autosklearn/cli/base_interface.py b/autosklearn/cli/base_interface.py index da724bdbaa..a4f8bb831e 100644 --- a/autosklearn/cli/base_interface.py +++ b/autosklearn/cli/base_interface.py @@ -11,7 +11,7 @@ from autosklearn.data.competition_data_manager import CompetitionDataManager from autosklearn.evaluation import CVEvaluator, HoldoutEvaluator, \ NestedCVEvaluator, TestEvaluator, get_new_run_num -from autosklearn.util.paramsklearn import get_configuration_space +from autosklearn.util.pipeline import get_configuration_space from autosklearn.util import Backend diff --git a/autosklearn/data/abstract_data_manager.py b/autosklearn/data/abstract_data_manager.py index bc60fc90d8..63355105d4 100644 --- a/autosklearn/data/abstract_data_manager.py +++ b/autosklearn/data/abstract_data_manager.py @@ -5,7 +5,7 @@ import numpy as np import scipy.sparse -from ParamSklearn.implementations.OneHotEncoder import OneHotEncoder +from autosklearn.pipeline.implementations.OneHotEncoder import OneHotEncoder from autosklearn.util import predict_RAM_usage diff --git a/autosklearn/evaluation/abstract_evaluator.py b/autosklearn/evaluation/abstract_evaluator.py index 566bcf84a5..833c3bf14c 100644 --- a/autosklearn/evaluation/abstract_evaluator.py +++ b/autosklearn/evaluation/abstract_evaluator.py @@ -7,8 +7,8 @@ import numpy as np import lockfile -from ParamSklearn.classification import ParamSklearnClassifier -from ParamSklearn.regression import ParamSklearnRegressor +from autosklearn.pipeline.classification import SimpleClassificationPipeline +from autosklearn.pipeline.regression import SimpleRegressionPipeline from sklearn.dummy import DummyClassifier, DummyRegressor from autosklearn.constants import * @@ -106,13 +106,13 @@ def __init__(self, Datamanager, configuration=None, if self.configuration is None: self.model_class = MyDummyRegressor else: - self.model_class = ParamSklearnRegressor + self.model_class = SimpleRegressionPipeline self.predict_function = self.predict_regression else: if self.configuration is None: self.model_class = MyDummyClassifier else: - self.model_class = ParamSklearnClassifier + self.model_class = SimpleClassificationPipeline self.predict_function = self.predict_proba if num_run is None: diff --git a/autosklearn/metalearning/metafeatures/metafeatures.py b/autosklearn/metalearning/metafeatures/metafeatures.py index 75bdd47709..f1b0a02a93 100644 --- a/autosklearn/metalearning/metafeatures/metafeatures.py +++ b/autosklearn/metalearning/metafeatures/metafeatures.py @@ -13,9 +13,9 @@ from sklearn.utils import check_array from sklearn.multiclass import OneVsRestClassifier -from ParamSklearn.implementations.Imputation import Imputer -from ParamSklearn.implementations.OneHotEncoder import OneHotEncoder -from ParamSklearn.implementations.StandardScaler import StandardScaler +from autosklearn.pipeline.implementations.Imputation import Imputer +from autosklearn.pipeline.implementations.OneHotEncoder import OneHotEncoder +from autosklearn.pipeline.implementations.StandardScaler import StandardScaler from autosklearn.util.logging_ import get_logger from .metafeature import MetaFeature, HelperFunction, DatasetMetafeatures, \ diff --git a/tests/components/regression/__init__.py b/autosklearn/pipeline/__init__.py similarity index 100% rename from tests/components/regression/__init__.py rename to autosklearn/pipeline/__init__.py diff --git a/ParamSklearn/base.py b/autosklearn/pipeline/base.py similarity index 94% rename from ParamSklearn/base.py rename to autosklearn/pipeline/base.py index 17742f7ce9..1aa94770b6 100644 --- a/ParamSklearn/base.py +++ b/autosklearn/pipeline/base.py @@ -2,21 +2,17 @@ from collections import defaultdict import numpy as np -import sklearn -if sklearn.__version__ != "0.16.1": - raise ValueError("ParamSklearn supports only sklearn version 0.16.1, " - "you installed %s." % sklearn.__version__) from sklearn.base import BaseEstimator from sklearn.pipeline import Pipeline from sklearn.utils.validation import check_random_state, check_is_fitted -from ParamSklearn import components as components -import ParamSklearn.create_searchspace_util +from autosklearn.pipeline import components as components +import autosklearn.pipeline.create_searchspace_util -class ParamSklearnBaseEstimator(BaseEstimator): - """Base class for all ParamSklearn task models. +class BasePipeline(BaseEstimator): + """Base class for all pipeline objects. Notes ----- @@ -176,7 +172,7 @@ def predict(self, X, batch_size=None): X : array-like, shape = (n_samples, n_features) batch_size: int or None, defaults to None - batch_size controls whether the ParamSklearn pipeline will be + batch_size controls whether the pipeline will be called on small chunks of the data. Useful when calling the predict method on the whole array X results in a MemoryError. @@ -246,7 +242,7 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, Returns ------- cs : HPOlibConfigSpace.configuration_space.Configuration - The configuration space describing the ParamSklearnClassifier. + The configuration space describing the AutoSklearnClassifier. """ raise NotImplementedError() @@ -279,7 +275,7 @@ def _get_hyperparameter_search_space(cls, cs, dataset_properties, exclude, # This dataset probably contains unsigned data dataset_properties['signed'] = False - matches = ParamSklearn.create_searchspace_util.get_match_array( + matches = autosklearn.pipeline.create_searchspace_util.get_match_array( pipeline, dataset_properties, include=include, exclude=exclude) # Now we have only legal combinations at this step of the pipeline @@ -304,7 +300,7 @@ def _get_hyperparameter_search_space(cls, cs, dataset_properties, exclude, # If the node isn't a choice, we have to figure out which of it's # choices are actually legal choices else: - choices_list = ParamSklearn.create_searchspace_util.\ + choices_list = autosklearn.pipeline.create_searchspace_util.\ find_active_choices(matches, node, node_idx, dataset_properties, include.get(node_name), @@ -315,7 +311,7 @@ def _get_hyperparameter_search_space(cls, cs, dataset_properties, exclude, # And now add forbidden parameter configurations # According to matches if np.sum(matches) < np.size(matches): - cs = ParamSklearn.create_searchspace_util.add_forbidden( + cs = autosklearn.pipeline.create_searchspace_util.add_forbidden( conf_space=cs, pipeline=pipeline, matches=matches, dataset_properties=dataset_properties, include=include, exclude=exclude) @@ -341,7 +337,7 @@ def __repr__(self): @classmethod def _get_pipeline(cls): - if cls == ParamSklearnBaseEstimator: + if cls == autosklearn.pipelineBaseEstimator: return [] raise NotImplementedError() diff --git a/ParamSklearn/classification.py b/autosklearn/pipeline/classification.py similarity index 94% rename from ParamSklearn/classification.py rename to autosklearn/pipeline/classification.py index fa332f63ab..a41cc49125 100644 --- a/ParamSklearn/classification.py +++ b/autosklearn/pipeline/classification.py @@ -8,13 +8,13 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction -from ParamSklearn import components as components -from ParamSklearn.base import ParamSklearnBaseEstimator -from ParamSklearn.constants import SPARSE -from ParamSklearn.components.data_preprocessing.balancing import Balancing +from autosklearn.pipeline import components as components +from autosklearn.pipeline.base import BasePipeline +from autosklearn.pipeline.constants import SPARSE +from autosklearn.pipeline.components.data_preprocessing.balancing import Balancing -class ParamSklearnClassifier(ClassifierMixin, ParamSklearnBaseEstimator): +class SimpleClassificationPipeline(ClassifierMixin, BasePipeline): """This class implements the classification task. It implements a pipeline, which includes one preprocessing step and one @@ -42,12 +42,14 @@ class ParamSklearnClassifier(ClassifierMixin, ParamSklearnBaseEstimator): ---------- _estimator : The underlying scikit-learn classification model. This variable is assigned after a call to the - :meth:`ParamSklearn.classification.ParamSklearnClassifier.fit` method. + :meth:`autosklearn.pipeline.classification.SimpleClassificationPipeline + .fit` method. _preprocessor : The underlying scikit-learn preprocessing algorithm. This variable is only assigned if a preprocessor is specified and after a call to the - :meth:`ParamSklearn.classification.ParamSklearnClassifier.fit` method. + :meth:`autosklearn.pipeline.classification.SimpleClassificationPipeline + .fit` method. See also -------- @@ -71,7 +73,7 @@ def pre_transform(self, X, y, fit_params=None, init_params=None): self.configuration['preprocessor:__choice__'], init_params, fit_params) - X, fit_params = super(ParamSklearnClassifier, self).pre_transform( + X, fit_params = super(SimpleClassificationPipeline, self).pre_transform( X, y, fit_params=fit_params, init_params=init_params) return X, fit_params @@ -84,7 +86,7 @@ def predict_proba(self, X, batch_size=None): X : array-like, shape = (n_samples, n_features) batch_size: int or None, defaults to None - batch_size controls whether the ParamSklearn pipeline will be + batch_size controls whether the pipeline will be called on small chunks of the data. Useful when calling the predict method on the whole array X results in a MemoryError. diff --git a/ParamSklearn/components/__init__.py b/autosklearn/pipeline/components/__init__.py similarity index 53% rename from ParamSklearn/components/__init__.py rename to autosklearn/pipeline/components/__init__.py index 44fd0f9e7f..3312b4d12a 100644 --- a/ParamSklearn/components/__init__.py +++ b/autosklearn/pipeline/components/__init__.py @@ -1,8 +1,9 @@ -"""ParamSklearn can be easily extended with new classification and -preprocessing methods. At import time, ParamSklearn checks the directory -``ParamSklearn/components/classification`` for classification algorithms and -``ParamSklearn/components/preprocessing`` for preprocessing algorithms. To be -found, the algorithm must be provide a class implementing one of the given +"""auto-sklearn can be easily extended with new classification and +preprocessing methods. At import time, auto-sklearn checks the directory +``autosklearn/pipeline/components/classification`` for classification +algorithms and ``autosklearn/pipeline/components/preprocessing`` for +preprocessing algorithms. To be found, the algorithm must be provide a class +implementing one of the given interfaces. Coding Guidelines @@ -16,21 +17,21 @@ but rather recommend to implement an algorithm in a scikit-learn compatible way (`see here `_). Such an implementation should then be put into the `implementation` directory. -and can then be easily wrapped with to become a component in ParamSklearn. +and can then be easily wrapped with to become a component in auto-sklearn. Classification ============== -The ParamSklearnClassificationAlgorithm provides an interface for -Classification Algorithms inside ParamSklearn. It provides four important +The SimpleClassificationPipeline provides an interface for +Classification Algorithms inside auto-sklearn. It provides four important functions. Two of them, -:meth:`get_hyperparameter_search_space() ` +:meth:`get_hyperparameter_search_space() ` and -:meth:`get_properties() ` +:meth:`get_properties() ` are used to automatically create a valid configuration space. The other two, -:meth:`fit() ` and -:meth:`predict() ` +:meth:`fit() ` and +:meth:`predict() ` are an implementation of the `scikit-learn predictor API `_. Preprocessing diff --git a/ParamSklearn/components/base.py b/autosklearn/pipeline/components/base.py similarity index 95% rename from ParamSklearn/components/base.py rename to autosklearn/pipeline/components/base.py index ffb860a3d6..ea1df4b719 100644 --- a/ParamSklearn/components/base.py +++ b/autosklearn/pipeline/components/base.py @@ -1,9 +1,9 @@ -class ParamSklearnClassificationAlgorithm(object): +class AutoSklearnClassificationAlgorithm(object): """Provide an abstract interface for classification algorithms in - ParamSklearn. + auto-sklearn. Make a subclass of this and put it into the directory - `ParamSklearn/components/classification` to make it available.""" + `autosklearn/pipeline/components/classification` to make it available.""" def __init__(self): self.estimator = None @@ -120,15 +120,15 @@ def get_estimator(self): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %s" % name + return "autosklearn.pipeline %s" % name -class ParamSklearnPreprocessingAlgorithm(object): +class AutoSklearnPreprocessingAlgorithm(object): """Provide an abstract interface for preprocessing algorithms in - ParamSklearn. + auto-sklearn. Make a subclass of this and put it into the directory - `ParamSklearn/components/preprocessing` to make it available.""" + `autosklearn/pipeline/components/preprocessing` to make it available.""" def __init__(self): self.preprocessor = None @@ -234,15 +234,15 @@ def get_preprocessor(self): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "autosklearn.pipeline %" % name -class ParamSklearnRegressionAlgorithm(object): +class AutoSklearnRegressionAlgorithm(object): """Provide an abstract interface for regression algorithms in - ParamSklearn. + auto-sklearn. Make a subclass of this and put it into the directory - `ParamSklearn/components/regression` to make it available.""" + `autosklearn/pipeline/components/regression` to make it available.""" def __init__(self): self.estimator = None @@ -355,6 +355,6 @@ def get_estimator(self): def __str__(self): name = self.get_properties()['name'] - return "ParamSklearn %" % name + return "autosklearn.pipeline %" % name diff --git a/ParamSklearn/components/classification/__init__.py b/autosklearn/pipeline/components/classification/__init__.py similarity index 97% rename from ParamSklearn/components/classification/__init__.py rename to autosklearn/pipeline/components/classification/__init__.py index a06428a008..6b62ed19b9 100644 --- a/ParamSklearn/components/classification/__init__.py +++ b/autosklearn/pipeline/components/classification/__init__.py @@ -8,7 +8,7 @@ import pkgutil import sys -from ..base import ParamSklearnClassificationAlgorithm +from ..base import AutoSklearnClassificationAlgorithm from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition @@ -23,7 +23,7 @@ module = importlib.import_module(full_module_name) for member_name, obj in inspect.getmembers(module): - if inspect.isclass(obj) and ParamSklearnClassificationAlgorithm in obj.__bases__: + if inspect.isclass(obj) and AutoSklearnClassificationAlgorithm in obj.__bases__: # TODO test if the obj implements the interface # Keep in mind that this only instantiates the ensemble_wrapper, # but not the real target classifier diff --git a/ParamSklearn/components/classification/adaboost.py b/autosklearn/pipeline/components/classification/adaboost.py similarity index 92% rename from ParamSklearn/components/classification/adaboost.py rename to autosklearn/pipeline/components/classification/adaboost.py index 4190980d76..abcaf1bc61 100644 --- a/ParamSklearn/components/classification/adaboost.py +++ b/autosklearn/pipeline/components/classification/adaboost.py @@ -1,16 +1,17 @@ import numpy as np -from ParamSklearn.implementations.MultilabelClassifier import MultilabelClassifier +from autosklearn.pipeline.implementations.MultilabelClassifier import \ + MultilabelClassifier from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, CategoricalHyperparameter -from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * -class AdaboostClassifier(ParamSklearnClassificationAlgorithm): +class AdaboostClassifier(AutoSklearnClassificationAlgorithm): def __init__(self, n_estimators, learning_rate, algorithm, max_depth, random_state=None): diff --git a/ParamSklearn/components/classification/bernoulli_nb.py b/autosklearn/pipeline/components/classification/bernoulli_nb.py similarity index 95% rename from ParamSklearn/components/classification/bernoulli_nb.py rename to autosklearn/pipeline/components/classification/bernoulli_nb.py index c2a8814151..fc4e34f3a7 100644 --- a/ParamSklearn/components/classification/bernoulli_nb.py +++ b/autosklearn/pipeline/components/classification/bernoulli_nb.py @@ -4,11 +4,11 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter -from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * -class BernoulliNB(ParamSklearnClassificationAlgorithm): +class BernoulliNB(AutoSklearnClassificationAlgorithm): def __init__(self, alpha, fit_prior, random_state=None, verbose=0): self.alpha = alpha if fit_prior.lower() == "true": diff --git a/ParamSklearn/components/classification/decision_tree.py b/autosklearn/pipeline/components/classification/decision_tree.py similarity index 95% rename from ParamSklearn/components/classification/decision_tree.py rename to autosklearn/pipeline/components/classification/decision_tree.py index a9d91aabed..e0804d555b 100644 --- a/ParamSklearn/components/classification/decision_tree.py +++ b/autosklearn/pipeline/components/classification/decision_tree.py @@ -5,12 +5,12 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ParamSklearn.components.base import \ - ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import \ + AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * -class DecisionTree(ParamSklearnClassificationAlgorithm): +class DecisionTree(AutoSklearnClassificationAlgorithm): def __init__(self, criterion, splitter, max_features, max_depth, min_samples_split, min_samples_leaf, min_weight_fraction_leaf, max_leaf_nodes, class_weight=None, random_state=None): diff --git a/ParamSklearn/components/classification/extra_trees.py b/autosklearn/pipeline/components/classification/extra_trees.py similarity index 97% rename from ParamSklearn/components/classification/extra_trees.py rename to autosklearn/pipeline/components/classification/extra_trees.py index acf290e988..e4276a50df 100644 --- a/ParamSklearn/components/classification/extra_trees.py +++ b/autosklearn/pipeline/components/classification/extra_trees.py @@ -5,11 +5,11 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * -class ExtraTreesClassifier(ParamSklearnClassificationAlgorithm): +class ExtraTreesClassifier(AutoSklearnClassificationAlgorithm): def __init__(self, n_estimators, criterion, min_samples_leaf, min_samples_split, max_features, max_leaf_nodes_or_max_depth="max_depth", diff --git a/ParamSklearn/components/classification/gaussian_nb.py b/autosklearn/pipeline/components/classification/gaussian_nb.py similarity index 93% rename from ParamSklearn/components/classification/gaussian_nb.py rename to autosklearn/pipeline/components/classification/gaussian_nb.py index b51b6cb051..2c53d158de 100644 --- a/ParamSklearn/components/classification/gaussian_nb.py +++ b/autosklearn/pipeline/components/classification/gaussian_nb.py @@ -2,11 +2,11 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * -class GaussianNB(ParamSklearnClassificationAlgorithm): +class GaussianNB(AutoSklearnClassificationAlgorithm): def __init__(self, random_state=None, verbose=0): diff --git a/ParamSklearn/components/classification/gradient_boosting.py b/autosklearn/pipeline/components/classification/gradient_boosting.py similarity index 97% rename from ParamSklearn/components/classification/gradient_boosting.py rename to autosklearn/pipeline/components/classification/gradient_boosting.py index 893ea4a91a..cc95870f24 100644 --- a/ParamSklearn/components/classification/gradient_boosting.py +++ b/autosklearn/pipeline/components/classification/gradient_boosting.py @@ -5,11 +5,11 @@ UniformIntegerHyperparameter, UnParametrizedHyperparameter, Constant, \ CategoricalHyperparameter -from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * -class GradientBoostingClassifier(ParamSklearnClassificationAlgorithm): +class GradientBoostingClassifier(AutoSklearnClassificationAlgorithm): def __init__(self, loss, learning_rate, n_estimators, subsample, min_samples_split, min_samples_leaf, min_weight_fraction_leaf, max_depth, max_features, diff --git a/ParamSklearn/components/classification/k_nearest_neighbors.py b/autosklearn/pipeline/components/classification/k_nearest_neighbors.py similarity index 93% rename from ParamSklearn/components/classification/k_nearest_neighbors.py rename to autosklearn/pipeline/components/classification/k_nearest_neighbors.py index 1f4ffdd987..f0631b9eb4 100644 --- a/ParamSklearn/components/classification/k_nearest_neighbors.py +++ b/autosklearn/pipeline/components/classification/k_nearest_neighbors.py @@ -2,11 +2,11 @@ from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ Constant, UniformIntegerHyperparameter -from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * -class KNearestNeighborsClassifier(ParamSklearnClassificationAlgorithm): +class KNearestNeighborsClassifier(AutoSklearnClassificationAlgorithm): def __init__(self, n_neighbors, weights, p, random_state=None): self.n_neighbors = n_neighbors diff --git a/ParamSklearn/components/classification/lda.py b/autosklearn/pipeline/components/classification/lda.py similarity index 93% rename from ParamSklearn/components/classification/lda.py rename to autosklearn/pipeline/components/classification/lda.py index b588ff1231..1802e642bf 100644 --- a/ParamSklearn/components/classification/lda.py +++ b/autosklearn/pipeline/components/classification/lda.py @@ -3,13 +3,13 @@ UniformIntegerHyperparameter, CategoricalHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition -from ParamSklearn.components.base import \ - ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * -from ParamSklearn.implementations.util import softmax +from autosklearn.pipeline.components.base import \ + AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * +from autosklearn.pipeline.implementations.util import softmax -class LDA(ParamSklearnClassificationAlgorithm): +class LDA(AutoSklearnClassificationAlgorithm): def __init__(self, shrinkage, n_components, tol, shrinkage_factor=0.5, random_state=None): self.shrinkage = shrinkage diff --git a/ParamSklearn/components/classification/liblinear_svc.py b/autosklearn/pipeline/components/classification/liblinear_svc.py similarity index 95% rename from ParamSklearn/components/classification/liblinear_svc.py rename to autosklearn/pipeline/components/classification/liblinear_svc.py index fe863ccda6..3b66ccde59 100644 --- a/ParamSklearn/components/classification/liblinear_svc.py +++ b/autosklearn/pipeline/components/classification/liblinear_svc.py @@ -4,12 +4,12 @@ from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ ForbiddenAndConjunction -from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.implementations.util import softmax -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.implementations.util import softmax +from autosklearn.pipeline.constants import * -class LibLinear_SVC(ParamSklearnClassificationAlgorithm): +class LibLinear_SVC(AutoSklearnClassificationAlgorithm): # Liblinear is not deterministic as it uses a RNG inside def __init__(self, penalty, loss, dual, tol, C, multi_class, fit_intercept, intercept_scaling, class_weight=None, diff --git a/ParamSklearn/components/classification/libsvm_svc.py b/autosklearn/pipeline/components/classification/libsvm_svc.py similarity index 97% rename from ParamSklearn/components/classification/libsvm_svc.py rename to autosklearn/pipeline/components/classification/libsvm_svc.py index b66c6209d2..67d5058348 100644 --- a/ParamSklearn/components/classification/libsvm_svc.py +++ b/autosklearn/pipeline/components/classification/libsvm_svc.py @@ -8,9 +8,9 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter -from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * -from ParamSklearn.implementations.util import softmax +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * +from autosklearn.pipeline.implementations.util import softmax # From the scikit-learn master branch. Will hopefully be there in sklearn 0.17 @@ -58,7 +58,7 @@ def _ovr_decision_function(predictions, confidences, n_classes): return votes + sum_of_confidences * scale -class LibSVM_SVC(ParamSklearnClassificationAlgorithm): +class LibSVM_SVC(AutoSklearnClassificationAlgorithm): def __init__(self, C, kernel, gamma, shrinking, tol, max_iter, class_weight=None, degree=3, coef0=0, random_state=None): self.C = C diff --git a/ParamSklearn/components/classification/multinomial_nb.py b/autosklearn/pipeline/components/classification/multinomial_nb.py similarity index 95% rename from ParamSklearn/components/classification/multinomial_nb.py rename to autosklearn/pipeline/components/classification/multinomial_nb.py index 57f1eb8f79..bc144676b4 100644 --- a/ParamSklearn/components/classification/multinomial_nb.py +++ b/autosklearn/pipeline/components/classification/multinomial_nb.py @@ -4,11 +4,11 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter -from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * -class MultinomialNB(ParamSklearnClassificationAlgorithm): +class MultinomialNB(AutoSklearnClassificationAlgorithm): def __init__(self, alpha, fit_prior, random_state=None, verbose=0): self.alpha = alpha diff --git a/ParamSklearn/components/classification/passive_aggressive.py b/autosklearn/pipeline/components/classification/passive_aggressive.py similarity index 93% rename from ParamSklearn/components/classification/passive_aggressive.py rename to autosklearn/pipeline/components/classification/passive_aggressive.py index c6d09f533e..9b9da05d2c 100644 --- a/ParamSklearn/components/classification/passive_aggressive.py +++ b/autosklearn/pipeline/components/classification/passive_aggressive.py @@ -5,13 +5,13 @@ CategoricalHyperparameter, UnParametrizedHyperparameter, \ UniformIntegerHyperparameter -from ParamSklearn.components.base import \ - ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * -from ParamSklearn.implementations.util import softmax +from autosklearn.pipeline.components.base import \ + AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * +from autosklearn.pipeline.implementations.util import softmax -class PassiveAggressive(ParamSklearnClassificationAlgorithm): +class PassiveAggressive(AutoSklearnClassificationAlgorithm): def __init__(self, C, fit_intercept, n_iter, loss, random_state=None): self.C = float(C) self.fit_intercept = fit_intercept == 'True' diff --git a/ParamSklearn/components/classification/proj_logit.py b/autosklearn/pipeline/components/classification/proj_logit.py similarity index 88% rename from ParamSklearn/components/classification/proj_logit.py rename to autosklearn/pipeline/components/classification/proj_logit.py index 12d9b3adad..c9c4d1b4be 100644 --- a/ParamSklearn/components/classification/proj_logit.py +++ b/autosklearn/pipeline/components/classification/proj_logit.py @@ -5,12 +5,12 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * -from ParamSklearn.implementations import ProjLogit +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * +from autosklearn.pipeline.implementations import ProjLogit -class ProjLogitCLassifier(ParamSklearnClassificationAlgorithm): +class ProjLogitCLassifier(AutoSklearnClassificationAlgorithm): def __init__(self, max_epochs = 2, random_state=None, n_jobs=1): self.max_epochs = max_epochs diff --git a/ParamSklearn/components/classification/qda.py b/autosklearn/pipeline/components/classification/qda.py similarity index 89% rename from ParamSklearn/components/classification/qda.py rename to autosklearn/pipeline/components/classification/qda.py index eca13ce664..ed9a99326b 100644 --- a/ParamSklearn/components/classification/qda.py +++ b/autosklearn/pipeline/components/classification/qda.py @@ -1,13 +1,13 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter -from ParamSklearn.components.base import \ - ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * -from ParamSklearn.implementations.util import softmax +from autosklearn.pipeline.components.base import \ + AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * +from autosklearn.pipeline.implementations.util import softmax -class QDA(ParamSklearnClassificationAlgorithm): +class QDA(AutoSklearnClassificationAlgorithm): def __init__(self, reg_param, random_state=None): self.reg_param = float(reg_param) diff --git a/ParamSklearn/components/classification/random_forest.py b/autosklearn/pipeline/components/classification/random_forest.py similarity index 97% rename from ParamSklearn/components/classification/random_forest.py rename to autosklearn/pipeline/components/classification/random_forest.py index 9c56408f43..9a0ad37eb6 100644 --- a/ParamSklearn/components/classification/random_forest.py +++ b/autosklearn/pipeline/components/classification/random_forest.py @@ -5,11 +5,11 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * -class RandomForest(ParamSklearnClassificationAlgorithm): +class RandomForest(AutoSklearnClassificationAlgorithm): def __init__(self, n_estimators, criterion, max_features, max_depth, min_samples_split, min_samples_leaf, min_weight_fraction_leaf, bootstrap, max_leaf_nodes, diff --git a/ParamSklearn/components/classification/sgd.py b/autosklearn/pipeline/components/classification/sgd.py similarity index 95% rename from ParamSklearn/components/classification/sgd.py rename to autosklearn/pipeline/components/classification/sgd.py index 065cab51a0..217f2dccc5 100644 --- a/ParamSklearn/components/classification/sgd.py +++ b/autosklearn/pipeline/components/classification/sgd.py @@ -4,12 +4,12 @@ UniformIntegerHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition -from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.constants import * -from ParamSklearn.implementations.util import softmax +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * +from autosklearn.pipeline.implementations.util import softmax -class SGD(ParamSklearnClassificationAlgorithm): +class SGD(AutoSklearnClassificationAlgorithm): def __init__(self, loss, penalty, alpha, fit_intercept, n_iter, learning_rate, class_weight=None, l1_ratio=0.15, epsilon=0.1, eta0=0.01, power_t=0.5, average=False, random_state=None): @@ -156,5 +156,3 @@ def get_hyperparameter_search_space(dataset_properties=None): return cs - def __str__(self): - return "ParamSklearn StochasticGradientClassifier" diff --git a/ParamSklearn/components/data_preprocessing/__init__.py b/autosklearn/pipeline/components/data_preprocessing/__init__.py similarity index 87% rename from ParamSklearn/components/data_preprocessing/__init__.py rename to autosklearn/pipeline/components/data_preprocessing/__init__.py index 711717a201..68728d3bef 100644 --- a/ParamSklearn/components/data_preprocessing/__init__.py +++ b/autosklearn/pipeline/components/data_preprocessing/__init__.py @@ -5,7 +5,7 @@ import pkgutil import sys -from ..base import ParamSklearnPreprocessingAlgorithm +from ..base import AutoSklearnPreprocessingAlgorithm from .rescaling import RescalingChoice @@ -20,7 +20,7 @@ for member_name, obj in inspect.getmembers(module): if inspect.isclass( - obj) and ParamSklearnPreprocessingAlgorithm in obj.__bases__: + obj) and AutoSklearnPreprocessingAlgorithm in obj.__bases__: # TODO test if the obj implements the interface # Keep in mind that this only instantiates the ensemble_wrapper, # but not the real target classifier diff --git a/ParamSklearn/components/data_preprocessing/balancing.py b/autosklearn/pipeline/components/data_preprocessing/balancing.py similarity index 93% rename from ParamSklearn/components/data_preprocessing/balancing.py rename to autosklearn/pipeline/components/data_preprocessing/balancing.py index f4f39c1363..4b2f64a2c4 100644 --- a/ParamSklearn/components/data_preprocessing/balancing.py +++ b/autosklearn/pipeline/components/data_preprocessing/balancing.py @@ -3,12 +3,12 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from ParamSklearn.components.base import \ - ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class Balancing(ParamSklearnPreprocessingAlgorithm): +class Balancing(AutoSklearnPreprocessingAlgorithm): def __init__(self, strategy, random_state=None): self.strategy = strategy @@ -111,6 +111,3 @@ def get_hyperparameter_search_space(dataset_properties=None): cs.add_hyperparameter(strategy) return cs - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/data_preprocessing/imputation.py b/autosklearn/pipeline/components/data_preprocessing/imputation.py similarity index 87% rename from ParamSklearn/components/data_preprocessing/imputation.py rename to autosklearn/pipeline/components/data_preprocessing/imputation.py index 848142785b..70dcfef71f 100644 --- a/ParamSklearn/components/data_preprocessing/imputation.py +++ b/autosklearn/pipeline/components/data_preprocessing/imputation.py @@ -1,11 +1,11 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class Imputation(ParamSklearnPreprocessingAlgorithm): +class Imputation(AutoSklearnPreprocessingAlgorithm): def __init__(self, strategy, random_state=None): # TODO pay attention to the cases when a copy is made (CSR matrices) self.strategy = strategy @@ -52,7 +52,3 @@ def get_hyperparameter_search_space(dataset_properties=None): cs = ConfigurationSpace() cs.add_hyperparameter(strategy) return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/data_preprocessing/one_hot_encoding.py b/autosklearn/pipeline/components/data_preprocessing/one_hot_encoding.py similarity index 88% rename from ParamSklearn/components/data_preprocessing/one_hot_encoding.py rename to autosklearn/pipeline/components/data_preprocessing/one_hot_encoding.py index 98ccd839a0..24d49c21b0 100644 --- a/ParamSklearn/components/data_preprocessing/one_hot_encoding.py +++ b/autosklearn/pipeline/components/data_preprocessing/one_hot_encoding.py @@ -1,17 +1,17 @@ import numpy as np -import ParamSklearn.implementations.OneHotEncoder +import autosklearn.pipeline.implementations.OneHotEncoder from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ UniformFloatHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition -from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class OneHotEncoder(ParamSklearnPreprocessingAlgorithm): +class OneHotEncoder(AutoSklearnPreprocessingAlgorithm): def __init__(self, use_minimum_fraction, minimum_fraction=None, categorical_features=None, random_state=None): # TODO pay attention to the cases when a copy is made (CSR matrices) @@ -31,7 +31,7 @@ def fit(self, X, y=None): else: categorical_features = self.categorical_features - self.preprocessor = ParamSklearn.implementations.OneHotEncoder\ + self.preprocessor = autosklearn.pipeline.implementations.OneHotEncoder\ .OneHotEncoder(minimum_fraction=self.minimum_fraction, categorical_features=categorical_features) @@ -83,7 +83,3 @@ def get_hyperparameter_search_space(dataset_properties=None): cs.add_condition(EqualsCondition(minimum_fraction, use_minimum_fraction, 'True')) return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/data_preprocessing/rescaling.py b/autosklearn/pipeline/components/data_preprocessing/rescaling.py similarity index 96% rename from ParamSklearn/components/data_preprocessing/rescaling.py rename to autosklearn/pipeline/components/data_preprocessing/rescaling.py index 3f47cdc45b..30d402a9b9 100644 --- a/ParamSklearn/components/data_preprocessing/rescaling.py +++ b/autosklearn/pipeline/components/data_preprocessing/rescaling.py @@ -5,7 +5,7 @@ from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition, AbstractConjunction -from ParamSklearn.constants import * +from autosklearn.pipeline.constants import * class Rescaling(object): @@ -58,7 +58,8 @@ def get_properties(dataset_properties=None): class MinMaxScalerComponent(Rescaling): def __init__(self, random_state): - from ParamSklearn.implementations.MinMaxScaler import MinMaxScaler + from autosklearn.pipeline.implementations.MinMaxScaler import \ + MinMaxScaler self.preprocessor = MinMaxScaler() @staticmethod @@ -85,7 +86,8 @@ def get_properties(dataset_properties=None): class StandardScalerComponent(Rescaling): def __init__(self, random_state): - from ParamSklearn.implementations.StandardScaler import StandardScaler + from autosklearn.pipeline.implementations.StandardScaler import \ + StandardScaler self.preprocessor = StandardScaler() @staticmethod @@ -112,7 +114,7 @@ def get_properties(dataset_properties=None): class NormalizerComponent(Rescaling): def __init__(self, random_state): - from ParamSklearn.implementations.Normalizer import Normalizer + from autosklearn.pipeline.implementations.Normalizer import Normalizer self.preprocessor = Normalizer() @staticmethod diff --git a/ParamSklearn/components/feature_preprocessing/__init__.py b/autosklearn/pipeline/components/feature_preprocessing/__init__.py similarity index 97% rename from ParamSklearn/components/feature_preprocessing/__init__.py rename to autosklearn/pipeline/components/feature_preprocessing/__init__.py index dd0144eac0..a4ce03c5af 100644 --- a/ParamSklearn/components/feature_preprocessing/__init__.py +++ b/autosklearn/pipeline/components/feature_preprocessing/__init__.py @@ -6,7 +6,7 @@ import pkgutil import sys -from ..base import ParamSklearnPreprocessingAlgorithm +from ..base import AutoSklearnPreprocessingAlgorithm from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition, AbstractConjunction @@ -22,7 +22,7 @@ module = importlib.import_module(full_module_name) for member_name, obj in inspect.getmembers(module): - if inspect.isclass(obj) and ParamSklearnPreprocessingAlgorithm in obj.__bases__: + if inspect.isclass(obj) and AutoSklearnPreprocessingAlgorithm in obj.__bases__: # TODO test if the obj implements the interface # Keep in mind that this only instantiates the ensemble_wrapper, # but not the real target classifier diff --git a/ParamSklearn/components/feature_preprocessing/densifier.py b/autosklearn/pipeline/components/feature_preprocessing/densifier.py similarity index 82% rename from ParamSklearn/components/feature_preprocessing/densifier.py rename to autosklearn/pipeline/components/feature_preprocessing/densifier.py index 8f83bd4db1..893c768ee9 100644 --- a/ParamSklearn/components/feature_preprocessing/densifier.py +++ b/autosklearn/pipeline/components/feature_preprocessing/densifier.py @@ -1,11 +1,11 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from ParamSklearn.components.base import \ - ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class Densifier(ParamSklearnPreprocessingAlgorithm): +class Densifier(AutoSklearnPreprocessingAlgorithm): def __init__(self, random_state=None): pass @@ -44,7 +44,3 @@ def get_hyperparameter_search_space(dataset_properties=None): cs = ConfigurationSpace() return cs - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %" % name - diff --git a/ParamSklearn/components/feature_preprocessing/extra_trees_preproc_for_classification.py b/autosklearn/pipeline/components/feature_preprocessing/extra_trees_preproc_for_classification.py similarity index 96% rename from ParamSklearn/components/feature_preprocessing/extra_trees_preproc_for_classification.py rename to autosklearn/pipeline/components/feature_preprocessing/extra_trees_preproc_for_classification.py index 5d8dcc62bc..6bed2c257c 100644 --- a/ParamSklearn/components/feature_preprocessing/extra_trees_preproc_for_classification.py +++ b/autosklearn/pipeline/components/feature_preprocessing/extra_trees_preproc_for_classification.py @@ -5,12 +5,12 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ParamSklearn.components.base import \ - ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class ExtraTreesPreprocessor(ParamSklearnPreprocessingAlgorithm): +class ExtraTreesPreprocessor(AutoSklearnPreprocessingAlgorithm): def __init__(self, n_estimators, criterion, min_samples_leaf, min_samples_split, max_features, max_leaf_nodes_or_max_depth="max_depth", diff --git a/ParamSklearn/components/feature_preprocessing/fast_ica.py b/autosklearn/pipeline/components/feature_preprocessing/fast_ica.py similarity index 94% rename from ParamSklearn/components/feature_preprocessing/fast_ica.py rename to autosklearn/pipeline/components/feature_preprocessing/fast_ica.py index 4b88bf14a8..01009dd5c9 100644 --- a/ParamSklearn/components/feature_preprocessing/fast_ica.py +++ b/autosklearn/pipeline/components/feature_preprocessing/fast_ica.py @@ -5,12 +5,12 @@ UniformIntegerHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition -from ParamSklearn.components.base import \ - ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class FastICA(ParamSklearnPreprocessingAlgorithm): +class FastICA(AutoSklearnPreprocessingAlgorithm): def __init__(self, algorithm, whiten, fun, n_components=None, random_state=None): self.n_components = None if n_components is None else int(n_components) diff --git a/ParamSklearn/components/feature_preprocessing/feature_agglomeration.py b/autosklearn/pipeline/components/feature_preprocessing/feature_agglomeration.py similarity index 94% rename from ParamSklearn/components/feature_preprocessing/feature_agglomeration.py rename to autosklearn/pipeline/components/feature_preprocessing/feature_agglomeration.py index b8b8ca0990..92ff1f0c75 100644 --- a/ParamSklearn/components/feature_preprocessing/feature_agglomeration.py +++ b/autosklearn/pipeline/components/feature_preprocessing/feature_agglomeration.py @@ -6,12 +6,12 @@ from HPOlibConfigSpace.forbidden import ForbiddenInClause, \ ForbiddenAndConjunction, ForbiddenEqualsClause -from ParamSklearn.components.base import \ - ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class FeatureAgglomeration(ParamSklearnPreprocessingAlgorithm): +class FeatureAgglomeration(AutoSklearnPreprocessingAlgorithm): def __init__(self, n_clusters, affinity, linkage, pooling_func, random_state=None): self.n_clusters = int(n_clusters) diff --git a/ParamSklearn/components/feature_preprocessing/gem.py b/autosklearn/pipeline/components/feature_preprocessing/gem.py similarity index 83% rename from ParamSklearn/components/feature_preprocessing/gem.py rename to autosklearn/pipeline/components/feature_preprocessing/gem.py index 339f5ef281..e3cbdff135 100644 --- a/ParamSklearn/components/feature_preprocessing/gem.py +++ b/autosklearn/pipeline/components/feature_preprocessing/gem.py @@ -1,11 +1,11 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter, UniformFloatHyperparameter -from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.implementations.gem import GEM as GEMImpl -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.implementations.gem import GEM as GEMImpl +from autosklearn.pipeline.constants import * -class GEM(ParamSklearnPreprocessingAlgorithm): +class GEM(AutoSklearnPreprocessingAlgorithm): def __init__(self, N, precond, random_state=None): self.N = N @@ -50,8 +50,3 @@ def get_hyperparameter_search_space(dataset_properties=None): cs.add_hyperparameter(N) cs.add_hyperparameter(precond) return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name - diff --git a/ParamSklearn/components/feature_preprocessing/kernel_pca.py b/autosklearn/pipeline/components/feature_preprocessing/kernel_pca.py similarity index 95% rename from ParamSklearn/components/feature_preprocessing/kernel_pca.py rename to autosklearn/pipeline/components/feature_preprocessing/kernel_pca.py index 5df1a0a745..d7eddf86d6 100644 --- a/ParamSklearn/components/feature_preprocessing/kernel_pca.py +++ b/autosklearn/pipeline/components/feature_preprocessing/kernel_pca.py @@ -7,12 +7,12 @@ UniformIntegerHyperparameter, UniformFloatHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition, InCondition -from ParamSklearn.components.base import \ - ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class KernelPCA(ParamSklearnPreprocessingAlgorithm): +class KernelPCA(AutoSklearnPreprocessingAlgorithm): def __init__(self, n_components, kernel, degree=3, gamma=0.25, coef0=0.0, random_state=None): self.n_components = int(n_components) diff --git a/ParamSklearn/components/feature_preprocessing/kitchen_sinks.py b/autosklearn/pipeline/components/feature_preprocessing/kitchen_sinks.py similarity index 88% rename from ParamSklearn/components/feature_preprocessing/kitchen_sinks.py rename to autosklearn/pipeline/components/feature_preprocessing/kitchen_sinks.py index 582d0542aa..d95568ddea 100644 --- a/ParamSklearn/components/feature_preprocessing/kitchen_sinks.py +++ b/autosklearn/pipeline/components/feature_preprocessing/kitchen_sinks.py @@ -2,10 +2,10 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter -from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class RandomKitchenSinks(ParamSklearnPreprocessingAlgorithm): +class RandomKitchenSinks(AutoSklearnPreprocessingAlgorithm): def __init__(self, gamma, n_components, random_state=None): """ Parameters: @@ -62,8 +62,3 @@ def get_hyperparameter_search_space(dataset_properties=None): cs.add_hyperparameter(gamma) cs.add_hyperparameter(n_components) return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name - diff --git a/ParamSklearn/components/feature_preprocessing/liblinear_svc_preprocessor.py b/autosklearn/pipeline/components/feature_preprocessing/liblinear_svc_preprocessor.py similarity index 95% rename from ParamSklearn/components/feature_preprocessing/liblinear_svc_preprocessor.py rename to autosklearn/pipeline/components/feature_preprocessing/liblinear_svc_preprocessor.py index 7620b78509..61071f1727 100644 --- a/ParamSklearn/components/feature_preprocessing/liblinear_svc_preprocessor.py +++ b/autosklearn/pipeline/components/feature_preprocessing/liblinear_svc_preprocessor.py @@ -4,12 +4,12 @@ from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ ForbiddenAndConjunction -from ParamSklearn.components.base import \ - ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class LibLinear_Preprocessor(ParamSklearnPreprocessingAlgorithm): +class LibLinear_Preprocessor(AutoSklearnPreprocessingAlgorithm): # Liblinear is not deterministic as it uses a RNG inside def __init__(self, penalty, loss, dual, tol, C, multi_class, fit_intercept, intercept_scaling, class_weight=None, diff --git a/ParamSklearn/components/feature_preprocessing/no_preprocessing.py b/autosklearn/pipeline/components/feature_preprocessing/no_preprocessing.py similarity index 82% rename from ParamSklearn/components/feature_preprocessing/no_preprocessing.py rename to autosklearn/pipeline/components/feature_preprocessing/no_preprocessing.py index 3a95204dc4..0caeb4e6ca 100644 --- a/ParamSklearn/components/feature_preprocessing/no_preprocessing.py +++ b/autosklearn/pipeline/components/feature_preprocessing/no_preprocessing.py @@ -1,10 +1,10 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class NoPreprocessing(ParamSklearnPreprocessingAlgorithm): +class NoPreprocessing(AutoSklearnPreprocessingAlgorithm): def __init__(self, random_state): """ This preprocessors does not change the data """ @@ -43,8 +43,3 @@ def get_properties(dataset_properties=None): def get_hyperparameter_search_space(dataset_properties=None): cs = ConfigurationSpace() return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name - diff --git a/ParamSklearn/components/feature_preprocessing/nystroem_sampler.py b/autosklearn/pipeline/components/feature_preprocessing/nystroem_sampler.py similarity index 94% rename from ParamSklearn/components/feature_preprocessing/nystroem_sampler.py rename to autosklearn/pipeline/components/feature_preprocessing/nystroem_sampler.py index 6335b1ec18..216017b362 100644 --- a/ParamSklearn/components/feature_preprocessing/nystroem_sampler.py +++ b/autosklearn/pipeline/components/feature_preprocessing/nystroem_sampler.py @@ -5,12 +5,12 @@ UniformIntegerHyperparameter, CategoricalHyperparameter from HPOlibConfigSpace.conditions import InCondition, EqualsCondition, AndConjunction -from ParamSklearn.components.base import \ - ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class Nystroem(ParamSklearnPreprocessingAlgorithm): +class Nystroem(AutoSklearnPreprocessingAlgorithm): def __init__(self, kernel, n_components, gamma=1.0, degree=3, coef0=1, random_state=None): self.kernel = kernel @@ -119,8 +119,3 @@ def get_hyperparameter_search_space(dataset_properties=None): cs.add_condition(coef0_condition) cs.add_condition(gamma_condition) return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name - diff --git a/ParamSklearn/components/feature_preprocessing/pca.py b/autosklearn/pipeline/components/feature_preprocessing/pca.py similarity index 90% rename from ParamSklearn/components/feature_preprocessing/pca.py rename to autosklearn/pipeline/components/feature_preprocessing/pca.py index 71252e7fd6..26362ffc29 100644 --- a/ParamSklearn/components/feature_preprocessing/pca.py +++ b/autosklearn/pipeline/components/feature_preprocessing/pca.py @@ -4,11 +4,11 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter -from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class PCA(ParamSklearnPreprocessingAlgorithm): +class PCA(AutoSklearnPreprocessingAlgorithm): def __init__(self, keep_variance, whiten, random_state=None): self.keep_variance = keep_variance self.whiten = whiten @@ -66,7 +66,3 @@ def get_hyperparameter_search_space(dataset_properties=None): cs.add_hyperparameter(keep_variance) cs.add_hyperparameter(whiten) return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/feature_preprocessing/polynomial.py b/autosklearn/pipeline/components/feature_preprocessing/polynomial.py similarity index 93% rename from ParamSklearn/components/feature_preprocessing/polynomial.py rename to autosklearn/pipeline/components/feature_preprocessing/polynomial.py index 3c47e19025..9596427801 100644 --- a/ParamSklearn/components/feature_preprocessing/polynomial.py +++ b/autosklearn/pipeline/components/feature_preprocessing/polynomial.py @@ -2,12 +2,12 @@ from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ UniformIntegerHyperparameter -from ParamSklearn.components.base import \ - ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class PolynomialFeatures(ParamSklearnPreprocessingAlgorithm): +class PolynomialFeatures(AutoSklearnPreprocessingAlgorithm): def __init__(self, degree, interaction_only, include_bias, random_state=None): self.degree = int(degree) self.interaction_only = interaction_only.lower() == 'true' diff --git a/ParamSklearn/components/feature_preprocessing/random_trees_embedding.py b/autosklearn/pipeline/components/feature_preprocessing/random_trees_embedding.py similarity index 93% rename from ParamSklearn/components/feature_preprocessing/random_trees_embedding.py rename to autosklearn/pipeline/components/feature_preprocessing/random_trees_embedding.py index b86edcecb2..9fe95e577b 100644 --- a/ParamSklearn/components/feature_preprocessing/random_trees_embedding.py +++ b/autosklearn/pipeline/components/feature_preprocessing/random_trees_embedding.py @@ -2,11 +2,11 @@ from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter, \ UnParametrizedHyperparameter, Constant -from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class RandomTreesEmbedding(ParamSklearnPreprocessingAlgorithm): +class RandomTreesEmbedding(AutoSklearnPreprocessingAlgorithm): def __init__(self, n_estimators, max_depth, min_samples_split, min_samples_leaf, min_weight_fraction_leaf, max_leaf_nodes, @@ -96,7 +96,3 @@ def get_hyperparameter_search_space(dataset_properties=None): cs.add_hyperparameter(min_weight_fraction_leaf) cs.add_hyperparameter(max_leaf_nodes) return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name \ No newline at end of file diff --git a/ParamSklearn/components/feature_preprocessing/select_percentile.py b/autosklearn/pipeline/components/feature_preprocessing/select_percentile.py similarity index 100% rename from ParamSklearn/components/feature_preprocessing/select_percentile.py rename to autosklearn/pipeline/components/feature_preprocessing/select_percentile.py diff --git a/ParamSklearn/components/feature_preprocessing/select_percentile_classification.py b/autosklearn/pipeline/components/feature_preprocessing/select_percentile_classification.py similarity index 91% rename from ParamSklearn/components/feature_preprocessing/select_percentile_classification.py rename to autosklearn/pipeline/components/feature_preprocessing/select_percentile_classification.py index a7190839a8..a5548c102b 100644 --- a/ParamSklearn/components/feature_preprocessing/select_percentile_classification.py +++ b/autosklearn/pipeline/components/feature_preprocessing/select_percentile_classification.py @@ -1,13 +1,13 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, CategoricalHyperparameter, Constant -from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.components.feature_preprocessing.select_percentile import SelectPercentileBase -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.components.feature_preprocessing.select_percentile import SelectPercentileBase +from autosklearn.pipeline.constants import * class SelectPercentileClassification(SelectPercentileBase, - ParamSklearnPreprocessingAlgorithm): + AutoSklearnPreprocessingAlgorithm): def __init__(self, percentile, score_func="chi2", random_state=None): """ Parameters: @@ -112,8 +112,3 @@ def get_hyperparameter_search_space(dataset_properties=None): cs.add_hyperparameter(score_func) return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name - diff --git a/ParamSklearn/components/feature_preprocessing/select_percentile_regression.py b/autosklearn/pipeline/components/feature_preprocessing/select_percentile_regression.py similarity index 84% rename from ParamSklearn/components/feature_preprocessing/select_percentile_regression.py rename to autosklearn/pipeline/components/feature_preprocessing/select_percentile_regression.py index 41339235e7..ba96074889 100644 --- a/ParamSklearn/components/feature_preprocessing/select_percentile_regression.py +++ b/autosklearn/pipeline/components/feature_preprocessing/select_percentile_regression.py @@ -1,13 +1,13 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, UnParametrizedHyperparameter -from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.components.feature_preprocessing.select_percentile import SelectPercentileBase -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.components.feature_preprocessing.select_percentile import SelectPercentileBase +from autosklearn.pipeline.constants import * class SelectPercentileRegression(SelectPercentileBase, - ParamSklearnPreprocessingAlgorithm): + AutoSklearnPreprocessingAlgorithm): def __init__(self, percentile, score_func="f_classif", random_state=None): """ Parameters: @@ -57,8 +57,3 @@ def get_hyperparameter_search_space(dataset_properties=None): cs.add_hyperparameter(percentile) cs.add_hyperparameter(score_func) return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name - diff --git a/ParamSklearn/components/feature_preprocessing/select_rates.py b/autosklearn/pipeline/components/feature_preprocessing/select_rates.py similarity index 93% rename from ParamSklearn/components/feature_preprocessing/select_rates.py rename to autosklearn/pipeline/components/feature_preprocessing/select_rates.py index 4f6471a11e..243fa88e8b 100644 --- a/ParamSklearn/components/feature_preprocessing/select_rates.py +++ b/autosklearn/pipeline/components/feature_preprocessing/select_rates.py @@ -2,12 +2,12 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ CategoricalHyperparameter, Constant -from ParamSklearn.components.base import \ - ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class SelectRates(ParamSklearnPreprocessingAlgorithm): +class SelectRates(AutoSklearnPreprocessingAlgorithm): def __init__(self, alpha, mode='fpr', score_func="chi2", random_state=None): import sklearn.feature_selection @@ -120,8 +120,3 @@ def get_hyperparameter_search_space(dataset_properties=None): cs.add_hyperparameter(mode) return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name - diff --git a/ParamSklearn/components/feature_preprocessing/truncatedSVD.py b/autosklearn/pipeline/components/feature_preprocessing/truncatedSVD.py similarity index 88% rename from ParamSklearn/components/feature_preprocessing/truncatedSVD.py rename to autosklearn/pipeline/components/feature_preprocessing/truncatedSVD.py index d4c7ef4ff6..9108eee2c3 100644 --- a/ParamSklearn/components/feature_preprocessing/truncatedSVD.py +++ b/autosklearn/pipeline/components/feature_preprocessing/truncatedSVD.py @@ -3,11 +3,11 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter -from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * -class TruncatedSVD(ParamSklearnPreprocessingAlgorithm): +class TruncatedSVD(AutoSklearnPreprocessingAlgorithm): def __init__(self, target_dim, random_state=None): self.target_dim = int(target_dim) self.random_state = random_state @@ -59,7 +59,3 @@ def get_hyperparameter_search_space(dataset_properties=None): cs = ConfigurationSpace() cs.add_hyperparameter(target_dim) return cs - - def __str__(self): - name = self.get_properties()['name'] - return "ParamSklearn %s" % name diff --git a/ParamSklearn/components/regression/__init__.py b/autosklearn/pipeline/components/regression/__init__.py similarity index 97% rename from ParamSklearn/components/regression/__init__.py rename to autosklearn/pipeline/components/regression/__init__.py index 91199da85b..b1c488acb1 100644 --- a/ParamSklearn/components/regression/__init__.py +++ b/autosklearn/pipeline/components/regression/__init__.py @@ -6,7 +6,7 @@ import pkgutil import sys -from ..base import ParamSklearnRegressionAlgorithm +from ..base import AutoSklearnRegressionAlgorithm from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter from HPOlibConfigSpace.conditions import EqualsCondition @@ -21,7 +21,7 @@ module = importlib.import_module(full_module_name) for member_name, obj in inspect.getmembers(module): - if inspect.isclass(obj) and ParamSklearnRegressionAlgorithm in obj.__bases__: + if inspect.isclass(obj) and AutoSklearnRegressionAlgorithm in obj.__bases__: # TODO test if the obj implements the interface # Keep in mind that this only instantiates the ensemble_wrapper, # but not the real target classifier diff --git a/ParamSklearn/components/regression/adaboost.py b/autosklearn/pipeline/components/regression/adaboost.py similarity index 94% rename from ParamSklearn/components/regression/adaboost.py rename to autosklearn/pipeline/components/regression/adaboost.py index af4baee38f..c6b06e99c8 100644 --- a/ParamSklearn/components/regression/adaboost.py +++ b/autosklearn/pipeline/components/regression/adaboost.py @@ -4,11 +4,11 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UniformIntegerHyperparameter, CategoricalHyperparameter -from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm +from autosklearn.pipeline.constants import * -class AdaboostRegressor(ParamSklearnRegressionAlgorithm): +class AdaboostRegressor(AutoSklearnRegressionAlgorithm): def __init__(self, n_estimators, learning_rate, loss, max_depth, random_state=None): self.n_estimators = int(n_estimators) diff --git a/ParamSklearn/components/regression/decision_tree.py b/autosklearn/pipeline/components/regression/decision_tree.py similarity index 95% rename from ParamSklearn/components/regression/decision_tree.py rename to autosklearn/pipeline/components/regression/decision_tree.py index 0b35718a34..1fa5259aa8 100644 --- a/ParamSklearn/components/regression/decision_tree.py +++ b/autosklearn/pipeline/components/regression/decision_tree.py @@ -5,12 +5,12 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ParamSklearn.components.base import \ - ParamSklearnRegressionAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import \ + AutoSklearnRegressionAlgorithm +from autosklearn.pipeline.constants import * -class DecisionTree(ParamSklearnRegressionAlgorithm): +class DecisionTree(AutoSklearnRegressionAlgorithm): def __init__(self, criterion, splitter, max_features, max_depth, min_samples_split, min_samples_leaf, min_weight_fraction_leaf, max_leaf_nodes, random_state=None): diff --git a/ParamSklearn/components/regression/extra_trees.py b/autosklearn/pipeline/components/regression/extra_trees.py similarity index 97% rename from ParamSklearn/components/regression/extra_trees.py rename to autosklearn/pipeline/components/regression/extra_trees.py index f1456f97de..f62ecb2143 100644 --- a/ParamSklearn/components/regression/extra_trees.py +++ b/autosklearn/pipeline/components/regression/extra_trees.py @@ -5,11 +5,11 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm +from autosklearn.pipeline.constants import * -class ExtraTreesRegressor(ParamSklearnRegressionAlgorithm): +class ExtraTreesRegressor(AutoSklearnRegressionAlgorithm): def __init__(self, n_estimators, criterion, min_samples_leaf, min_samples_split, max_features, max_leaf_nodes_or_max_depth="max_depth", diff --git a/ParamSklearn/components/regression/gaussian_process.py b/autosklearn/pipeline/components/regression/gaussian_process.py similarity index 94% rename from ParamSklearn/components/regression/gaussian_process.py rename to autosklearn/pipeline/components/regression/gaussian_process.py index d984dafd48..b74e1fdfcc 100644 --- a/ParamSklearn/components/regression/gaussian_process.py +++ b/autosklearn/pipeline/components/regression/gaussian_process.py @@ -3,11 +3,11 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter -from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm +from autosklearn.pipeline.constants import * -class GaussianProcess(ParamSklearnRegressionAlgorithm): +class GaussianProcess(AutoSklearnRegressionAlgorithm): def __init__(self, nugget, thetaL, thetaU, normalize=False, copy_X=False, random_state=None): self.nugget = float(nugget) diff --git a/ParamSklearn/components/regression/gradient_boosting.py b/autosklearn/pipeline/components/regression/gradient_boosting.py similarity index 97% rename from ParamSklearn/components/regression/gradient_boosting.py rename to autosklearn/pipeline/components/regression/gradient_boosting.py index 5930714a87..370a535498 100644 --- a/ParamSklearn/components/regression/gradient_boosting.py +++ b/autosklearn/pipeline/components/regression/gradient_boosting.py @@ -6,11 +6,11 @@ UnParametrizedHyperparameter from HPOlibConfigSpace.conditions import InCondition -from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm +from autosklearn.pipeline.constants import * -class GradientBoosting(ParamSklearnRegressionAlgorithm): +class GradientBoosting(AutoSklearnRegressionAlgorithm): def __init__(self, loss, learning_rate, n_estimators, subsample, min_samples_split, min_samples_leaf, min_weight_fraction_leaf, max_depth, max_features, diff --git a/ParamSklearn/components/regression/k_nearest_neighbors.py b/autosklearn/pipeline/components/regression/k_nearest_neighbors.py similarity index 92% rename from ParamSklearn/components/regression/k_nearest_neighbors.py rename to autosklearn/pipeline/components/regression/k_nearest_neighbors.py index a597eacf5e..d73819c4e5 100644 --- a/ParamSklearn/components/regression/k_nearest_neighbors.py +++ b/autosklearn/pipeline/components/regression/k_nearest_neighbors.py @@ -2,11 +2,11 @@ from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ Constant, UniformIntegerHyperparameter -from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm +from autosklearn.pipeline.constants import * -class KNearestNeighborsRegressor(ParamSklearnRegressionAlgorithm): +class KNearestNeighborsRegressor(AutoSklearnRegressionAlgorithm): def __init__(self, n_neighbors, weights, p, random_state=None): self.n_neighbors = n_neighbors self.weights = weights diff --git a/ParamSklearn/components/regression/liblinear_svr.py b/autosklearn/pipeline/components/regression/liblinear_svr.py similarity index 95% rename from ParamSklearn/components/regression/liblinear_svr.py rename to autosklearn/pipeline/components/regression/liblinear_svr.py index 6ed55736a2..cf9766bbb3 100644 --- a/ParamSklearn/components/regression/liblinear_svr.py +++ b/autosklearn/pipeline/components/regression/liblinear_svr.py @@ -4,11 +4,11 @@ from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ ForbiddenAndConjunction -from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm +from autosklearn.pipeline.constants import * -class LibLinear_SVR(ParamSklearnRegressionAlgorithm): +class LibLinear_SVR(AutoSklearnRegressionAlgorithm): # Liblinear is not deterministic as it uses a RNG inside def __init__(self, loss, epsilon, dual, tol, C, fit_intercept, intercept_scaling, random_state=None): diff --git a/ParamSklearn/components/regression/libsvm_svr.py b/autosklearn/pipeline/components/regression/libsvm_svr.py similarity index 97% rename from ParamSklearn/components/regression/libsvm_svr.py rename to autosklearn/pipeline/components/regression/libsvm_svr.py index 7aa53cc9a0..977242d077 100644 --- a/ParamSklearn/components/regression/libsvm_svr.py +++ b/autosklearn/pipeline/components/regression/libsvm_svr.py @@ -8,11 +8,11 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter -from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm +from autosklearn.pipeline.constants import * -class LibSVM_SVR(ParamSklearnRegressionAlgorithm): +class LibSVM_SVR(AutoSklearnRegressionAlgorithm): def __init__(self, kernel, C, epsilon, tol, shrinking, gamma=0.0, degree=3, coef0=0.0, verbose=False, max_iter=-1, random_state=None): diff --git a/ParamSklearn/components/regression/random_forest.py b/autosklearn/pipeline/components/regression/random_forest.py similarity index 96% rename from ParamSklearn/components/regression/random_forest.py rename to autosklearn/pipeline/components/regression/random_forest.py index 6a22cd2949..fb7ee082bc 100644 --- a/ParamSklearn/components/regression/random_forest.py +++ b/autosklearn/pipeline/components/regression/random_forest.py @@ -5,11 +5,11 @@ UniformIntegerHyperparameter, CategoricalHyperparameter, \ UnParametrizedHyperparameter, Constant -from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm +from autosklearn.pipeline.constants import * -class RandomForest(ParamSklearnRegressionAlgorithm): +class RandomForest(AutoSklearnRegressionAlgorithm): def __init__(self, n_estimators, criterion, max_features, max_depth, min_samples_split, min_samples_leaf, min_weight_fraction_leaf, bootstrap, max_leaf_nodes, diff --git a/ParamSklearn/components/regression/ridge_regression.py b/autosklearn/pipeline/components/regression/ridge_regression.py similarity index 91% rename from ParamSklearn/components/regression/ridge_regression.py rename to autosklearn/pipeline/components/regression/ridge_regression.py index 2080184182..95b15918ed 100644 --- a/ParamSklearn/components/regression/ridge_regression.py +++ b/autosklearn/pipeline/components/regression/ridge_regression.py @@ -4,11 +4,11 @@ from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ UnParametrizedHyperparameter -from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm +from autosklearn.pipeline.constants import * -class RidgeRegression(ParamSklearnRegressionAlgorithm): +class RidgeRegression(AutoSklearnRegressionAlgorithm): def __init__(self, alpha, fit_intercept, tol, random_state=None): self.alpha = float(alpha) self.fit_intercept = fit_intercept == 'True' @@ -63,6 +63,3 @@ def get_hyperparameter_search_space(dataset_properties=None): tol = cs.add_hyperparameter(UniformFloatHyperparameter( "tol", 1e-5, 1e-1, default=1e-4, log=True)) return cs - - def __str__(self): - return "ParamSklearn Ridge Regression" diff --git a/ParamSklearn/components/regression/sgd.py b/autosklearn/pipeline/components/regression/sgd.py similarity index 96% rename from ParamSklearn/components/regression/sgd.py rename to autosklearn/pipeline/components/regression/sgd.py index 749bd0a21b..d034a055fc 100644 --- a/ParamSklearn/components/regression/sgd.py +++ b/autosklearn/pipeline/components/regression/sgd.py @@ -4,11 +4,11 @@ UniformIntegerHyperparameter from HPOlibConfigSpace.conditions import InCondition, EqualsCondition -from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.constants import * +from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm +from autosklearn.pipeline.constants import * -class SGD(ParamSklearnRegressionAlgorithm): +class SGD(AutoSklearnRegressionAlgorithm): def __init__(self, loss, penalty, alpha, fit_intercept, n_iter, learning_rate, l1_ratio=0.15, epsilon=0.1, eta0=0.01, power_t=0.5, average=False, random_state=None): @@ -155,6 +155,3 @@ def get_hyperparameter_search_space(dataset_properties=None): cs.add_condition(power_t_condition) return cs - - def __str__(self): - return "ParamSklearn StochasticGradientClassifier" diff --git a/ParamSklearn/constants.py b/autosklearn/pipeline/constants.py similarity index 100% rename from ParamSklearn/constants.py rename to autosklearn/pipeline/constants.py diff --git a/ParamSklearn/create_searchspace_util.py b/autosklearn/pipeline/create_searchspace_util.py similarity index 99% rename from ParamSklearn/create_searchspace_util.py rename to autosklearn/pipeline/create_searchspace_util.py index 374c29e11b..544b57e379 100644 --- a/ParamSklearn/create_searchspace_util.py +++ b/autosklearn/pipeline/create_searchspace_util.py @@ -5,7 +5,7 @@ from HPOlibConfigSpace.forbidden import ForbiddenAndConjunction from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause -from ParamSklearn.constants import * +from autosklearn.pipeline.constants import * def get_match_array(pipeline, dataset_properties, diff --git a/ParamSklearn/implementations/Imputation.py b/autosklearn/pipeline/implementations/Imputation.py similarity index 100% rename from ParamSklearn/implementations/Imputation.py rename to autosklearn/pipeline/implementations/Imputation.py diff --git a/ParamSklearn/implementations/MinMaxScaler.py b/autosklearn/pipeline/implementations/MinMaxScaler.py similarity index 100% rename from ParamSklearn/implementations/MinMaxScaler.py rename to autosklearn/pipeline/implementations/MinMaxScaler.py diff --git a/ParamSklearn/implementations/MultilabelClassifier.py b/autosklearn/pipeline/implementations/MultilabelClassifier.py similarity index 100% rename from ParamSklearn/implementations/MultilabelClassifier.py rename to autosklearn/pipeline/implementations/MultilabelClassifier.py diff --git a/ParamSklearn/implementations/Normalizer.py b/autosklearn/pipeline/implementations/Normalizer.py similarity index 100% rename from ParamSklearn/implementations/Normalizer.py rename to autosklearn/pipeline/implementations/Normalizer.py diff --git a/ParamSklearn/implementations/OneHotEncoder.py b/autosklearn/pipeline/implementations/OneHotEncoder.py similarity index 100% rename from ParamSklearn/implementations/OneHotEncoder.py rename to autosklearn/pipeline/implementations/OneHotEncoder.py diff --git a/ParamSklearn/implementations/ProjLogit.py b/autosklearn/pipeline/implementations/ProjLogit.py similarity index 100% rename from ParamSklearn/implementations/ProjLogit.py rename to autosklearn/pipeline/implementations/ProjLogit.py diff --git a/ParamSklearn/implementations/StandardScaler.py b/autosklearn/pipeline/implementations/StandardScaler.py similarity index 100% rename from ParamSklearn/implementations/StandardScaler.py rename to autosklearn/pipeline/implementations/StandardScaler.py diff --git a/ParamSklearn/implementations/__init__.py b/autosklearn/pipeline/implementations/__init__.py similarity index 100% rename from ParamSklearn/implementations/__init__.py rename to autosklearn/pipeline/implementations/__init__.py diff --git a/ParamSklearn/implementations/gem.py b/autosklearn/pipeline/implementations/gem.py similarity index 100% rename from ParamSklearn/implementations/gem.py rename to autosklearn/pipeline/implementations/gem.py diff --git a/ParamSklearn/implementations/util.py b/autosklearn/pipeline/implementations/util.py similarity index 100% rename from ParamSklearn/implementations/util.py rename to autosklearn/pipeline/implementations/util.py diff --git a/ParamSklearn/regression.py b/autosklearn/pipeline/regression.py similarity index 93% rename from ParamSklearn/regression.py rename to autosklearn/pipeline/regression.py index 8a7704c4fb..492a706629 100644 --- a/ParamSklearn/regression.py +++ b/autosklearn/pipeline/regression.py @@ -2,21 +2,17 @@ import copy from itertools import product -import sklearn -if sklearn.__version__ != "0.16.1": - raise ValueError("ParamSklearn supports only sklearn version 0.16.1, " - "you installed %s." % sklearn.__version__) from sklearn.base import RegressorMixin from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction from HPOlibConfigSpace.configuration_space import ConfigurationSpace -from ParamSklearn import components as components -from ParamSklearn.base import ParamSklearnBaseEstimator -from ParamSklearn.constants import SPARSE +from autosklearn.pipeline import components as components +from autosklearn.pipeline.base import BasePipeline +from autosklearn.pipeline.constants import SPARSE -class ParamSklearnRegressor(RegressorMixin, ParamSklearnBaseEstimator): +class SimpleRegressionPipeline(RegressorMixin, BasePipeline): """This class implements the regression task. It implements a pipeline, which includes one preprocessing step and one @@ -44,12 +40,14 @@ class ParamSklearnRegressor(RegressorMixin, ParamSklearnBaseEstimator): ---------- _estimator : The underlying scikit-learn regression model. This variable is assigned after a call to the - :meth:`ParamSklearn.regression.ParamSklearnRegressor.fit` method. + :meth:`autosklearn.pipeline.regression.SimpleRegressionPipeline.fit` + method. _preprocessor : The underlying scikit-learn preprocessing algorithm. This variable is only assigned if a preprocessor is specified and after a call to the - :meth:`ParamSklearn.regression.ParamSklearnRegressor.fit` method. + :meth:`autosklearn.pipeline.regression.SimpleRegressionPipeline.fit` + method. See also -------- @@ -63,7 +61,7 @@ class ParamSklearnRegressor(RegressorMixin, ParamSklearnBaseEstimator): """ def pre_transform(self, X, Y, fit_params=None, init_params=None): - X, fit_params = super(ParamSklearnRegressor, self).pre_transform( + X, fit_params = super(SimpleRegressionPipeline, self).pre_transform( X, Y, fit_params=fit_params, init_params=init_params) self.num_targets = 1 if len(Y.shape) == 1 else Y.shape[1] return X, fit_params @@ -119,7 +117,7 @@ def get_hyperparameter_search_space(cls, include=None, exclude=None, Returns ------- cs : HPOlibConfigSpace.configuration_space.Configuration - The configuration space describing the ParamSklearnClassifier. + The configuration space describing the SimpleRegressionClassifier. """ cs = ConfigurationSpace() diff --git a/ParamSklearn/util.py b/autosklearn/pipeline/util.py similarity index 100% rename from ParamSklearn/util.py rename to autosklearn/pipeline/util.py diff --git a/autosklearn/util/paramsklearn.py b/autosklearn/util/pipeline.py similarity index 80% rename from autosklearn/util/paramsklearn.py rename to autosklearn/util/pipeline.py index 46227750e8..9f3f3be67c 100755 --- a/autosklearn/util/paramsklearn.py +++ b/autosklearn/util/pipeline.py @@ -1,7 +1,7 @@ # -*- encoding: utf-8 -*- from autosklearn.constants import * -from ParamSklearn.classification import ParamSklearnClassifier -from ParamSklearn.regression import ParamSklearnRegressor +from autosklearn.pipeline.classification import SimpleClassificationPipeline +from autosklearn.pipeline.regression import SimpleRegressionPipeline __all__ = [ @@ -31,7 +31,7 @@ def _get_regression_configuration_space(info, include): sparse = False if info['is_sparse'] == 1: sparse = True - configuration_space = ParamSklearnRegressor. \ + configuration_space = SimpleRegressionPipeline. \ get_hyperparameter_search_space(include=include, dataset_properties={'sparse': sparse}) return configuration_space @@ -62,20 +62,20 @@ def _get_classification_configuration_space(info, include): 'sparse': sparse } - return ParamSklearnClassifier.get_hyperparameter_search_space( + return SimpleClassificationPipeline.get_hyperparameter_search_space( dataset_properties=dataset_properties, include=include) def get_model(configuration, seed): if 'classifier' in configuration: - return ParamSklearnClassifier(configuration, seed) + return SimpleClassificationPipeline(configuration, seed) elif 'regressor' in configuration: - return ParamSklearnRegressor(configuration, seed) + return SimpleRegressionPipeline(configuration, seed) def get_class(info): if info['task'] in REGRESSION_TASKS: - return ParamSklearnRegressor + return SimpleRegressionPipeline else: - return ParamSklearnClassifier + return SimpleClassificationPipeline diff --git a/misc/create_hyperparameter_table.py b/misc/create_hyperparameter_table.py index 198decac80..6e95289804 100644 --- a/misc/create_hyperparameter_table.py +++ b/misc/create_hyperparameter_table.py @@ -5,8 +5,8 @@ import subprocess import HPOlibConfigSpace.hyperparameters -import ParamSklearn.classification -import ParamSklearn.regression +import autosklearn.pipeline.classification +import autosklearn.pipeline.regression # Some macros COND = "conditional" @@ -59,9 +59,11 @@ def get_dict(task_type="classifier", **kwargs): assert task_type in ("classifier", "regressor") if task_type == "classifier": - cs = ParamSklearn.classification.ParamSklearnClassifier.get_hyperparameter_search_space(dataset_properties=kwargs) + cs = autosklearn.pipeline.classification.SimpleClassificationPipeline\ + .get_hyperparameter_search_space(dataset_properties=kwargs) elif task_type == "regressor": - cs = ParamSklearn.regression.ParamSklearnRegressor.get_hyperparameter_search_space(dataset_properties=kwargs) + cs = autosklearn.pipeline.regression.SimpleRegressionPipeline\ + .get_hyperparameter_search_space(dataset_properties=kwargs) else: raise ValueError("'task_type' is not in ('classifier', 'regressor')") @@ -149,7 +151,7 @@ def get_dict(task_type="classifier", **kwargs): d[est][COND][UN] += 1 else: raise ValueError("Don't know that type: %s" % type(h)) - print preprocessor_dict + print(preprocessor_dict) return (estimator_dict, preprocessor_dict) @@ -202,7 +204,7 @@ def main(): tex_doc = template_string % "\n".join([est_table, preproc_table]) if args.save is None: - print tex_doc + print(tex_doc) else: fh = open(args.save, "w") fh.write(tex_doc) diff --git a/misc/random_sampling.py b/misc/random_sampling.py deleted file mode 100644 index 315a7d3adb..0000000000 --- a/misc/random_sampling.py +++ /dev/null @@ -1,24 +0,0 @@ -from ParamSklearn.classification import ParamSklearnClassifier -from HPOlibConfigSpace.random_sampler import RandomSampler -import sklearn.datasets -import sklearn.metrics -import numpy as np - -iris = sklearn.datasets.load_iris() -X = iris.data -Y = iris.target -indices = np.arange(X.shape[0]) -np.random.shuffle(indices) -configuration_space = ParamSklearnClassifier.get_hyperparameter_search_space() -sampler = RandomSampler(configuration_space, 1) -for i in range(10000): - configuration = sampler.sample_configuration() - auto = ParamSklearnClassifier(configuration) - try: - auto = auto.fit(X[indices[:100]], Y[indices[:100]]) - except Exception as e: - print configuration - print e - continue - predictions = auto.predict(X[indices[100:]]) - print sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) \ No newline at end of file diff --git a/requ.txt b/requ.txt index f93fdc29e1..c8a3ddae5f 100644 --- a/requ.txt +++ b/requ.txt @@ -18,4 +18,3 @@ pandas Cython git+https://github.com/automl/HPOlibConfigSpace@master -git+https://github.com/automl/paramsklearn@development diff --git a/scripts/update_metadata/03_autosklearn_retrieve_metadata.py b/scripts/update_metadata/03_autosklearn_retrieve_metadata.py index fccdb90469..530f734b98 100644 --- a/scripts/update_metadata/03_autosklearn_retrieve_metadata.py +++ b/scripts/update_metadata/03_autosklearn_retrieve_metadata.py @@ -11,7 +11,7 @@ FloatHyperparameter, CategoricalHyperparameter, Constant from autosklearn.constants import * -from autosklearn.util import paramsklearn +from autosklearn.util import pipeline def retrieve_matadata(validation_directory, metric, configuration_space, @@ -295,7 +295,7 @@ def main(): output_dir_ = os.path.join(output_dir, '%s_%s_%s' % ( metric, TASK_TYPES_TO_STRING[task], 'sparse' if sparse else 'dense')) - configuration_space = paramsklearn.get_configuration_space( + configuration_space = pipeline.get_configuration_space( {'is_sparse': sparse, 'task': task} ) diff --git a/source/conf.py b/source/conf.py index e46c9ef792..9381aebdef 100644 --- a/source/conf.py +++ b/source/conf.py @@ -58,13 +58,13 @@ class BaseEstimator(object): 'sklearn.utils', 'psutil','pyyaml','pandas', 'matplotlib', - 'ParamSklearn', - 'ParamSklearn.implementations', - 'ParamSklearn.implementations.OneHotEncoder', - 'ParamSklearn.implementations.Imputation', - 'ParamSklearn.implementations.StandardScaler', - 'ParamSklearn.classification', - 'ParamSklearn.regression', + 'autosklearn.pipeline', + 'autosklearn.pipeline.implementations', + 'autosklearn.pipeline.implementations.OneHotEncoder', + 'autosklearn.pipeline.implementations.Imputation', + 'autosklearn.pipeline.implementations.StandardScaler', + 'autosklearn.pipeline.classification', + 'autosklearn.pipeline.regression', 'HPOlibConfigSpace', 'HPOlibConfigSpace.converters', 'HPOlibConfigSpace.configuration_space'] diff --git a/source/first_steps.rst b/source/first_steps.rst index 3520d15200..a241c6ce9f 100644 --- a/source/first_steps.rst +++ b/source/first_steps.rst @@ -6,7 +6,7 @@ ParamSklearn, feed it to the random search algorithm implemented by the HPOlibConfigSpace package and then train a classifier with a random configuration on the iris dataset. - >>> from ParamSklearn.classification import ParamSklearnClassifier + >>> from autosklearn.pipeline.classification import SimpleClassificationPipeline >>> import sklearn.datasets >>> import sklearn.metrics >>> import numpy as np @@ -16,10 +16,10 @@ configuration on the iris dataset. >>> indices = np.arange(X.shape[0]) >>> np.random.seed(1) >>> np.random.shuffle(indices) - >>> configuration_space = ParamSklearnClassifier.get_hyperparameter_search_space() + >>> configuration_space = SimpleClassificationPipeline.get_hyperparameter_search_space() >>> configuration_space.seed(1) >>> configuration = configuration_space.sample_configuration() - >>> cls = ParamSklearnClassifier(configuration, random_state=1) + >>> cls = SimpleClassificationPipeline(configuration, random_state=1) >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) >>> predictions = cls.predict(X[indices[100:]]) >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) diff --git a/source/index.rst b/source/index.rst index 4defb25999..5dec85da48 100644 --- a/source/index.rst +++ b/source/index.rst @@ -55,11 +55,9 @@ with Ubuntu. It should run on other Linux distributions, but won't work on a MAC or on a windows PC. It requires scikit-learn 0.16.1, which in turn requires numpy and scipy. -*auto-sklearn* has several dependencies, which are not yet automatically -resolved: +*auto-sklearn* has a dependency, which are not yet automatically resolved: * `HPOlibConfigSpace `_ -* `ParamSklearn `_ Please install these manually with: diff --git a/test/automl/base.py b/test/automl/base.py index 924ac5d8b1..26579b9956 100644 --- a/test/automl/base.py +++ b/test/automl/base.py @@ -7,6 +7,9 @@ class Base(unittest.TestCase): + """All tests which are a subclass of this must define their own output + directory and call self._setUp.""" + def setUp(self): self.test_dir = os.path.dirname(__file__) diff --git a/test/automl/test_estimators.py b/test/automl/test_estimators.py index fdf5f1fe5f..fdd4ec07ff 100644 --- a/test/automl/test_estimators.py +++ b/test/automl/test_estimators.py @@ -5,7 +5,7 @@ import unittest import numpy as np -import ParamSklearn.util as putil +import autosklearn.pipeline.util as putil from autosklearn.classification import AutoSklearnClassifier from autosklearn.util.backend import Backend diff --git a/test/automl/test_start_automl.py b/test/automl/test_start_automl.py index fe0d2fe25b..2cd4765be8 100644 --- a/test/automl/test_start_automl.py +++ b/test/automl/test_start_automl.py @@ -8,9 +8,10 @@ import numpy as np import six +import sklearn.datasets import autosklearn.automl -import ParamSklearn.util as putil +import autosklearn.pipeline.util as putil from autosklearn.constants import * from autosklearn.cli.base_interface import store_and_or_load_data @@ -47,89 +48,65 @@ def test_binary_score(self): self.skipTest('This test does currently not run on travis-ci. ' 'Make sure it runs locally on your machine!') - output = os.path.join(self.test_dir, '..', '.tmp_test_fit') + output = os.path.join(self.test_dir, '..', '.tmp_test_binary_score') self._setUp(output) - # Had to use this dummy dataset because - # I cannot find a way to efficiently load a binary dataset - # without changing files in paramsklearn or automl class - - X_train = np.random.rand(100, 20) - Y_train = np.random.randint(0, 2, 100) + data = sklearn.datasets.make_classification( + n_samples=1000, n_features=20, n_redundant=5, n_informative=5, + n_repeated=2, n_clusters_per_class=2, random_state=1) + X_train = data[0][:700] + Y_train = data[1][:700] + X_test = data[0][700:] + Y_test = data[1][700:] - automl = autosklearn.automl.AutoML(output, output, 30, 15) + automl = autosklearn.automl.AutoML(output, output, 15, 15) automl.fit(X_train, Y_train, task=BINARY_CLASSIFICATION) self.assertEqual(automl._task, BINARY_CLASSIFICATION) - X_test = np.random.rand(50, 20) - Y_test = np.random.randint(0, 2, 50) - score = automl.score(X_test, Y_test) - self.assertGreaterEqual(score, 0.0) + self.assertGreaterEqual(score, 0.5) del automl self._tearDown(output) def test_automl_outputs(self): - print("1.") output = os.path.join(self.test_dir, '..', '.tmp_test_automl_outputs') - print("2.") self._setUp(output) - print("3.") name = '31_bac' - print("4.") dataset = os.path.join(self.test_dir, '..', '.data', name) - print("5.") data_manager_file = os.path.join(output, '.auto-sklearn', 'datamanager.pkl') - print("6.") queue = multiprocessing.Queue() - print("7.") auto = autosklearn.automl.AutoML( output, output, 15, 15, initial_configurations_via_metalearning=25, queue=queue, seed=100) - print("8.") auto.fit_automl_dataset(dataset) - print("9.") # pickled data manager (without one hot encoding!) with open(data_manager_file, 'rb') as fh: - print("10.") D = six.moves.cPickle.load(fh) - print("11.") self.assertTrue(np.allclose(D.data['X_train'][0, :3], [1., 12., 2.])) - print("12.") - print("13.") time_needed_to_load_data, data_manager_file, procs = \ queue.get() - print("14.") for proc in procs: proc.wait() - print("15.") # Start time print(os.listdir(os.path.join(output, '.auto-sklearn'))) - print("16.") start_time_file_path = os.path.join(output, '.auto-sklearn', "start_time_100") - print("17.") with open(start_time_file_path, 'r') as fh: - print("18.") start_time = float(fh.read()) - print("19.") self.assertGreaterEqual(time.time() - start_time, 10) - print("20.") del auto - print("21.") self._tearDown(output) - print("22.") def test_do_dummy_prediction(self): output = os.path.join(self.test_dir, '..', diff --git a/test/evaluation/test_cv_evaluator.py b/test/evaluation/test_cv_evaluator.py index ff73189014..460bba593b 100644 --- a/test/evaluation/test_cv_evaluator.py +++ b/test/evaluation/test_cv_evaluator.py @@ -11,8 +11,8 @@ from autosklearn.constants import * from autosklearn.data.competition_data_manager import CompetitionDataManager from autosklearn.evaluation.cv_evaluator import CVEvaluator -from autosklearn.util.paramsklearn import get_configuration_space -from ParamSklearn.util import get_dataset +from autosklearn.util.pipeline import get_configuration_space +from autosklearn.pipeline.util import get_dataset N_TEST_RUNS = 10 diff --git a/test/evaluation/test_holdout_evaluator.py b/test/evaluation/test_holdout_evaluator.py index 87d2a11b8e..9c184fe766 100644 --- a/test/evaluation/test_holdout_evaluator.py +++ b/test/evaluation/test_holdout_evaluator.py @@ -11,13 +11,13 @@ from numpy.linalg import LinAlgError import sklearn.datasets -from ParamSklearn.util import get_dataset +from autosklearn.pipeline.util import get_dataset from autosklearn.constants import * from autosklearn.data.competition_data_manager import CompetitionDataManager from autosklearn.evaluation.holdout_evaluator import HoldoutEvaluator from autosklearn.util.data import convert_to_bin -from autosklearn.util.paramsklearn import get_configuration_space +from autosklearn.util.pipeline import get_configuration_space N_TEST_RUNS = 10 diff --git a/test/evaluation/test_nested_cv_evaluator.py b/test/evaluation/test_nested_cv_evaluator.py index 3c9ca5f49c..c06fa8bd3f 100644 --- a/test/evaluation/test_nested_cv_evaluator.py +++ b/test/evaluation/test_nested_cv_evaluator.py @@ -11,8 +11,8 @@ from autosklearn.constants import * from autosklearn.data.competition_data_manager import CompetitionDataManager from autosklearn.evaluation.nested_cv_evaluator import NestedCVEvaluator -from autosklearn.util.paramsklearn import get_configuration_space -from ParamSklearn.util import get_dataset +from autosklearn.util.pipeline import get_configuration_space +from autosklearn.pipeline.util import get_dataset N_TEST_RUNS = 10 diff --git a/test/metalearning/pyMetaLearn/test_meta_base.py b/test/metalearning/pyMetaLearn/test_meta_base.py index 34741b9a52..22868a60cc 100644 --- a/test/metalearning/pyMetaLearn/test_meta_base.py +++ b/test/metalearning/pyMetaLearn/test_meta_base.py @@ -5,7 +5,7 @@ import numpy as np import pandas as pd -import ParamSklearn.classification +import autosklearn.pipeline.classification from autosklearn.metalearning.metalearning.meta_base import MetaBase, Run @@ -18,7 +18,8 @@ def setUp(self): data_dir = os.path.join(data_dir, 'test_meta_base_data') os.chdir(data_dir) - cs = ParamSklearn.classification.ParamSklearnClassifier.get_hyperparameter_search_space() + cs = autosklearn.pipeline.classification.SimpleClassificationPipeline\ + .get_hyperparameter_search_space() self.base = MetaBase(cs, data_dir) diff --git a/test/metalearning/pyMetaLearn/test_meta_features.py b/test/metalearning/pyMetaLearn/test_meta_features.py index d5869dee50..8b391b9dbe 100644 --- a/test/metalearning/pyMetaLearn/test_meta_features.py +++ b/test/metalearning/pyMetaLearn/test_meta_features.py @@ -9,8 +9,8 @@ from sklearn.preprocessing.imputation import Imputer from sklearn.datasets import make_multilabel_classification -from ParamSklearn.implementations.OneHotEncoder import OneHotEncoder -from ParamSklearn.implementations.StandardScaler import StandardScaler +from autosklearn.pipeline.implementations.OneHotEncoder import OneHotEncoder +from autosklearn.pipeline.implementations.StandardScaler import StandardScaler from autosklearn.metalearning.metafeatures.metafeature import MetaFeatureValue import autosklearn.metalearning.metafeatures.metafeatures as meta_features diff --git a/test/metalearning/pyMetaLearn/test_meta_features_sparse.py b/test/metalearning/pyMetaLearn/test_meta_features_sparse.py index 9e99777d51..6ad3bb61d7 100644 --- a/test/metalearning/pyMetaLearn/test_meta_features_sparse.py +++ b/test/metalearning/pyMetaLearn/test_meta_features_sparse.py @@ -11,8 +11,8 @@ from scipy import sparse from sklearn.preprocessing.imputation import Imputer -from ParamSklearn.implementations.OneHotEncoder import OneHotEncoder -from ParamSklearn.implementations.StandardScaler import StandardScaler +from autosklearn.pipeline.implementations.OneHotEncoder import OneHotEncoder +from autosklearn.pipeline.implementations.StandardScaler import StandardScaler import autosklearn.metalearning.metafeatures.metafeatures as meta_features import test_meta_features diff --git a/test/metalearning/pyMetaLearn/test_metalearner.py b/test/metalearning/pyMetaLearn/test_metalearner.py index e21abc078c..80bdfd3a65 100644 --- a/test/metalearning/pyMetaLearn/test_metalearner.py +++ b/test/metalearning/pyMetaLearn/test_metalearner.py @@ -9,7 +9,7 @@ import pandas as pd from HPOlibConfigSpace.configuration_space import Configuration -import ParamSklearn.classification +import autosklearn.pipeline.classification import autosklearn.metalearning.optimizers.metalearn_optimizer.metalearner as metalearner @@ -25,7 +25,7 @@ def setUp(self): data_dir = os.path.join(data_dir, 'test_meta_base_data') os.chdir(data_dir) - self.cs = ParamSklearn.classification.ParamSklearnClassifier\ + self.cs = autosklearn.pipeline.classification.SimpleClassificationPipeline\ .get_hyperparameter_search_space() self.meta_optimizer = metalearner.MetaLearningOptimizer( diff --git a/test/metalearning/test_metalearning.py b/test/metalearning/test_metalearning.py index ea537037f7..73a8dd175e 100644 --- a/test/metalearning/test_metalearning.py +++ b/test/metalearning/test_metalearning.py @@ -3,13 +3,13 @@ import unittest -from ParamSklearn.util import get_dataset +from autosklearn.pipeline.util import get_dataset from autosklearn.constants import * from autosklearn.metalearning.mismbo import calc_meta_features, \ calc_meta_features_encoded, \ create_metalearning_string_for_smac_call -from autosklearn.util.paramsklearn import get_configuration_space +from autosklearn.util.pipeline import get_configuration_space class MetafeatureValueDummy(object): diff --git a/test/test_pipeline/__init__.py b/test/test_pipeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/components/__init__.py b/test/test_pipeline/components/__init__.py similarity index 100% rename from tests/components/__init__.py rename to test/test_pipeline/components/__init__.py diff --git a/tests/components/classification/__init__.py b/test/test_pipeline/components/classification/__init__.py similarity index 100% rename from tests/components/classification/__init__.py rename to test/test_pipeline/components/classification/__init__.py diff --git a/tests/components/classification/test_adaboost.py b/test/test_pipeline/components/classification/test_adaboost.py similarity index 90% rename from tests/components/classification/test_adaboost.py rename to test/test_pipeline/components/classification/test_adaboost.py index 18eb42807c..4905313498 100644 --- a/tests/components/classification/test_adaboost.py +++ b/test/test_pipeline/components/classification/test_adaboost.py @@ -1,8 +1,8 @@ import unittest -from ParamSklearn.components.classification.adaboost import \ +from autosklearn.pipeline.components.classification.adaboost import \ AdaboostClassifier -from ParamSklearn.util import _test_classifier +from autosklearn.pipeline.util import _test_classifier import sklearn.metrics diff --git a/tests/components/classification/test_bernoulli_nb.py b/test/test_pipeline/components/classification/test_bernoulli_nb.py similarity index 84% rename from tests/components/classification/test_bernoulli_nb.py rename to test/test_pipeline/components/classification/test_bernoulli_nb.py index 321377bd01..498a40d832 100644 --- a/tests/components/classification/test_bernoulli_nb.py +++ b/test/test_pipeline/components/classification/test_bernoulli_nb.py @@ -1,8 +1,8 @@ import unittest -from ParamSklearn.components.classification.bernoulli_nb import \ +from autosklearn.pipeline.components.classification.bernoulli_nb import \ BernoulliNB -from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit +from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit import sklearn.metrics diff --git a/tests/components/classification/test_decision_tree.py b/test/test_pipeline/components/classification/test_decision_tree.py similarity index 87% rename from tests/components/classification/test_decision_tree.py rename to test/test_pipeline/components/classification/test_decision_tree.py index 4b521247b8..f8083cb17f 100644 --- a/tests/components/classification/test_decision_tree.py +++ b/test/test_pipeline/components/classification/test_decision_tree.py @@ -1,7 +1,7 @@ import unittest -from ParamSklearn.components.classification.decision_tree import DecisionTree -from ParamSklearn.util import _test_classifier, _test_classifier_predict_proba +from autosklearn.pipeline.components.classification.decision_tree import DecisionTree +from autosklearn.pipeline.util import _test_classifier, _test_classifier_predict_proba import sklearn.metrics diff --git a/tests/components/classification/test_extra_trees.py b/test/test_pipeline/components/classification/test_extra_trees.py similarity index 88% rename from tests/components/classification/test_extra_trees.py rename to test/test_pipeline/components/classification/test_extra_trees.py index c58ef61eeb..fe926f1926 100644 --- a/tests/components/classification/test_extra_trees.py +++ b/test/test_pipeline/components/classification/test_extra_trees.py @@ -1,8 +1,8 @@ import unittest -from ParamSklearn.components.classification.extra_trees import \ +from autosklearn.pipeline.components.classification.extra_trees import \ ExtraTreesClassifier -from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit +from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit import sklearn.metrics diff --git a/tests/components/classification/test_gaussian_nb.py b/test/test_pipeline/components/classification/test_gaussian_nb.py similarity index 84% rename from tests/components/classification/test_gaussian_nb.py rename to test/test_pipeline/components/classification/test_gaussian_nb.py index 574c1c49ab..79d1007724 100644 --- a/tests/components/classification/test_gaussian_nb.py +++ b/test/test_pipeline/components/classification/test_gaussian_nb.py @@ -1,8 +1,8 @@ import unittest -from ParamSklearn.components.classification.gaussian_nb import \ +from autosklearn.pipeline.components.classification.gaussian_nb import \ GaussianNB -from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit +from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit import sklearn.metrics diff --git a/tests/components/classification/test_gradient_boosting.py b/test/test_pipeline/components/classification/test_gradient_boosting.py similarity index 83% rename from tests/components/classification/test_gradient_boosting.py rename to test/test_pipeline/components/classification/test_gradient_boosting.py index b157d1197f..18137a6fa5 100644 --- a/tests/components/classification/test_gradient_boosting.py +++ b/test/test_pipeline/components/classification/test_gradient_boosting.py @@ -1,8 +1,8 @@ import unittest -from ParamSklearn.components.classification.gradient_boosting import \ +from autosklearn.pipeline.components.classification.gradient_boosting import \ GradientBoostingClassifier -from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit +from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit import sklearn.metrics diff --git a/tests/components/classification/test_k_nearest_neighbor.py b/test/test_pipeline/components/classification/test_k_nearest_neighbor.py similarity index 87% rename from tests/components/classification/test_k_nearest_neighbor.py rename to test/test_pipeline/components/classification/test_k_nearest_neighbor.py index ccb7e96091..dcc3d57e14 100644 --- a/tests/components/classification/test_k_nearest_neighbor.py +++ b/test/test_pipeline/components/classification/test_k_nearest_neighbor.py @@ -1,8 +1,8 @@ import unittest -from ParamSklearn.components.classification.k_nearest_neighbors import \ +from autosklearn.pipeline.components.classification.k_nearest_neighbors import \ KNearestNeighborsClassifier -from ParamSklearn.util import _test_classifier, _test_classifier_predict_proba +from autosklearn.pipeline.util import _test_classifier, _test_classifier_predict_proba import sklearn.metrics diff --git a/tests/components/classification/test_lda.py b/test/test_pipeline/components/classification/test_lda.py similarity index 87% rename from tests/components/classification/test_lda.py rename to test/test_pipeline/components/classification/test_lda.py index e76c3523a1..28915f0e35 100644 --- a/tests/components/classification/test_lda.py +++ b/test/test_pipeline/components/classification/test_lda.py @@ -1,7 +1,7 @@ import unittest -from ParamSklearn.components.classification.lda import LDA -from ParamSklearn.util import _test_classifier +from autosklearn.pipeline.components.classification.lda import LDA +from autosklearn.pipeline.util import _test_classifier import sklearn.metrics diff --git a/tests/components/classification/test_liblinear.py b/test/test_pipeline/components/classification/test_liblinear.py similarity index 69% rename from tests/components/classification/test_liblinear.py rename to test/test_pipeline/components/classification/test_liblinear.py index 167397fd33..de30c1405d 100644 --- a/tests/components/classification/test_liblinear.py +++ b/test/test_pipeline/components/classification/test_liblinear.py @@ -1,7 +1,7 @@ import unittest -from ParamSklearn.components.classification.liblinear_svc import LibLinear_SVC -from ParamSklearn.util import _test_classifier +from autosklearn.pipeline.components.classification.liblinear_svc import LibLinear_SVC +from autosklearn.pipeline.util import _test_classifier class LibLinearComponentTest(unittest.TestCase): diff --git a/tests/components/classification/test_libsvm_svc.py b/test/test_pipeline/components/classification/test_libsvm_svc.py similarity index 94% rename from tests/components/classification/test_libsvm_svc.py rename to test/test_pipeline/components/classification/test_libsvm_svc.py index 61ff38c030..a62b464644 100644 --- a/tests/components/classification/test_libsvm_svc.py +++ b/test/test_pipeline/components/classification/test_libsvm_svc.py @@ -1,7 +1,7 @@ import unittest -from ParamSklearn.components.classification.libsvm_svc import LibSVM_SVC -from ParamSklearn.util import _test_classifier, \ +from autosklearn.pipeline.components.classification.libsvm_svc import LibSVM_SVC +from autosklearn.pipeline.util import _test_classifier, \ _test_classifier_predict_proba, get_dataset import numpy as np diff --git a/tests/components/classification/test_multinomial_nb.py b/test/test_pipeline/components/classification/test_multinomial_nb.py similarity index 91% rename from tests/components/classification/test_multinomial_nb.py rename to test/test_pipeline/components/classification/test_multinomial_nb.py index f2cc49f385..8f8bc42379 100644 --- a/tests/components/classification/test_multinomial_nb.py +++ b/test/test_pipeline/components/classification/test_multinomial_nb.py @@ -1,8 +1,8 @@ import unittest -from ParamSklearn.components.classification.multinomial_nb import \ +from autosklearn.pipeline.components.classification.multinomial_nb import \ MultinomialNB -from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit, \ +from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit, \ get_dataset import numpy as np diff --git a/tests/components/classification/test_passive_aggressive.py b/test/test_pipeline/components/classification/test_passive_aggressive.py similarity index 91% rename from tests/components/classification/test_passive_aggressive.py rename to test/test_pipeline/components/classification/test_passive_aggressive.py index e376abe4bf..56ec91b54a 100644 --- a/tests/components/classification/test_passive_aggressive.py +++ b/test/test_pipeline/components/classification/test_passive_aggressive.py @@ -1,8 +1,8 @@ import unittest -from ParamSklearn.components.classification.passive_aggressive import \ +from autosklearn.pipeline.components.classification.passive_aggressive import \ PassiveAggressive -from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit +from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit import sklearn.metrics diff --git a/tests/components/classification/test_proj_logit.py b/test/test_pipeline/components/classification/test_proj_logit.py similarity index 82% rename from tests/components/classification/test_proj_logit.py rename to test/test_pipeline/components/classification/test_proj_logit.py index bae277679d..d9972ea916 100644 --- a/tests/components/classification/test_proj_logit.py +++ b/test/test_pipeline/components/classification/test_proj_logit.py @@ -1,7 +1,7 @@ import unittest -from ParamSklearn.components.classification.proj_logit import ProjLogitCLassifier -from ParamSklearn.util import _test_classifier +from autosklearn.pipeline.components.classification.proj_logit import ProjLogitCLassifier +from autosklearn.pipeline.util import _test_classifier import sklearn.metrics diff --git a/tests/components/classification/test_qda.py b/test/test_pipeline/components/classification/test_qda.py similarity index 87% rename from tests/components/classification/test_qda.py rename to test/test_pipeline/components/classification/test_qda.py index 9d60ac83bf..c8c2c0e2cf 100644 --- a/tests/components/classification/test_qda.py +++ b/test/test_pipeline/components/classification/test_qda.py @@ -1,7 +1,7 @@ import unittest -from ParamSklearn.components.classification.qda import QDA -from ParamSklearn.util import _test_classifier +from autosklearn.pipeline.components.classification.qda import QDA +from autosklearn.pipeline.util import _test_classifier import sklearn.metrics diff --git a/tests/components/classification/test_random_forest.py b/test/test_pipeline/components/classification/test_random_forest.py similarity index 86% rename from tests/components/classification/test_random_forest.py rename to test/test_pipeline/components/classification/test_random_forest.py index ee53405031..81bd0a4606 100644 --- a/tests/components/classification/test_random_forest.py +++ b/test/test_pipeline/components/classification/test_random_forest.py @@ -1,7 +1,7 @@ import unittest -from ParamSklearn.components.classification.random_forest import RandomForest -from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit +from autosklearn.pipeline.components.classification.random_forest import RandomForest +from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit import sklearn.metrics diff --git a/tests/components/classification/test_sgd.py b/test/test_pipeline/components/classification/test_sgd.py similarity index 91% rename from tests/components/classification/test_sgd.py rename to test/test_pipeline/components/classification/test_sgd.py index 488902f6fd..883cbf7a59 100644 --- a/tests/components/classification/test_sgd.py +++ b/test/test_pipeline/components/classification/test_sgd.py @@ -1,7 +1,7 @@ import unittest -from ParamSklearn.components.classification.sgd import SGD -from ParamSklearn.util import _test_classifier, _test_classifier_iterative_fit +from autosklearn.pipeline.components.classification.sgd import SGD +from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit import sklearn.metrics diff --git a/tests/components/data_preprocessing/__init__.py b/test/test_pipeline/components/data_preprocessing/__init__.py similarity index 100% rename from tests/components/data_preprocessing/__init__.py rename to test/test_pipeline/components/data_preprocessing/__init__.py diff --git a/tests/components/data_preprocessing/test_balancing.py b/test/test_pipeline/components/data_preprocessing/test_balancing.py similarity index 79% rename from tests/components/data_preprocessing/test_balancing.py rename to test/test_pipeline/components/data_preprocessing/test_balancing.py index ffc7b61af9..8da740bd53 100644 --- a/tests/components/data_preprocessing/test_balancing.py +++ b/test/test_pipeline/components/data_preprocessing/test_balancing.py @@ -7,19 +7,19 @@ import sklearn.datasets import sklearn.metrics -from ParamSklearn.components.data_preprocessing.balancing import Balancing -from ParamSklearn.classification import ParamSklearnClassifier -from ParamSklearn.components.classification.adaboost import AdaboostClassifier -from ParamSklearn.components.classification.decision_tree import DecisionTree -from ParamSklearn.components.classification.extra_trees import ExtraTreesClassifier -from ParamSklearn.components.classification.gradient_boosting import GradientBoostingClassifier -from ParamSklearn.components.classification.random_forest import RandomForest -from ParamSklearn.components.classification.liblinear_svc import LibLinear_SVC -from ParamSklearn.components.classification.libsvm_svc import LibSVM_SVC -from ParamSklearn.components.classification.sgd import SGD -from ParamSklearn.components.feature_preprocessing\ +from autosklearn.pipeline.components.data_preprocessing.balancing import Balancing +from autosklearn.pipeline.classification import SimpleClassificationPipeline +from autosklearn.pipeline.components.classification.adaboost import AdaboostClassifier +from autosklearn.pipeline.components.classification.decision_tree import DecisionTree +from autosklearn.pipeline.components.classification.extra_trees import ExtraTreesClassifier +from autosklearn.pipeline.components.classification.gradient_boosting import GradientBoostingClassifier +from autosklearn.pipeline.components.classification.random_forest import RandomForest +from autosklearn.pipeline.components.classification.liblinear_svc import LibLinear_SVC +from autosklearn.pipeline.components.classification.libsvm_svc import LibSVM_SVC +from autosklearn.pipeline.components.classification.sgd import SGD +from autosklearn.pipeline.components.feature_preprocessing\ .extra_trees_preproc_for_classification import ExtraTreesPreprocessor -from ParamSklearn.components.feature_preprocessing.liblinear_svc_preprocessor import LibLinear_Preprocessor +from autosklearn.pipeline.components.feature_preprocessing.liblinear_svc_preprocessor import LibLinear_Preprocessor class BalancingComponentTest(unittest.TestCase): @@ -85,11 +85,12 @@ def test_weighting_effect(self): X_test = data_[0][700:] Y_test = data_[1][700:] - cs = ParamSklearnClassifier.get_hyperparameter_search_space( - include={'classifier': [name]}) + cs = SimpleClassificationPipeline.\ + get_hyperparameter_search_space( + include={'classifier': [name]}) default = cs.get_default_configuration() default._values['balancing:strategy'] = strategy - classifier = ParamSklearnClassifier(default, random_state=1) + classifier = SimpleClassificationPipeline(default, random_state=1) predictor = classifier.fit(X_train, Y_train) predictions = predictor.predict(X_test) self.assertAlmostEqual(acc, @@ -103,11 +104,11 @@ def test_weighting_effect(self): X_test = data_[0][700:] Y_test = data_[1][700:] - cs = ParamSklearnClassifier.get_hyperparameter_search_space( + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( include={'classifier': [name]}) default = cs.get_default_configuration() default._values['balancing:strategy'] = strategy - classifier = ParamSklearnClassifier(default, random_state=1) + classifier = SimpleClassificationPipeline(default, random_state=1) Xt, fit_params = classifier.pre_transform(X_train, Y_train) classifier.fit_estimator(Xt, Y_train, fit_params=fit_params) predictions = classifier.predict(X_test) @@ -129,11 +130,11 @@ def test_weighting_effect(self): X_test = data_[0][700:] Y_test = data_[1][700:] - cs = ParamSklearnClassifier.get_hyperparameter_search_space( + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( include={'classifier': ['sgd'], 'preprocessor': [name]}) default = cs.get_default_configuration() default._values['balancing:strategy'] = strategy - classifier = ParamSklearnClassifier(default, random_state=1) + classifier = SimpleClassificationPipeline(default, random_state=1) predictor = classifier.fit(X_train, Y_train) predictions = predictor.predict(X_test) self.assertAlmostEqual(acc, @@ -148,11 +149,11 @@ def test_weighting_effect(self): X_test = data_[0][700:] Y_test = data_[1][700:] - cs = ParamSklearnClassifier.get_hyperparameter_search_space( + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( include={'classifier': ['sgd'], 'preprocessor': [name]}) default = cs.get_default_configuration() default._values['balancing:strategy'] = strategy - classifier = ParamSklearnClassifier(default, random_state=1) + classifier = SimpleClassificationPipeline(default, random_state=1) Xt, fit_params = classifier.pre_transform(X_train, Y_train) classifier.fit_estimator(Xt, Y_train, fit_params=fit_params) predictions = classifier.predict(X_test) diff --git a/tests/components/data_preprocessing/test_imputation.py b/test/test_pipeline/components/data_preprocessing/test_imputation.py similarity index 88% rename from tests/components/data_preprocessing/test_imputation.py rename to test/test_pipeline/components/data_preprocessing/test_imputation.py index 092e3d47b7..1f94ab8a5f 100644 --- a/tests/components/data_preprocessing/test_imputation.py +++ b/test/test_pipeline/components/data_preprocessing/test_imputation.py @@ -2,8 +2,8 @@ from scipy import sparse -from ParamSklearn.components.data_preprocessing.imputation import Imputation -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase +from autosklearn.pipeline.components.data_preprocessing.imputation import Imputation +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase class ImputationTest(PreprocessingTestCase): diff --git a/tests/components/data_preprocessing/test_one_hot_encoding.py b/test/test_pipeline/components/data_preprocessing/test_one_hot_encoding.py similarity index 96% rename from tests/components/data_preprocessing/test_one_hot_encoding.py rename to test/test_pipeline/components/data_preprocessing/test_one_hot_encoding.py index 371152926c..d062a202de 100644 --- a/tests/components/data_preprocessing/test_one_hot_encoding.py +++ b/test/test_pipeline/components/data_preprocessing/test_one_hot_encoding.py @@ -4,8 +4,8 @@ import numpy as np from scipy import sparse -from ParamSklearn.components.data_preprocessing.one_hot_encoding import OneHotEncoder -from ParamSklearn.util import _test_preprocessing +from autosklearn.pipeline.components.data_preprocessing.one_hot_encoding import OneHotEncoder +from autosklearn.pipeline.util import _test_preprocessing class OneHotEncoderTest(unittest.TestCase): diff --git a/tests/components/data_preprocessing/test_scaling.py b/test/test_pipeline/components/data_preprocessing/test_scaling.py similarity index 94% rename from tests/components/data_preprocessing/test_scaling.py rename to test/test_pipeline/components/data_preprocessing/test_scaling.py index f3254e6846..0182d26446 100644 --- a/tests/components/data_preprocessing/test_scaling.py +++ b/test/test_pipeline/components/data_preprocessing/test_scaling.py @@ -3,8 +3,8 @@ import numpy as np import sklearn.datasets -from ParamSklearn.components.data_preprocessing.rescaling import RescalingChoice -from ParamSklearn.util import get_dataset +from autosklearn.pipeline.components.data_preprocessing.rescaling import RescalingChoice +from autosklearn.pipeline.util import get_dataset class ScalingComponentTest(unittest.TestCase): diff --git a/tests/components/feature_preprocessing/__init__.py b/test/test_pipeline/components/feature_preprocessing/__init__.py similarity index 100% rename from tests/components/feature_preprocessing/__init__.py rename to test/test_pipeline/components/feature_preprocessing/__init__.py diff --git a/tests/components/feature_preprocessing/test_NoPreprocessing.py b/test/test_pipeline/components/feature_preprocessing/test_NoPreprocessing.py similarity index 82% rename from tests/components/feature_preprocessing/test_NoPreprocessing.py rename to test/test_pipeline/components/feature_preprocessing/test_NoPreprocessing.py index d947645a34..ff613d68aa 100644 --- a/tests/components/feature_preprocessing/test_NoPreprocessing.py +++ b/test/test_pipeline/components/feature_preprocessing/test_NoPreprocessing.py @@ -1,8 +1,8 @@ import numpy as np import unittest -from ParamSklearn.components.feature_preprocessing.no_preprocessing import NoPreprocessing -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase +from autosklearn.pipeline.components.feature_preprocessing.no_preprocessing import NoPreprocessing +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase class NoneComponentTest(PreprocessingTestCase): diff --git a/tests/components/feature_preprocessing/test_choice.py b/test/test_pipeline/components/feature_preprocessing/test_choice.py similarity index 94% rename from tests/components/feature_preprocessing/test_choice.py rename to test/test_pipeline/components/feature_preprocessing/test_choice.py index dfe5a56b76..9ae503f82c 100644 --- a/tests/components/feature_preprocessing/test_choice.py +++ b/test/test_pipeline/components/feature_preprocessing/test_choice.py @@ -2,7 +2,7 @@ import unittest -import ParamSklearn.components.feature_preprocessing as fp +import autosklearn.pipeline.components.feature_preprocessing as fp class FeatureProcessingTest(unittest.TestCase): diff --git a/tests/components/feature_preprocessing/test_densifier.py b/test/test_pipeline/components/feature_preprocessing/test_densifier.py similarity index 75% rename from tests/components/feature_preprocessing/test_densifier.py rename to test/test_pipeline/components/feature_preprocessing/test_densifier.py index 2fedd1abf3..996d655b91 100644 --- a/tests/components/feature_preprocessing/test_densifier.py +++ b/test/test_pipeline/components/feature_preprocessing/test_densifier.py @@ -2,8 +2,8 @@ import numpy as np -from ParamSklearn.components.feature_preprocessing.densifier import Densifier -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase +from autosklearn.pipeline.components.feature_preprocessing.densifier import Densifier +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase class DensifierComponentTest(PreprocessingTestCase): diff --git a/tests/components/feature_preprocessing/test_extra_trees.py b/test/test_pipeline/components/feature_preprocessing/test_extra_trees.py similarity index 90% rename from tests/components/feature_preprocessing/test_extra_trees.py rename to test/test_pipeline/components/feature_preprocessing/test_extra_trees.py index a347ebed61..b1b9656b17 100644 --- a/tests/components/feature_preprocessing/test_extra_trees.py +++ b/test/test_pipeline/components/feature_preprocessing/test_extra_trees.py @@ -1,9 +1,9 @@ import unittest from sklearn.linear_model import RidgeClassifier -from ParamSklearn.components.feature_preprocessing.extra_trees_preproc_for_classification import \ +from autosklearn.pipeline.components.feature_preprocessing.extra_trees_preproc_for_classification import \ ExtraTreesPreprocessor -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset import sklearn.metrics diff --git a/tests/components/feature_preprocessing/test_fast_ica.py b/test/test_pipeline/components/feature_preprocessing/test_fast_ica.py similarity index 91% rename from tests/components/feature_preprocessing/test_fast_ica.py rename to test/test_pipeline/components/feature_preprocessing/test_fast_ica.py index 347be61217..c330ba5a9b 100644 --- a/tests/components/feature_preprocessing/test_fast_ica.py +++ b/test/test_pipeline/components/feature_preprocessing/test_fast_ica.py @@ -1,9 +1,9 @@ import unittest from sklearn.linear_model import Ridge -from ParamSklearn.components.feature_preprocessing.fast_ica import \ +from autosklearn.pipeline.components.feature_preprocessing.fast_ica import \ FastICA -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset import sklearn.metrics diff --git a/tests/components/feature_preprocessing/test_feature_agglomeration.py b/test/test_pipeline/components/feature_preprocessing/test_feature_agglomeration.py similarity index 90% rename from tests/components/feature_preprocessing/test_feature_agglomeration.py rename to test/test_pipeline/components/feature_preprocessing/test_feature_agglomeration.py index 7dc637f03c..284bdd0754 100644 --- a/tests/components/feature_preprocessing/test_feature_agglomeration.py +++ b/test/test_pipeline/components/feature_preprocessing/test_feature_agglomeration.py @@ -1,8 +1,8 @@ import unittest from sklearn.ensemble import RandomForestClassifier -from ParamSklearn.components.feature_preprocessing.feature_agglomeration import FeatureAgglomeration -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ +from autosklearn.pipeline.components.feature_preprocessing.feature_agglomeration import FeatureAgglomeration +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset import sklearn.metrics diff --git a/tests/components/feature_preprocessing/test_gem.py b/test/test_pipeline/components/feature_preprocessing/test_gem.py similarity index 85% rename from tests/components/feature_preprocessing/test_gem.py rename to test/test_pipeline/components/feature_preprocessing/test_gem.py index 827ac13547..1e91dde716 100644 --- a/tests/components/feature_preprocessing/test_gem.py +++ b/test/test_pipeline/components/feature_preprocessing/test_gem.py @@ -1,8 +1,8 @@ import unittest -from ParamSklearn.components.classification.proj_logit import ProjLogitCLassifier -from ParamSklearn.components.feature_preprocessing.gem import GEM -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, get_dataset +from autosklearn.pipeline.components.classification.proj_logit import ProjLogitCLassifier +from autosklearn.pipeline.components.feature_preprocessing.gem import GEM +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, get_dataset import sklearn.metrics diff --git a/tests/components/feature_preprocessing/test_kernel_pca.py b/test/test_pipeline/components/feature_preprocessing/test_kernel_pca.py similarity index 92% rename from tests/components/feature_preprocessing/test_kernel_pca.py rename to test/test_pipeline/components/feature_preprocessing/test_kernel_pca.py index ad21d5c826..042d735f20 100644 --- a/tests/components/feature_preprocessing/test_kernel_pca.py +++ b/test/test_pipeline/components/feature_preprocessing/test_kernel_pca.py @@ -1,9 +1,9 @@ import unittest from sklearn.linear_model import RidgeClassifier -from ParamSklearn.components.feature_preprocessing.kernel_pca import \ +from autosklearn.pipeline.components.feature_preprocessing.kernel_pca import \ KernelPCA -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset import sklearn.metrics diff --git a/tests/components/feature_preprocessing/test_kitchen_sinks.py b/test/test_pipeline/components/feature_preprocessing/test_kitchen_sinks.py similarity index 76% rename from tests/components/feature_preprocessing/test_kitchen_sinks.py rename to test/test_pipeline/components/feature_preprocessing/test_kitchen_sinks.py index 0fe2aa50c3..37ae4f5578 100644 --- a/tests/components/feature_preprocessing/test_kitchen_sinks.py +++ b/test/test_pipeline/components/feature_preprocessing/test_kitchen_sinks.py @@ -2,8 +2,8 @@ import numpy as np -from ParamSklearn.components.feature_preprocessing.kitchen_sinks import RandomKitchenSinks -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase +from autosklearn.pipeline.components.feature_preprocessing.kitchen_sinks import RandomKitchenSinks +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase class KitchenSinkComponent(PreprocessingTestCase): diff --git a/tests/components/feature_preprocessing/test_liblinear.py b/test/test_pipeline/components/feature_preprocessing/test_liblinear.py similarity index 91% rename from tests/components/feature_preprocessing/test_liblinear.py rename to test/test_pipeline/components/feature_preprocessing/test_liblinear.py index 4a8bf0cc3a..543a557e47 100644 --- a/tests/components/feature_preprocessing/test_liblinear.py +++ b/test/test_pipeline/components/feature_preprocessing/test_liblinear.py @@ -1,9 +1,9 @@ import unittest from sklearn.linear_model import RidgeClassifier -from ParamSklearn.components.feature_preprocessing.liblinear_svc_preprocessor import \ +from autosklearn.pipeline.components.feature_preprocessing.liblinear_svc_preprocessor import \ LibLinear_Preprocessor -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset import sklearn.metrics diff --git a/tests/components/feature_preprocessing/test_nystroem_sampler.py b/test/test_pipeline/components/feature_preprocessing/test_nystroem_sampler.py similarity index 96% rename from tests/components/feature_preprocessing/test_nystroem_sampler.py rename to test/test_pipeline/components/feature_preprocessing/test_nystroem_sampler.py index 361c85d922..edc9cfff25 100644 --- a/tests/components/feature_preprocessing/test_nystroem_sampler.py +++ b/test/test_pipeline/components/feature_preprocessing/test_nystroem_sampler.py @@ -3,9 +3,9 @@ import numpy as np import sklearn.preprocessing -from ParamSklearn.components.feature_preprocessing.nystroem_sampler import \ +from autosklearn.pipeline.components.feature_preprocessing.nystroem_sampler import \ Nystroem -from ParamSklearn.util import _test_preprocessing, get_dataset +from autosklearn.pipeline.util import _test_preprocessing, get_dataset class NystroemComponentTest(unittest.TestCase): diff --git a/tests/components/feature_preprocessing/test_pca.py b/test/test_pipeline/components/feature_preprocessing/test_pca.py similarity index 82% rename from tests/components/feature_preprocessing/test_pca.py rename to test/test_pipeline/components/feature_preprocessing/test_pca.py index a764742d37..c7b47f7818 100644 --- a/tests/components/feature_preprocessing/test_pca.py +++ b/test/test_pipeline/components/feature_preprocessing/test_pca.py @@ -2,8 +2,8 @@ import numpy as np -from ParamSklearn.components.feature_preprocessing.pca import PCA -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase +from autosklearn.pipeline.components.feature_preprocessing.pca import PCA +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase class PCAComponentTest(PreprocessingTestCase): diff --git a/tests/components/feature_preprocessing/test_polynomial.py b/test/test_pipeline/components/feature_preprocessing/test_polynomial.py similarity index 91% rename from tests/components/feature_preprocessing/test_polynomial.py rename to test/test_pipeline/components/feature_preprocessing/test_polynomial.py index 5903f89e9b..855fc59bfd 100644 --- a/tests/components/feature_preprocessing/test_polynomial.py +++ b/test/test_pipeline/components/feature_preprocessing/test_polynomial.py @@ -1,9 +1,9 @@ import unittest from sklearn.linear_model import RidgeClassifier -from ParamSklearn.components.feature_preprocessing.polynomial import \ +from autosklearn.pipeline.components.feature_preprocessing.polynomial import \ PolynomialFeatures -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset import sklearn.metrics diff --git a/tests/components/feature_preprocessing/test_random_trees_embedding.py b/test/test_pipeline/components/feature_preprocessing/test_random_trees_embedding.py similarity index 92% rename from tests/components/feature_preprocessing/test_random_trees_embedding.py rename to test/test_pipeline/components/feature_preprocessing/test_random_trees_embedding.py index e7d102bc00..da74cea977 100644 --- a/tests/components/feature_preprocessing/test_random_trees_embedding.py +++ b/test/test_pipeline/components/feature_preprocessing/test_random_trees_embedding.py @@ -3,9 +3,9 @@ import numpy as np import scipy.sparse -from ParamSklearn.components.feature_preprocessing.random_trees_embedding import \ +from autosklearn.pipeline.components.feature_preprocessing.random_trees_embedding import \ RandomTreesEmbedding -from ParamSklearn.util import _test_preprocessing, get_dataset +from autosklearn.pipeline.util import _test_preprocessing, get_dataset class RandomTreesEmbeddingComponentTest(unittest.TestCase): diff --git a/tests/components/feature_preprocessing/test_select_percentile_classification.py b/test/test_pipeline/components/feature_preprocessing/test_select_percentile_classification.py similarity index 95% rename from tests/components/feature_preprocessing/test_select_percentile_classification.py rename to test/test_pipeline/components/feature_preprocessing/test_select_percentile_classification.py index 593e51e70d..c73786351f 100644 --- a/tests/components/feature_preprocessing/test_select_percentile_classification.py +++ b/test/test_pipeline/components/feature_preprocessing/test_select_percentile_classification.py @@ -4,8 +4,8 @@ import scipy.sparse import sklearn.preprocessing -from ParamSklearn.components.feature_preprocessing.select_percentile_classification import SelectPercentileClassification -from ParamSklearn.util import _test_preprocessing, get_dataset +from autosklearn.pipeline.components.feature_preprocessing.select_percentile_classification import SelectPercentileClassification +from autosklearn.pipeline.util import _test_preprocessing, get_dataset class SelectPercentileClassificationTest(unittest.TestCase): diff --git a/tests/components/feature_preprocessing/test_select_percentile_regression.py b/test/test_pipeline/components/feature_preprocessing/test_select_percentile_regression.py similarity index 90% rename from tests/components/feature_preprocessing/test_select_percentile_regression.py rename to test/test_pipeline/components/feature_preprocessing/test_select_percentile_regression.py index 379b49349a..2404f08dbf 100644 --- a/tests/components/feature_preprocessing/test_select_percentile_regression.py +++ b/test/test_pipeline/components/feature_preprocessing/test_select_percentile_regression.py @@ -2,8 +2,8 @@ import numpy as np -from ParamSklearn.components.feature_preprocessing.select_percentile_regression import SelectPercentileRegression -from ParamSklearn.util import _test_preprocessing, get_dataset +from autosklearn.pipeline.components.feature_preprocessing.select_percentile_regression import SelectPercentileRegression +from autosklearn.pipeline.util import _test_preprocessing, get_dataset class SelectPercentileRegressionTest(unittest.TestCase): diff --git a/tests/components/feature_preprocessing/test_select_rates.py b/test/test_pipeline/components/feature_preprocessing/test_select_rates.py similarity index 96% rename from tests/components/feature_preprocessing/test_select_rates.py rename to test/test_pipeline/components/feature_preprocessing/test_select_rates.py index 952820fe16..5f40b2fe9e 100644 --- a/tests/components/feature_preprocessing/test_select_rates.py +++ b/test/test_pipeline/components/feature_preprocessing/test_select_rates.py @@ -4,9 +4,9 @@ import scipy.sparse import sklearn.preprocessing -from ParamSklearn.components.feature_preprocessing.select_rates import \ +from autosklearn.pipeline.components.feature_preprocessing.select_rates import \ SelectRates -from ParamSklearn.util import _test_preprocessing, get_dataset +from autosklearn.pipeline.util import _test_preprocessing, get_dataset class SelectRatesComponentTest(unittest.TestCase): diff --git a/tests/components/feature_preprocessing/test_truncatedSVD.py b/test/test_pipeline/components/feature_preprocessing/test_truncatedSVD.py similarity index 92% rename from tests/components/feature_preprocessing/test_truncatedSVD.py rename to test/test_pipeline/components/feature_preprocessing/test_truncatedSVD.py index e43cc4e569..ce34950d03 100644 --- a/tests/components/feature_preprocessing/test_truncatedSVD.py +++ b/test/test_pipeline/components/feature_preprocessing/test_truncatedSVD.py @@ -1,9 +1,9 @@ import unittest from sklearn.linear_model import RidgeClassifier -from ParamSklearn.components.feature_preprocessing.truncatedSVD import \ +from autosklearn.pipeline.components.feature_preprocessing.truncatedSVD import \ TruncatedSVD -from ParamSklearn.util import _test_preprocessing, PreprocessingTestCase, \ +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, \ get_dataset import sklearn.metrics diff --git a/test/test_pipeline/components/regression/__init__.py b/test/test_pipeline/components/regression/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/components/regression/liblinear_svr.py b/test/test_pipeline/components/regression/liblinear_svr.py similarity index 80% rename from tests/components/regression/liblinear_svr.py rename to test/test_pipeline/components/regression/liblinear_svr.py index d52a42aff8..9fa68b912a 100644 --- a/tests/components/regression/liblinear_svr.py +++ b/test/test_pipeline/components/regression/liblinear_svr.py @@ -1,8 +1,8 @@ import unittest -from ParamSklearn.components.regression.liblinear_svr import \ +from autosklearn.pipeline.components.regression.liblinear_svr import \ LibLinear_SVR -from ParamSklearn.util import _test_regressor +from autosklearn.pipeline.util import _test_regressor import sklearn.metrics diff --git a/tests/components/regression/test_adaboost.py b/test/test_pipeline/components/regression/test_adaboost.py similarity index 88% rename from tests/components/regression/test_adaboost.py rename to test/test_pipeline/components/regression/test_adaboost.py index eeb6448106..c1baf78934 100644 --- a/tests/components/regression/test_adaboost.py +++ b/test/test_pipeline/components/regression/test_adaboost.py @@ -1,8 +1,8 @@ import unittest -from ParamSklearn.components.regression.adaboost import \ +from autosklearn.pipeline.components.regression.adaboost import \ AdaboostRegressor -from ParamSklearn.util import _test_regressor +from autosklearn.pipeline.util import _test_regressor import sklearn.metrics diff --git a/tests/components/regression/test_decision_tree.py b/test/test_pipeline/components/regression/test_decision_tree.py similarity index 85% rename from tests/components/regression/test_decision_tree.py rename to test/test_pipeline/components/regression/test_decision_tree.py index 36ba14a7f2..e3f9f520dd 100644 --- a/tests/components/regression/test_decision_tree.py +++ b/test/test_pipeline/components/regression/test_decision_tree.py @@ -1,7 +1,7 @@ import unittest -from ParamSklearn.components.regression.decision_tree import DecisionTree -from ParamSklearn.util import _test_regressor +from autosklearn.pipeline.components.regression.decision_tree import DecisionTree +from autosklearn.pipeline.util import _test_regressor import sklearn.metrics diff --git a/tests/components/regression/test_extra_trees.py b/test/test_pipeline/components/regression/test_extra_trees.py similarity index 88% rename from tests/components/regression/test_extra_trees.py rename to test/test_pipeline/components/regression/test_extra_trees.py index d55de0a3b0..94012a0c6c 100644 --- a/tests/components/regression/test_extra_trees.py +++ b/test/test_pipeline/components/regression/test_extra_trees.py @@ -1,8 +1,8 @@ import unittest -from ParamSklearn.components.regression.extra_trees import \ +from autosklearn.pipeline.components.regression.extra_trees import \ ExtraTreesRegressor -from ParamSklearn.util import _test_regressor, _test_regressor_iterative_fit +from autosklearn.pipeline.util import _test_regressor, _test_regressor_iterative_fit import sklearn.metrics diff --git a/tests/components/regression/test_gaussian_process.py b/test/test_pipeline/components/regression/test_gaussian_process.py similarity index 78% rename from tests/components/regression/test_gaussian_process.py rename to test/test_pipeline/components/regression/test_gaussian_process.py index a909ee1b14..7977eddafc 100644 --- a/tests/components/regression/test_gaussian_process.py +++ b/test/test_pipeline/components/regression/test_gaussian_process.py @@ -1,7 +1,7 @@ import unittest -from ParamSklearn.components.regression.gaussian_process import GaussianProcess -from ParamSklearn.util import _test_regressor +from autosklearn.pipeline.components.regression.gaussian_process import GaussianProcess +from autosklearn.pipeline.util import _test_regressor import sklearn.metrics diff --git a/tests/components/regression/test_gradient_boosting.py b/test/test_pipeline/components/regression/test_gradient_boosting.py similarity index 79% rename from tests/components/regression/test_gradient_boosting.py rename to test/test_pipeline/components/regression/test_gradient_boosting.py index 5f7074c8ce..4a331a79fc 100644 --- a/tests/components/regression/test_gradient_boosting.py +++ b/test/test_pipeline/components/regression/test_gradient_boosting.py @@ -1,7 +1,7 @@ import unittest -from ParamSklearn.components.regression.gradient_boosting import GradientBoosting -from ParamSklearn.util import _test_regressor, _test_regressor_iterative_fit +from autosklearn.pipeline.components.regression.gradient_boosting import GradientBoosting +from autosklearn.pipeline.util import _test_regressor, _test_regressor_iterative_fit import sklearn.metrics diff --git a/tests/components/regression/test_k_nearest_neighbors.py b/test/test_pipeline/components/regression/test_k_nearest_neighbors.py similarity index 87% rename from tests/components/regression/test_k_nearest_neighbors.py rename to test/test_pipeline/components/regression/test_k_nearest_neighbors.py index eb863176cd..19aa3dce03 100644 --- a/tests/components/regression/test_k_nearest_neighbors.py +++ b/test/test_pipeline/components/regression/test_k_nearest_neighbors.py @@ -1,8 +1,8 @@ import unittest -from ParamSklearn.components.regression.k_nearest_neighbors import \ +from autosklearn.pipeline.components.regression.k_nearest_neighbors import \ KNearestNeighborsRegressor -from ParamSklearn.util import _test_regressor +from autosklearn.pipeline.util import _test_regressor import sklearn.metrics diff --git a/tests/components/regression/test_random_forests.py b/test/test_pipeline/components/regression/test_random_forests.py similarity index 85% rename from tests/components/regression/test_random_forests.py rename to test/test_pipeline/components/regression/test_random_forests.py index 0909cbb0c6..f5d8936da7 100644 --- a/tests/components/regression/test_random_forests.py +++ b/test/test_pipeline/components/regression/test_random_forests.py @@ -1,7 +1,7 @@ import unittest -from ParamSklearn.components.regression.random_forest import RandomForest -from ParamSklearn.util import _test_regressor, _test_regressor_iterative_fit +from autosklearn.pipeline.components.regression.random_forest import RandomForest +from autosklearn.pipeline.util import _test_regressor, _test_regressor_iterative_fit import sklearn.metrics diff --git a/tests/components/regression/test_ridge_regression.py b/test/test_pipeline/components/regression/test_ridge_regression.py similarity index 87% rename from tests/components/regression/test_ridge_regression.py rename to test/test_pipeline/components/regression/test_ridge_regression.py index 15cbea26c6..19b7382e6d 100644 --- a/tests/components/regression/test_ridge_regression.py +++ b/test/test_pipeline/components/regression/test_ridge_regression.py @@ -1,8 +1,8 @@ import unittest -from ParamSklearn.components.regression.ridge_regression import RidgeRegression -from ParamSklearn.components.feature_preprocessing.kitchen_sinks import RandomKitchenSinks -from ParamSklearn.util import _test_regressor, get_dataset +from autosklearn.pipeline.components.regression.ridge_regression import RidgeRegression +from autosklearn.pipeline.components.feature_preprocessing.kitchen_sinks import RandomKitchenSinks +from autosklearn.pipeline.util import _test_regressor, get_dataset import sklearn.metrics diff --git a/tests/components/regression/test_sgd.py b/test/test_pipeline/components/regression/test_sgd.py similarity index 84% rename from tests/components/regression/test_sgd.py rename to test/test_pipeline/components/regression/test_sgd.py index 4c40a467d0..fb15bb1bb6 100644 --- a/tests/components/regression/test_sgd.py +++ b/test/test_pipeline/components/regression/test_sgd.py @@ -1,7 +1,7 @@ import unittest -from ParamSklearn.components.regression.sgd import SGD -from ParamSklearn.util import _test_regressor, _test_regressor_iterative_fit +from autosklearn.pipeline.components.regression.sgd import SGD +from autosklearn.pipeline.util import _test_regressor, _test_regressor_iterative_fit import sklearn.metrics diff --git a/tests/components/regression/test_support_vector_regression.py b/test/test_pipeline/components/regression/test_support_vector_regression.py similarity index 84% rename from tests/components/regression/test_support_vector_regression.py rename to test/test_pipeline/components/regression/test_support_vector_regression.py index 52018824e3..2ecb3c64a7 100644 --- a/tests/components/regression/test_support_vector_regression.py +++ b/test/test_pipeline/components/regression/test_support_vector_regression.py @@ -1,7 +1,7 @@ import unittest -from ParamSklearn.components.regression.libsvm_svr import LibSVM_SVR -from ParamSklearn.util import _test_regressor +from autosklearn.pipeline.components.regression.libsvm_svr import LibSVM_SVR +from autosklearn.pipeline.util import _test_regressor import sklearn.metrics diff --git a/tests/implementations/__init__.py b/test/test_pipeline/implementations/__init__.py similarity index 100% rename from tests/implementations/__init__.py rename to test/test_pipeline/implementations/__init__.py diff --git a/tests/implementations/test_OneHotEncoder.py b/test/test_pipeline/implementations/test_OneHotEncoder.py similarity index 99% rename from tests/implementations/test_OneHotEncoder.py rename to test/test_pipeline/implementations/test_OneHotEncoder.py index 1cb24ab08e..c2cb2b4546 100644 --- a/tests/implementations/test_OneHotEncoder.py +++ b/test/test_pipeline/implementations/test_OneHotEncoder.py @@ -4,7 +4,7 @@ import scipy.sparse from sklearn.utils.testing import assert_array_almost_equal -from ParamSklearn.implementations.OneHotEncoder import OneHotEncoder +from autosklearn.pipeline.implementations.OneHotEncoder import OneHotEncoder dense1 = np.array([[0, 1, 0], [0, 0, 0], diff --git a/tests/implementations/test_ProjLogit.py b/test/test_pipeline/implementations/test_ProjLogit.py similarity index 94% rename from tests/implementations/test_ProjLogit.py rename to test/test_pipeline/implementations/test_ProjLogit.py index 5b9dc0442c..626a95636a 100644 --- a/tests/implementations/test_ProjLogit.py +++ b/test/test_pipeline/implementations/test_ProjLogit.py @@ -3,7 +3,7 @@ import numpy as np #import scipy.io -from ParamSklearn.implementations.ProjLogit import ProjLogit +from autosklearn.pipeline.implementations.ProjLogit import ProjLogit class TestProjLogit(unittest.TestCase): diff --git a/tests/implementations/test_imputation.py b/test/test_pipeline/implementations/test_imputation.py similarity index 99% rename from tests/implementations/test_imputation.py rename to test/test_pipeline/implementations/test_imputation.py index 99d4b62c5c..29119ca48b 100644 --- a/tests/implementations/test_imputation.py +++ b/test/test_pipeline/implementations/test_imputation.py @@ -9,7 +9,7 @@ from sklearn.utils.testing import assert_false from sklearn.utils.testing import assert_true -from ParamSklearn.implementations.Imputation import Imputer +from autosklearn.pipeline.implementations.Imputation import Imputer from sklearn.pipeline import Pipeline from sklearn import grid_search from sklearn import tree diff --git a/tests/implementations/test_minmaxscaler.py b/test/test_pipeline/implementations/test_minmaxscaler.py similarity index 97% rename from tests/implementations/test_minmaxscaler.py rename to test/test_pipeline/implementations/test_minmaxscaler.py index 3b49a80f68..d22e3e2ec3 100644 --- a/tests/implementations/test_minmaxscaler.py +++ b/test/test_pipeline/implementations/test_minmaxscaler.py @@ -5,8 +5,8 @@ from sklearn.utils.testing import assert_array_almost_equal from sklearn.datasets import load_iris -from ParamSklearn.util import get_dataset -from ParamSklearn.implementations.MinMaxScaler import MinMaxScaler +from autosklearn.pipeline.util import get_dataset +from autosklearn.pipeline.implementations.MinMaxScaler import MinMaxScaler class MinMaxScalerTest(unittest.TestCase): diff --git a/tests/implementations/test_standard_scaler.py b/test/test_pipeline/implementations/test_standard_scaler.py similarity index 98% rename from tests/implementations/test_standard_scaler.py rename to test/test_pipeline/implementations/test_standard_scaler.py index 09f9d7fbd7..044f3edf8a 100644 --- a/tests/implementations/test_standard_scaler.py +++ b/test/test_pipeline/implementations/test_standard_scaler.py @@ -6,8 +6,8 @@ from sklearn.utils.testing import assert_array_almost_equal from sklearn.preprocessing.data import scale -from ParamSklearn.implementations.StandardScaler import StandardScaler -from ParamSklearn.util import get_dataset +from autosklearn.pipeline.implementations.StandardScaler import StandardScaler +from autosklearn.pipeline.util import get_dataset matrix1 = [[0, 1, 2], [0, 1, 2], diff --git a/tests/implementations/test_util.py b/test/test_pipeline/implementations/test_util.py similarity index 96% rename from tests/implementations/test_util.py rename to test/test_pipeline/implementations/test_util.py index 8b688e7f74..cc7a1fc714 100644 --- a/tests/implementations/test_util.py +++ b/test/test_pipeline/implementations/test_util.py @@ -3,7 +3,7 @@ import numpy as np from sklearn.utils.testing import assert_array_almost_equal -from ParamSklearn.implementations.util import softmax +from autosklearn.pipeline.implementations.util import softmax class UtilTest(unittest.TestCase): def test_softmax_binary(self): diff --git a/tests/test_base.py b/test/test_pipeline/test_base.py similarity index 89% rename from tests/test_base.py rename to test/test_pipeline/test_base.py index 6b5cb0a2c1..bc9663dcf1 100644 --- a/tests/test_base.py +++ b/test/test_pipeline/test_base.py @@ -2,23 +2,23 @@ import HPOlibConfigSpace.configuration_space -import ParamSklearn.base -import ParamSklearn.components.feature_preprocessing -import ParamSklearn.components.classification +import autosklearn.pipeline.base +import autosklearn.pipeline.components.feature_preprocessing +import autosklearn.pipeline.components.classification class BaseTest(unittest.TestCase): def test_get_hyperparameter_configuration_space_3choices(self): - base = ParamSklearn.base.ParamSklearnBaseEstimator + base = autosklearn.pipeline.base.BasePipeline cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() dataset_properties = {'target_type': 'classification'} exclude = {} include = {} - pipeline = [('p0', ParamSklearn.components.feature_preprocessing._preprocessors[ + pipeline = [('p0', autosklearn.pipeline.components.feature_preprocessing._preprocessors[ 'preprocessor']), - ('p1', ParamSklearn.components.feature_preprocessing._preprocessors[ + ('p1', autosklearn.pipeline.components.feature_preprocessing._preprocessors[ 'preprocessor']), - ('c', ParamSklearn.components.classification._classifiers[ + ('c', autosklearn.pipeline.components.classification._classifiers[ 'classifier'])] cs = base._get_hyperparameter_search_space(cs, dataset_properties, exclude, include, pipeline) diff --git a/tests/test_classification.py b/test/test_pipeline/test_classification.py similarity index 89% rename from tests/test_classification.py rename to test/test_pipeline/test_classification.py index 9306e4aca5..926198d2df 100644 --- a/tests/test_classification.py +++ b/test/test_pipeline/test_classification.py @@ -17,16 +17,16 @@ Configuration from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from ParamSklearn.classification import ParamSklearnClassifier -from ParamSklearn.components.base import ParamSklearnClassificationAlgorithm -from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -import ParamSklearn.components.classification as classification_components -import ParamSklearn.components.feature_preprocessing as preprocessing_components -from ParamSklearn.util import get_dataset -from ParamSklearn.constants import * +from autosklearn.pipeline.classification import SimpleClassificationPipeline +from autosklearn.pipeline.components.base import \ + AutoSklearnClassificationAlgorithm, AutoSklearnPreprocessingAlgorithm +import autosklearn.pipeline.components.classification as classification_components +import autosklearn.pipeline.components.feature_preprocessing as preprocessing_components +from autosklearn.pipeline.util import get_dataset +from autosklearn.pipeline.constants import * -class TestParamSklearnClassifier(unittest.TestCase): +class SimpleClassificationPipelineTest(unittest.TestCase): def test_io_dict(self): classifiers = classification_components._classifiers for c in classifiers: @@ -55,7 +55,7 @@ def test_find_classifiers(self): for key in classifiers: if hasattr(classifiers[key], 'get_components'): continue - self.assertIn(ParamSklearnClassificationAlgorithm, + self.assertIn(AutoSklearnClassificationAlgorithm, classifiers[key].__bases__) def test_find_preprocessors(self): @@ -64,15 +64,15 @@ def test_find_preprocessors(self): for key in preprocessors: if hasattr(preprocessors[key], 'get_components'): continue - self.assertIn(ParamSklearnPreprocessingAlgorithm, + self.assertIn(AutoSklearnPreprocessingAlgorithm, preprocessors[key].__bases__) def test_default_configuration(self): for i in range(2): - cs = ParamSklearnClassifier.get_hyperparameter_search_space() + cs = SimpleClassificationPipeline.get_hyperparameter_search_space() default = cs.get_default_configuration() X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris') - auto = ParamSklearnClassifier(default) + auto = SimpleClassificationPipeline(default) auto = auto.fit(X_train, Y_train) predictions = auto.predict(X_test) self.assertAlmostEqual(0.9599999999999995, @@ -80,11 +80,11 @@ def test_default_configuration(self): scores = auto.predict_proba(X_test) def test_repr(self): - cs = ParamSklearnClassifier.get_hyperparameter_search_space() + cs = SimpleClassificationPipeline.get_hyperparameter_search_space() default = cs.get_default_configuration() - representation = repr(ParamSklearnClassifier(default)) + representation = repr(SimpleClassificationPipeline(default)) cls = eval(representation) - self.assertIsInstance(cls, ParamSklearnClassifier) + self.assertIsInstance(cls, SimpleClassificationPipeline) def test_multilabel(self): # Use a limit of ~4GiB @@ -92,7 +92,7 @@ def test_multilabel(self): resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) dataset_properties = {'multilabel': True} - cs = ParamSklearnClassifier.get_hyperparameter_search_space(dataset_properties=dataset_properties) + cs = SimpleClassificationPipeline.get_hyperparameter_search_space(dataset_properties=dataset_properties) print(cs) cs.seed(5) @@ -122,7 +122,7 @@ def test_multilabel(self): if 'classifier:sgd:n_iter' in config: config._values['classifier:sgd:n_iter'] = 5 - cls = ParamSklearnClassifier(config, random_state=1) + cls = SimpleClassificationPipeline(config, random_state=1) print(config) try: cls.fit(X_train, Y_train) @@ -171,7 +171,7 @@ def test_configurations(self): limit = 4000 * 1024 * 1024 resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) - cs = ParamSklearnClassifier.get_hyperparameter_search_space() + cs = SimpleClassificationPipeline.get_hyperparameter_search_space() print(cs) cs.seed(1) @@ -185,7 +185,7 @@ def test_configurations(self): config._values['classifier:sgd:n_iter'] = 5 X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') - cls = ParamSklearnClassifier(config, random_state=1) + cls = SimpleClassificationPipeline(config, random_state=1) print(config) try: cls.fit(X_train, Y_train) @@ -232,7 +232,7 @@ def test_configurations_signed_data(self): limit = 4000 * 1024 * 1024 resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) - cs = ParamSklearnClassifier.get_hyperparameter_search_space( + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( dataset_properties={'signed': True}) print(cs) @@ -246,7 +246,7 @@ def test_configurations_signed_data(self): config._values['classifier:sgd:n_iter'] = 5 X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') - cls = ParamSklearnClassifier(config, random_state=1) + cls = SimpleClassificationPipeline(config, random_state=1) print(config) try: cls.fit(X_train, Y_train) @@ -293,7 +293,7 @@ def test_configurations_sparse(self): limit = 4000 * 1024 * 1024 resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) - cs = ParamSklearnClassifier.get_hyperparameter_search_space( + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( dataset_properties={'sparse': True}) print(cs) for i in range(10): @@ -307,7 +307,7 @@ def test_configurations_sparse(self): print(config) X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', make_sparse=True) - cls = ParamSklearnClassifier(config, random_state=1) + cls = SimpleClassificationPipeline(config, random_state=1) try: cls.fit(X_train, Y_train) predictions = cls.predict(X_test) @@ -345,7 +345,7 @@ def test_configurations_categorical_data(self): limit = 4000 * 1024 * 1024 resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) - cs = ParamSklearnClassifier.get_hyperparameter_search_space( + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( dataset_properties={'sparse': True}) print(cs) for i in range(10): @@ -370,7 +370,7 @@ def test_configurations_categorical_data(self): X_train, X_test, Y_train, Y_test = \ sklearn.cross_validation.train_test_split(X, y) - cls = ParamSklearnClassifier(config, random_state=1,) + cls = SimpleClassificationPipeline(config, random_state=1,) try: cls.fit(X_train, Y_train, init_params={'one_hot_encoding:categorical_features': categorical}) @@ -405,7 +405,7 @@ def test_configurations_categorical_data(self): raise e def test_get_hyperparameter_search_space(self): - cs = ParamSklearnClassifier.get_hyperparameter_search_space() + cs = SimpleClassificationPipeline.get_hyperparameter_search_space() self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() @@ -427,40 +427,40 @@ def test_get_hyperparameter_search_space(self): self.assertEqual(len(hyperparameters) - 6, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): - cs = ParamSklearnClassifier.get_hyperparameter_search_space( + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( include={'classifier': ['libsvm_svc']}) self.assertEqual(cs.get_hyperparameter('classifier:__choice__'), CategoricalHyperparameter('classifier:__choice__', ['libsvm_svc'])) - cs = ParamSklearnClassifier.get_hyperparameter_search_space( + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( exclude={'classifier': ['libsvm_svc']}) self.assertNotIn('libsvm_svc', str(cs)) - cs = ParamSklearnClassifier.get_hyperparameter_search_space( + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( include={'preprocessor': ['select_percentile_classification']}) self.assertEqual(cs.get_hyperparameter('preprocessor:__choice__'), CategoricalHyperparameter('preprocessor:__choice__', ['select_percentile_classification'])) - cs = ParamSklearnClassifier.get_hyperparameter_search_space( + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( exclude={'preprocessor': ['select_percentile_classification']}) self.assertNotIn('select_percentile_classification', str(cs)) def test_get_hyperparameter_search_space_preprocessor_contradicts_default_classifier(self): - cs = ParamSklearnClassifier.get_hyperparameter_search_space( + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( include={'preprocessor': ['densifier']}, dataset_properties={'sparse': True}) self.assertEqual(cs.get_hyperparameter('classifier:__choice__').default, 'qda') - cs = ParamSklearnClassifier.get_hyperparameter_search_space( + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( include={'preprocessor': ['nystroem_sampler']}) self.assertEqual(cs.get_hyperparameter('classifier:__choice__').default, 'sgd') def test_get_hyperparameter_search_space_only_forbidden_combinations(self): self.assertRaisesRegexp(AssertionError, "No valid pipeline found.", - ParamSklearnClassifier.get_hyperparameter_search_space, + SimpleClassificationPipeline.get_hyperparameter_search_space, include={'classifier': ['multinomial_nb'], 'preprocessor': ['pca']}, dataset_properties={'sparse':True}) @@ -469,38 +469,38 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): # data are located behind the densifier self.assertRaisesRegexp(ValueError, "Cannot find a legal default " "configuration.", - ParamSklearnClassifier.get_hyperparameter_search_space, + SimpleClassificationPipeline.get_hyperparameter_search_space, include={'classifier': ['liblinear_svc'], 'preprocessor': ['densifier']}, dataset_properties={'sparse': True}) @unittest.skip("Wait until HPOlibConfigSpace is fixed.") def test_get_hyperparameter_search_space_dataset_properties(self): - cs_mc = ParamSklearnClassifier.get_hyperparameter_search_space( + cs_mc = SimpleClassificationPipeline.get_hyperparameter_search_space( dataset_properties={'multiclass': True}) self.assertNotIn('bernoulli_nb', str(cs_mc)) - cs_ml = ParamSklearnClassifier.get_hyperparameter_search_space( + cs_ml = SimpleClassificationPipeline.get_hyperparameter_search_space( dataset_properties={'multilabel': True}) self.assertNotIn('k_nearest_neighbors', str(cs_ml)) self.assertNotIn('liblinear', str(cs_ml)) self.assertNotIn('libsvm_svc', str(cs_ml)) self.assertNotIn('sgd', str(cs_ml)) - cs_sp = ParamSklearnClassifier.get_hyperparameter_search_space( + cs_sp = SimpleClassificationPipeline.get_hyperparameter_search_space( dataset_properties={'sparse': True}) self.assertIn('extra_trees', str(cs_sp)) self.assertIn('gradient_boosting', str(cs_sp)) self.assertIn('random_forest', str(cs_sp)) - cs_mc_ml = ParamSklearnClassifier.get_hyperparameter_search_space( + cs_mc_ml = SimpleClassificationPipeline.get_hyperparameter_search_space( dataset_properties={'multilabel': True, 'multiclass': True}) self.assertEqual(cs_ml, cs_mc_ml) def test_predict_batched(self): - cs = ParamSklearnClassifier.get_hyperparameter_search_space() + cs = SimpleClassificationPipeline.get_hyperparameter_search_space() default = cs.get_default_configuration() - cls = ParamSklearnClassifier(default) + cls = SimpleClassificationPipeline(default) # Multiclass X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') @@ -528,7 +528,7 @@ def test_predict_batched(self): assert_array_almost_equal(prediction_, prediction) def test_predict_batched_sparse(self): - cs = ParamSklearnClassifier.get_hyperparameter_search_space( + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( dataset_properties={'sparse': True}) config = Configuration(cs, values={"balancing:strategy": "none", @@ -547,7 +547,7 @@ def test_predict_batched_sparse(self): 'classifier:random_forest:n_estimators': 100, 'classifier:random_forest:min_weight_fraction_leaf': 0.0, "rescaling:__choice__": "min/max"}) - cls = ParamSklearnClassifier(config) + cls = SimpleClassificationPipeline(config) # Multiclass X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', @@ -577,11 +577,11 @@ def test_predict_batched_sparse(self): assert_array_almost_equal(prediction_, prediction) def test_predict_proba_batched(self): - cs = ParamSklearnClassifier.get_hyperparameter_search_space() + cs = SimpleClassificationPipeline.get_hyperparameter_search_space() default = cs.get_default_configuration() # Multiclass - cls = ParamSklearnClassifier(default) + cls = SimpleClassificationPipeline(default) X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') cls.fit(X_train, Y_train) X_test_ = X_test.copy() @@ -595,7 +595,7 @@ def test_predict_proba_batched(self): assert_array_almost_equal(prediction_, prediction) # Multilabel - cls = ParamSklearnClassifier(default) + cls = SimpleClassificationPipeline(default) X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') Y_train = np.array([(y, 26 - y) for y in Y_train]) cls.fit(X_train, Y_train) @@ -612,7 +612,7 @@ def test_predict_proba_batched(self): assert_array_almost_equal(prediction_, prediction) def test_predict_proba_batched_sparse(self): - cs = ParamSklearnClassifier.get_hyperparameter_search_space( + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( dataset_properties={'sparse': True}) config = Configuration(cs, @@ -634,7 +634,7 @@ def test_predict_proba_batched_sparse(self): "rescaling:__choice__": "min/max"}) # Multiclass - cls = ParamSklearnClassifier(config) + cls = SimpleClassificationPipeline(config) X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', make_sparse=True) cls.fit(X_train, Y_train) @@ -649,7 +649,7 @@ def test_predict_proba_batched_sparse(self): assert_array_almost_equal(prediction_, prediction) # Multilabel - cls = ParamSklearnClassifier(config) + cls = SimpleClassificationPipeline(config) X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', make_sparse=True) Y_train = np.array([(y, 26 - y) for y in Y_train]) diff --git a/tests/test_create_searchspace_util_classification.py b/test/test_pipeline/test_create_searchspace_util_classification.py similarity index 76% rename from tests/test_create_searchspace_util_classification.py rename to test/test_pipeline/test_create_searchspace_util_classification.py index b4720710b8..a93296ecec 100644 --- a/tests/test_create_searchspace_util_classification.py +++ b/test/test_pipeline/test_create_searchspace_util_classification.py @@ -6,15 +6,15 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from ParamSklearn.components.classification.liblinear_svc import LibLinear_SVC -from ParamSklearn.components.classification.lda import LDA +from autosklearn.pipeline.components.classification.liblinear_svc import LibLinear_SVC +from autosklearn.pipeline.components.classification.lda import LDA -from ParamSklearn.components.feature_preprocessing.pca import PCA -from ParamSklearn.components.feature_preprocessing.truncatedSVD import TruncatedSVD -from ParamSklearn.components.feature_preprocessing.no_preprocessing import NoPreprocessing -from ParamSklearn.components.feature_preprocessing.fast_ica import FastICA -from ParamSklearn.components.feature_preprocessing.random_trees_embedding import RandomTreesEmbedding -import ParamSklearn.create_searchspace_util +from autosklearn.pipeline.components.feature_preprocessing.pca import PCA +from autosklearn.pipeline.components.feature_preprocessing.truncatedSVD import TruncatedSVD +from autosklearn.pipeline.components.feature_preprocessing.no_preprocessing import NoPreprocessing +from autosklearn.pipeline.components.feature_preprocessing.fast_ica import FastICA +from autosklearn.pipeline.components.feature_preprocessing.random_trees_embedding import RandomTreesEmbedding +import autosklearn.pipeline.create_searchspace_util class TestCreateClassificationSearchspace(unittest.TestCase): @@ -36,37 +36,37 @@ def get_available_components(self, *args, **kwargs): return classifiers # Dense - m = ParamSklearn.create_searchspace_util.get_match_array( + m = autosklearn.pipeline.create_searchspace_util.get_match_array( pipeline=((0, PCA), (1, LDA)), dataset_properties={'sparse': True}) self.assertEqual(numpy.sum(m), 0) - m = ParamSklearn.create_searchspace_util.get_match_array( + m = autosklearn.pipeline.create_searchspace_util.get_match_array( pipeline=((0, PCA), (1, LDA)), dataset_properties={'sparse': False}) self.assertEqual(m, [[1]]) # Sparse preprocessors['tSVD'] = TruncatedSVD - m = ParamSklearn.create_searchspace_util.get_match_array( + m = autosklearn.pipeline.create_searchspace_util.get_match_array( pipeline=((0, Preprocessors), (1, LDA)), dataset_properties={'sparse': True}) self.assertEqual(m[0], [0]) # pca self.assertEqual(m[1], [1]) # svd - m = ParamSklearn.create_searchspace_util.get_match_array( + m = autosklearn.pipeline.create_searchspace_util.get_match_array( pipeline=((0, Preprocessors), (1, LDA)), dataset_properties={'sparse': False}) self.assertEqual(m[0], [1]) # pca self.assertEqual(m[1], [0]) # svd preprocessors['none'] = NoPreprocessing - m = ParamSklearn.create_searchspace_util.get_match_array( + m = autosklearn.pipeline.create_searchspace_util.get_match_array( pipeline=((0, Preprocessors), (1, LDA)), dataset_properties={'sparse': True}) self.assertEqual(m[0, :], [0]) # pca self.assertEqual(m[1, :], [1]) # tsvd self.assertEqual(m[2, :], [0]) # none - m = ParamSklearn.create_searchspace_util.get_match_array( + m = autosklearn.pipeline.create_searchspace_util.get_match_array( pipeline=((0, Preprocessors), (1, LDA)), dataset_properties={'sparse': False}) self.assertEqual(m[0, :], [1]) # pca @@ -74,14 +74,14 @@ def get_available_components(self, *args, **kwargs): self.assertEqual(m[2, :], [1]) # none classifiers['libsvm'] = LibLinear_SVC - m = ParamSklearn.create_searchspace_util.get_match_array( + m = autosklearn.pipeline.create_searchspace_util.get_match_array( pipeline=((0, Preprocessors), (1, Classifiers)), dataset_properties={'sparse': False}) self.assertListEqual(list(m[0, :]), [1, 1]) # pca self.assertListEqual(list(m[1, :]), [0, 0]) # tsvd self.assertListEqual(list(m[2, :]), [1, 1]) # none - m = ParamSklearn.create_searchspace_util.get_match_array( + m = autosklearn.pipeline.create_searchspace_util.get_match_array( pipeline=((0, Preprocessors), (1, Classifiers)), dataset_properties={'sparse': True}) self.assertListEqual(list(m[0, :]), [0, 0]) # pca @@ -90,7 +90,7 @@ def get_available_components(self, *args, **kwargs): # Do fancy 3d stuff preprocessors['random_trees'] = RandomTreesEmbedding - m = ParamSklearn.create_searchspace_util.get_match_array( + m = autosklearn.pipeline.create_searchspace_util.get_match_array( pipeline=((0, Preprocessors), (1, Preprocessors), (2, Classifiers)), dataset_properties={'sparse': False}) # PCA followed by truncated SVD is forbidden @@ -118,7 +118,7 @@ def test_add_forbidden(self): choices=classifier_list) cs.add_hyperparameter(preprocessor) cs.add_hyperparameter(classifier) - new_cs = ParamSklearn.create_searchspace_util.add_forbidden( + new_cs = autosklearn.pipeline.create_searchspace_util.add_forbidden( conf_space=cs, node_0_list=preprocessors_list, node_1_list=classifier_list, matches=m, node_0_name='preprocessor', node_1_name="classifier") @@ -126,7 +126,7 @@ def test_add_forbidden(self): self.assertIsInstance(new_cs, ConfigurationSpace) m[1, 1] = 0 - new_cs = ParamSklearn.create_searchspace_util.add_forbidden( + new_cs = autosklearn.pipeline.create_searchspace_util.add_forbidden( conf_space=cs, node_0_list=preprocessors_list, node_1_list=classifier_list, matches=m, node_0_name='preprocessor', node_1_name="classifier") diff --git a/tests/test_doctests.py b/test/test_pipeline/test_doctests.py similarity index 79% rename from tests/test_doctests.py rename to test/test_pipeline/test_doctests.py index 8082930b76..904a725778 100644 --- a/tests/test_doctests.py +++ b/test/test_pipeline/test_doctests.py @@ -2,12 +2,12 @@ import os import unittest -import ParamSklearn +import autosklearn class DocumentationTest(unittest.TestCase): def test_first_steps(self): - filename = os.path.dirname(ParamSklearn.__file__) + filename = os.path.dirname(autosklearn.__file__) filename = os.path.join(filename, "..", "source", "first_steps.rst") failed, run = doctest.testfile(filename, module_relative=False) self.assertEqual(0, failed) \ No newline at end of file diff --git a/tests/test_regression.py b/test/test_pipeline/test_regression.py similarity index 81% rename from tests/test_regression.py rename to test/test_pipeline/test_regression.py index aad8005b3a..709191534b 100644 --- a/tests/test_regression.py +++ b/test/test_pipeline/test_regression.py @@ -16,16 +16,16 @@ from HPOlibConfigSpace.configuration_space import ConfigurationSpace from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter -from ParamSklearn.regression import ParamSklearnRegressor -from ParamSklearn.components.base import ParamSklearnRegressionAlgorithm -from ParamSklearn.components.base import ParamSklearnPreprocessingAlgorithm -import ParamSklearn.components.regression as regression_components -import ParamSklearn.components.feature_preprocessing as preprocessing_components -from ParamSklearn.util import get_dataset -from ParamSklearn.constants import * +from autosklearn.pipeline.regression import SimpleRegressionPipeline +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm, AutoSklearnRegressionAlgorithm +import autosklearn.pipeline.components.regression as regression_components +import autosklearn.pipeline.components.feature_preprocessing as preprocessing_components +from autosklearn.pipeline.util import get_dataset +from autosklearn.pipeline.constants import * -class TestParamSKlearnRegressor(unittest.TestCase): +class SimpleRegressionPipelineTest(unittest.TestCase): def test_io_dict(self): regressors = regression_components._regressors @@ -58,7 +58,7 @@ def test_find_regressors(self): for key in regressors: if hasattr(regressors[key], 'get_components'): continue - self.assertIn(ParamSklearnRegressionAlgorithm, + self.assertIn(AutoSklearnRegressionAlgorithm, regressors[key].__bases__) def test_find_preprocessors(self): @@ -67,7 +67,7 @@ def test_find_preprocessors(self): for key in preprocessors: if hasattr(preprocessors[key], 'get_components'): continue - self.assertIn(ParamSklearnPreprocessingAlgorithm, + self.assertIn(AutoSklearnPreprocessingAlgorithm, preprocessors[key].__bases__) def test_configurations(self): @@ -75,7 +75,7 @@ def test_configurations(self): limit = 4000 * 1024 * 1024 resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) - cs = ParamSklearnRegressor.get_hyperparameter_search_space() + cs = SimpleRegressionPipeline.get_hyperparameter_search_space() print(cs) cs.seed(1) @@ -87,7 +87,7 @@ def test_configurations(self): config._values['regressor:sgd:n_iter'] = 5 X_train, Y_train, X_test, Y_test = get_dataset(dataset='boston') - cls = ParamSklearnRegressor(config, random_state=1) + cls = SimpleRegressionPipeline(config, random_state=1) print(config) try: cls.fit(X_train, Y_train) @@ -131,10 +131,10 @@ def test_configurations(self): def test_default_configuration(self): for i in range(2): - cs = ParamSklearnRegressor.get_hyperparameter_search_space() + cs = SimpleRegressionPipeline.get_hyperparameter_search_space() default = cs.get_default_configuration() X_train, Y_train, X_test, Y_test = get_dataset(dataset='diabetes') - auto = ParamSklearnRegressor(default) + auto = SimpleRegressionPipeline(default) auto = auto.fit(X_train, Y_train) predictions = auto.predict(copy.deepcopy(X_test)) # The lower the worse @@ -144,14 +144,14 @@ def test_default_configuration(self): self.assertEqual(model_score, r2_score) def test_repr(self): - cs = ParamSklearnRegressor.get_hyperparameter_search_space() + cs = SimpleRegressionPipeline.get_hyperparameter_search_space() default = cs.get_default_configuration() - representation = repr(ParamSklearnRegressor(default)) + representation = repr(SimpleRegressionPipeline(default)) cls = eval(representation) - self.assertIsInstance(cls, ParamSklearnRegressor) + self.assertIsInstance(cls, SimpleRegressionPipeline) def test_get_hyperparameter_search_space(self): - cs = ParamSklearnRegressor.get_hyperparameter_search_space() + cs = SimpleRegressionPipeline.get_hyperparameter_search_space() self.assertIsInstance(cs, ConfigurationSpace) conditions = cs.get_conditions() hyperparameters = cs.get_hyperparameters() @@ -159,34 +159,34 @@ def test_get_hyperparameter_search_space(self): self.assertEqual(len(hyperparameters) - 5, len(conditions)) def test_get_hyperparameter_search_space_include_exclude_models(self): - cs = ParamSklearnRegressor.get_hyperparameter_search_space( + cs = SimpleRegressionPipeline.get_hyperparameter_search_space( include={'regressor': ['random_forest']}) self.assertEqual(cs.get_hyperparameter('regressor:__choice__'), CategoricalHyperparameter('regressor:__choice__', ['random_forest'])) # TODO add this test when more than one regressor is present - cs = ParamSklearnRegressor.get_hyperparameter_search_space( + cs = SimpleRegressionPipeline.get_hyperparameter_search_space( exclude={'regressor': ['random_forest']}) self.assertNotIn('random_forest', str(cs)) - cs = ParamSklearnRegressor.get_hyperparameter_search_space( + cs = SimpleRegressionPipeline.get_hyperparameter_search_space( include={'preprocessor': ['pca']}) self.assertEqual(cs.get_hyperparameter('preprocessor:__choice__'), CategoricalHyperparameter('preprocessor:__choice__', ['pca'])) - cs = ParamSklearnRegressor.get_hyperparameter_search_space( + cs = SimpleRegressionPipeline.get_hyperparameter_search_space( exclude={'preprocessor': ['no_preprocessing']}) self.assertNotIn('no_preprocessing', str(cs)) def test_get_hyperparameter_search_space_preprocessor_contradicts_default_classifier( self): - cs = ParamSklearnRegressor.get_hyperparameter_search_space( + cs = SimpleRegressionPipeline.get_hyperparameter_search_space( include={'preprocessor': ['densifier']}, dataset_properties={'sparse': True}) self.assertEqual(cs.get_hyperparameter('regressor:__choice__').default, 'gradient_boosting') - cs = ParamSklearnRegressor.get_hyperparameter_search_space( + cs = SimpleRegressionPipeline.get_hyperparameter_search_space( include={'preprocessor': ['nystroem_sampler']}) self.assertEqual(cs.get_hyperparameter('regressor:__choice__').default, 'sgd') @@ -194,7 +194,7 @@ def test_get_hyperparameter_search_space_preprocessor_contradicts_default_classi def test_get_hyperparameter_search_space_only_forbidden_combinations(self): self.assertRaisesRegexp(ValueError, "Cannot find a legal default " "configuration.", - ParamSklearnRegressor.get_hyperparameter_search_space, + SimpleRegressionPipeline.get_hyperparameter_search_space, include={'regressor': ['random_forest'], 'preprocessor': ['kitchen_sinks']}) @@ -202,7 +202,7 @@ def test_get_hyperparameter_search_space_only_forbidden_combinations(self): # data are located behind the densifier self.assertRaisesRegexp(ValueError, "Cannot find a legal default " "configuration", - ParamSklearnRegressor.get_hyperparameter_search_space, + SimpleRegressionPipeline.get_hyperparameter_search_space, include={'regressor': ['ridge_regression'], 'preprocessor': ['densifier']}, dataset_properties={'sparse': True}) @@ -214,36 +214,36 @@ def test_get_hyperparameter_search_space_dataset_properties(self): # test is somewhat stupid pass """ - full_cs = ParamSklearnRegressor.get_hyperparameter_search_space() - cs_mc = ParamSklearnRegressor.get_hyperparameter_search_space() + full_cs = SimpleRegressionPipeline.get_hyperparameter_search_space() + cs_mc = SimpleRegressionPipeline.get_hyperparameter_search_space() self.assertEqual(full_cs, cs_mc) - cs_ml = ParamSklearnRegressor.get_hyperparameter_search_space() + cs_ml = SimpleRegressionPipeline.get_hyperparameter_search_space() self.assertNotIn('k_nearest_neighbors', str(cs_ml)) self.assertNotIn('liblinear', str(cs_ml)) self.assertNotIn('libsvm_svc', str(cs_ml)) self.assertNotIn('sgd', str(cs_ml)) - cs_sp = ParamSklearnRegressor.get_hyperparameter_search_space( + cs_sp = SimpleRegressionPipeline.get_hyperparameter_search_space( sparse=True) self.assertNotIn('extra_trees', str(cs_sp)) self.assertNotIn('gradient_boosting', str(cs_sp)) self.assertNotIn('random_forest', str(cs_sp)) - cs_mc_ml = ParamSklearnRegressor.get_hyperparameter_search_space() + cs_mc_ml = SimpleRegressionPipeline.get_hyperparameter_search_space() self.assertEqual(cs_ml, cs_mc_ml) self.assertRaisesRegexp(ValueError, "No regressor to build a configuration space " - "for...", ParamSklearnRegressor. + "for...", SimpleRegressionPipeline. get_hyperparameter_search_space, multiclass=True, multilabel=True, sparse=True) """ def test_predict_batched(self): - cs = ParamSklearnRegressor.get_hyperparameter_search_space() + cs = SimpleRegressionPipeline.get_hyperparameter_search_space() default = cs.get_default_configuration() - cls = ParamSklearnRegressor(default) + cls = SimpleRegressionPipeline(default) X_train, Y_train, X_test, Y_test = get_dataset(dataset='boston') cls.fit(X_train, Y_train) @@ -257,10 +257,10 @@ def test_predict_batched(self): assert_array_almost_equal(prediction_, prediction) def test_predict_batched_sparse(self): - cs = ParamSklearnRegressor.get_hyperparameter_search_space( + cs = SimpleRegressionPipeline.get_hyperparameter_search_space( dataset_properties={'sparse': True}) default = cs.get_default_configuration() - cls = ParamSklearnRegressor(default) + cls = SimpleRegressionPipeline(default) X_train, Y_train, X_test, Y_test = get_dataset(dataset='boston', make_sparse=True) diff --git a/tests/test_textclassification.py b/test/test_pipeline/test_textclassification.py similarity index 86% rename from tests/test_textclassification.py rename to test/test_pipeline/test_textclassification.py index f613bfbde0..28f3dd54af 100644 --- a/tests/test_textclassification.py +++ b/test/test_pipeline/test_textclassification.py @@ -2,13 +2,13 @@ # # from HPOlibConfigSpace.configuration_space import ConfigurationSpace # -# from ParamSklearn.textclassification import ParamSklearnTextClassifier +# from autosklearn.pipeline.textclassification import autosklearn.pipelineTextClassifier # # # class TextClassificationTest(unittest.TestCase): # @unittest.skip("Not properly implemented yet!") # def test_get_hyperparameter_search_space(self): -# cs = ParamSklearnTextClassifier.get_hyperparameter_search_space() +# cs = autosklearn.pipelineTextClassifier.get_hyperparameter_search_space() # self.assertIsInstance(cs, ConfigurationSpace) # conditions = cs.get_conditions() # hyperparameters = cs.get_hyperparameters() diff --git a/testcommand.sh b/testcommand.sh index 24e6795eb5..426743ef2a 100644 --- a/testcommand.sh +++ b/testcommand.sh @@ -1,2 +1,2 @@ #!/usr/bin/env bash -nosetests --processes=4 --process-timeout=120 \ No newline at end of file +nosetests --processes=3 --process-timeout=120 -v \ No newline at end of file diff --git a/tests/components/data_preprocessing/dataset.pkl b/tests/components/data_preprocessing/dataset.pkl deleted file mode 100644 index a976726d5c..0000000000 --- a/tests/components/data_preprocessing/dataset.pkl +++ /dev/null @@ -1,898 +0,0 @@ -nan 0.000000000000000000e+00 1.000000000000000000e+00 8.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.009999990463256836e-01 2.550000000000000000e+02 2.690000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.299999952316284180e+00 1.520000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 1.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 6.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.170000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320099975585937500e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.009999871253967285e-01 1.200099975585937500e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 3.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 3.560000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.399999976158142090e+00 1.320000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 4.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 7.500000000000000000e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.399000048637390137e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.510000228881835938e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 3.748999938964843750e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 1.274900024414062500e+03 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 1.500000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 3.748999938964843750e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.009999990463256836e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 5.150000000000000000e+02 6.100000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 2.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.009999990463256836e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 1.300000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 1.130000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.090000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.000000000000000000e+00 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.090000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 1.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320099975585937500e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.100000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.090000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 4.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 1.130000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 4.510000050067901611e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 1.275000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.525000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.100000023841857910e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.525000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.600000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 5.800000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 8.300000000000000000e+02 8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.000000000000000000e+00 1.525000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.200000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.100999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 1.000000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 5.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 1.000000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 9.660999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.250000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 1.274900024414062500e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.150999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 9.010000228881835938e-01 9.660999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 5.950000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.000000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.299999952316284180e+00 1.090000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.999999761581420898e-01 6.100000000000000000e+02 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.399000048637390137e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.000000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.009999990463256836e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 2.498999938964843750e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.899999976158142090e+00 1.135000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 9.010000228881835938e-01 9.660000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 1.250000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 6.090000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 1.274900024414062500e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.009999990463256836e-01 2.551000061035156250e+02 2.700000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.210000097751617432e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.500000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 2.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.220000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 4.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.299000024795532227e+00 1.050000000000000000e+03 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.750000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 4.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 6.099000244140625000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600999951362609863e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 7.500000000000000000e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 3.748999938964843750e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 2.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.001000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 3.001000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600999951362609863e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 3.000000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 4.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.250000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320099975585937500e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 5.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.900000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 5.950000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.200000000000000000e+03 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 3.550000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.150000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.500000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.000999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.220000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.500000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 7.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.999999761581420898e-01 1.050000000000000000e+03 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.299999952316284180e+00 1.090000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.099000244140625000e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 6.100000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.498999938964843750e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.009999990463256836e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 3.350000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.009999990463256836e-01 2.551000061035156250e+02 2.690000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.000000000000000000e+02 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 8.319000244140625000e+02 8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.210000097751617432e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.275000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.950000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.399999976158142090e+00 1.310000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 2.799999952316284180e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 1.300000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.500000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 1.520000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 6.120000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.525000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 3.000000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.510000050067901611e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200999975204467773e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.001000061035156250e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.500000000000000000e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 6.140000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500999927520751953e+00 1.275000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.590000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 2.001000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.350000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.250000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.090000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.200000000000000000e+03 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.320000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 5.199000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 3.561000061035156250e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 1.900000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500999927520751953e+00 6.000999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 5.000000000000000000e+02 4.120000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.000999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.275000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.009999990463256836e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 3.001000061035156250e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 5.999000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 3.561000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.000000000000000000e+02 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.009999990463256836e-01 3.560000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.090000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 3.001000061035156250e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.190000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000000000000000e+00 6.509999847412109375e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.800000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 1.274900024414062500e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 7.590000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 8.300000000000000000e+02 8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.009999990463256836e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.220000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 1.500000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.299000024795532227e+00 1.050000000000000000e+03 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.001000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.275000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.320000000000000000e+03 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 2.550000000000000000e+02 2.700000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 8.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 3.000000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.999999761581420898e-01 6.100000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 3.748999938964843750e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.250000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.199000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 1.250000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.600000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.525000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.009999990463256836e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.510000050067901611e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 3.350000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 6.090000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.210000097751617432e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.274900024414062500e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.069000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.500000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.525000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.500000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.350000000000000000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.090000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 5.950000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.500000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.100000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.350000000000000000e+02 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 3.748999938964843750e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 2.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 9.660000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 5.000000000000000000e+01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 3.001000061035156250e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.250000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 8.319000244140625000e+02 8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.500000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 3.350000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 3.350000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 1.220000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 0.000000000000000000e+00 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.000000000000000000e+00 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.090000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.999999761581420898e-01 1.050000000000000000e+03 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.525000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 5.999000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 8.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.100000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 4.170000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 2.399999946355819702e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.510000050067901611e-01 1.250000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 -nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 8.300000000000000000e+02 8.800000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 -nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 5.000000000000000000e+00 From c8ed9d5f2e5fa4a513ef3044371946a4c4f957f7 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 17 Dec 2015 09:50:46 +0100 Subject: [PATCH 350/352] Remove example for ParamSklearn --- misc/regressors.csv | 2 +- source/first_steps.rst | 26 -------------------------- test/test_pipeline/test_doctests.py | 13 ------------- 3 files changed, 1 insertion(+), 40 deletions(-) delete mode 100644 source/first_steps.rst delete mode 100644 test/test_pipeline/test_doctests.py diff --git a/misc/regressors.csv b/misc/regressors.csv index d8616e8814..83a162e65c 100644 --- a/misc/regressors.csv +++ b/misc/regressors.csv @@ -38,7 +38,7 @@ class,added,comment ,True, ,False,We want to perform CV ourselves ,False, -,, +,True, ,FALSE,This regressor is inside a test module ,, ,True, diff --git a/source/first_steps.rst b/source/first_steps.rst deleted file mode 100644 index a241c6ce9f..0000000000 --- a/source/first_steps.rst +++ /dev/null @@ -1,26 +0,0 @@ -First Steps with ParamSklearn -***************************** - -This example demonstrates how to get the whole configuration space covered by -ParamSklearn, feed it to the random search algorithm implemented by the -HPOlibConfigSpace package and then train a classifier with a random -configuration on the iris dataset. - - >>> from autosklearn.pipeline.classification import SimpleClassificationPipeline - >>> import sklearn.datasets - >>> import sklearn.metrics - >>> import numpy as np - >>> iris = sklearn.datasets.load_iris() - >>> X = iris.data - >>> Y = iris.target - >>> indices = np.arange(X.shape[0]) - >>> np.random.seed(1) - >>> np.random.shuffle(indices) - >>> configuration_space = SimpleClassificationPipeline.get_hyperparameter_search_space() - >>> configuration_space.seed(1) - >>> configuration = configuration_space.sample_configuration() - >>> cls = SimpleClassificationPipeline(configuration, random_state=1) - >>> cls = cls.fit(X[indices[:100]], Y[indices[:100]]) - >>> predictions = cls.predict(X[indices[100:]]) - >>> sklearn.metrics.accuracy_score(predictions, Y[indices[100:]]) - 0.93999999999999995 diff --git a/test/test_pipeline/test_doctests.py b/test/test_pipeline/test_doctests.py deleted file mode 100644 index 904a725778..0000000000 --- a/test/test_pipeline/test_doctests.py +++ /dev/null @@ -1,13 +0,0 @@ -import doctest -import os -import unittest - -import autosklearn - - -class DocumentationTest(unittest.TestCase): - def test_first_steps(self): - filename = os.path.dirname(autosklearn.__file__) - filename = os.path.join(filename, "..", "source", "first_steps.rst") - failed, run = doctest.testfile(filename, module_relative=False) - self.assertEqual(0, failed) \ No newline at end of file From 7c03173c1c75b7f90d245b4d414efd02987aff08 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 17 Dec 2015 09:56:21 +0100 Subject: [PATCH 351/352] Add dataset file for testing; was missing due to gitignore --- .../components/data_preprocessing/dataset.pkl | 898 ++++++++++++++++++ 1 file changed, 898 insertions(+) create mode 100644 test/test_pipeline/components/data_preprocessing/dataset.pkl diff --git a/test/test_pipeline/components/data_preprocessing/dataset.pkl b/test/test_pipeline/components/data_preprocessing/dataset.pkl new file mode 100644 index 0000000000..a976726d5c --- /dev/null +++ b/test/test_pipeline/components/data_preprocessing/dataset.pkl @@ -0,0 +1,898 @@ +nan 0.000000000000000000e+00 1.000000000000000000e+00 8.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.009999990463256836e-01 2.550000000000000000e+02 2.690000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.299999952316284180e+00 1.520000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 1.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 6.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.170000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320099975585937500e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.009999871253967285e-01 1.200099975585937500e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 3.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 3.560000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.399999976158142090e+00 1.320000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 4.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 7.500000000000000000e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.399000048637390137e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.510000228881835938e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 3.748999938964843750e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 1.274900024414062500e+03 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 1.500000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 3.748999938964843750e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.009999990463256836e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 5.150000000000000000e+02 6.100000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 2.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.009999990463256836e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 1.300000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 1.130000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.090000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.000000000000000000e+00 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.090000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 1.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320099975585937500e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.100000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.090000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 4.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 1.130000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 4.510000050067901611e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 1.275000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.525000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.100000023841857910e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.525000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.600000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 5.800000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 8.300000000000000000e+02 8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.000000000000000000e+00 1.525000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.200000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.100999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 1.000000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 5.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 1.000000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 9.660999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.250000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 1.274900024414062500e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.150999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 9.010000228881835938e-01 9.660999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 5.950000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.000000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.299999952316284180e+00 1.090000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.999999761581420898e-01 6.100000000000000000e+02 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.399000048637390137e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.000000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.009999990463256836e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 2.498999938964843750e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.899999976158142090e+00 1.135000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 9.010000228881835938e-01 9.660000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 1.250000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 6.090000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 1.274900024414062500e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.009999990463256836e-01 2.551000061035156250e+02 2.700000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.210000097751617432e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.500000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 2.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.220000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 4.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.299000024795532227e+00 1.050000000000000000e+03 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.750000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 4.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 6.099000244140625000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600999951362609863e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 7.500000000000000000e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 3.748999938964843750e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 2.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.001000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 3.001000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600999951362609863e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 3.000000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 4.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.250000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320099975585937500e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 5.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.900000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 5.950000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.200000000000000000e+03 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 3.550000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.150000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.500000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.000999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.220000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.500000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 7.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.999999761581420898e-01 1.050000000000000000e+03 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.299999952316284180e+00 1.090000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.099000244140625000e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 6.100000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.498999938964843750e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.009999990463256836e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 3.350000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.009999990463256836e-01 2.551000061035156250e+02 2.690000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.000000000000000000e+02 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 8.319000244140625000e+02 8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.210000097751617432e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.275000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.950000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.399999976158142090e+00 1.310000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 2.799999952316284180e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 1.300000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.500000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 1.520000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 6.120000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.525000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 3.000000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.510000050067901611e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200999975204467773e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.001000061035156250e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.500000000000000000e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 6.140000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500999927520751953e+00 1.275000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.590000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 2.001000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.350000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.250000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.090000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.200000000000000000e+03 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.320000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 5.199000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 3.561000061035156250e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 1.900000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500999927520751953e+00 6.000999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 5.000000000000000000e+02 4.120000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.000999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.275000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.009999990463256836e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 3.001000061035156250e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 5.999000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 3.561000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.000000000000000000e+02 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.009999990463256836e-01 3.560000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.090000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 3.001000061035156250e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.190000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000000000000000e+00 6.509999847412109375e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.800000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 1.274900024414062500e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 7.590000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 8.300000000000000000e+02 8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.009999990463256836e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.220000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 1.500000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.299000024795532227e+00 1.050000000000000000e+03 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.001000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.275000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.320000000000000000e+03 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 2.550000000000000000e+02 2.700000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 8.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 3.000000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.999999761581420898e-01 6.100000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 3.748999938964843750e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.250000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.199000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 1.250000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.600000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.525000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.009999990463256836e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.510000050067901611e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 3.350000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 6.090000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.210000097751617432e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.274900024414062500e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.069000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.500000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.525000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.500000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.350000000000000000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.090000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 5.950000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.500000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.100000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.350000000000000000e+02 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 3.748999938964843750e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 2.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 9.660000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 5.000000000000000000e+01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 3.001000061035156250e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.250000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 8.319000244140625000e+02 8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.500000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 3.350000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 3.350000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 1.220000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 0.000000000000000000e+00 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.000000000000000000e+00 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.090000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.999999761581420898e-01 1.050000000000000000e+03 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.525000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 5.999000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 8.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.100000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 4.170000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 2.399999946355819702e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.510000050067901611e-01 1.250000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 8.300000000000000000e+02 8.800000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 5.000000000000000000e+00 From 0d89f892edaea96ac2d656012d10c2e002c73fe1 Mon Sep 17 00:00:00 2001 From: Matthias Feurer Date: Thu, 17 Dec 2015 14:22:04 +0100 Subject: [PATCH 352/352] Change test, hopefully stops timeout on travis-ci --- autosklearn/constants.py | 4 ++-- autosklearn/evaluation/util.py | 2 +- .../components/feature_preprocessing/test_gem.py | 8 +++++--- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/autosklearn/constants.py b/autosklearn/constants.py index 3fbbcf0230..141e8e6929 100644 --- a/autosklearn/constants.py +++ b/autosklearn/constants.py @@ -32,8 +32,8 @@ R2_METRIC = 10 A_METRIC = 11 -REGRESSION_METRIC = [R2_METRIC, A_METRIC] -METRIC = CLASSIFICATION_METRICS + REGRESSION_METRIC +REGRESSION_METRICS = [R2_METRIC, A_METRIC] +METRIC = CLASSIFICATION_METRICS + REGRESSION_METRICS STRING_TO_METRIC = { 'acc': ACC_METRIC, 'acc_metric': ACC_METRIC, diff --git a/autosklearn/evaluation/util.py b/autosklearn/evaluation/util.py index acaa63aca4..1bc73a616b 100644 --- a/autosklearn/evaluation/util.py +++ b/autosklearn/evaluation/util.py @@ -47,7 +47,7 @@ def calculate_score(solution, prediction, task_type, metric, num_classes, score = dict() if task_type in REGRESSION_TASKS: cprediction = sanitize_array(prediction) - for metric_ in REGRESSION_METRIC: + for metric_ in REGRESSION_METRICS: score[metric_] = regression_metrics.calculate_score(metric_, solution, cprediction) diff --git a/test/test_pipeline/components/feature_preprocessing/test_gem.py b/test/test_pipeline/components/feature_preprocessing/test_gem.py index 1e91dde716..2ad8115cbe 100644 --- a/test/test_pipeline/components/feature_preprocessing/test_gem.py +++ b/test/test_pipeline/components/feature_preprocessing/test_gem.py @@ -1,6 +1,6 @@ import unittest -from autosklearn.pipeline.components.classification.proj_logit import ProjLogitCLassifier +from autosklearn.pipeline.components.classification.sgd import SGD from autosklearn.pipeline.components.feature_preprocessing.gem import GEM from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, get_dataset import sklearn.metrics @@ -26,11 +26,13 @@ def test_default_configuration_classify(self): X_test_trans = preprocessor.transform(X_test) # fit a classifier on top - classifier = ProjLogitCLassifier(max_epochs = 5, random_state=1) + config = SGD.get_hyperparameter_search_space( \ + ).get_default_configuration() + classifier = SGD(random_state=1, **config._values) predictor = classifier.fit(X_train_trans, Y_train) predictions = predictor.predict(X_test_trans) accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) - self.assertGreaterEqual(accuracy, 0.94) + self.assertGreaterEqual(accuracy, 0.85) def test_preprocessing_dtype(self): super(GEMComponentTest, self)._test_preprocessing_dtype(GEM,