diff --git a/.coveralls.yml b/.coveralls.yml new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.travis.yml b/.travis.yml index 773dcd913d..59f243721b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -39,11 +39,12 @@ install: # Install requirements from other repos - pip install git+https://github.com/automl/HPOlibConfigSpace.git - - pip install git+https://github.com/automl/paramsklearn.git + - python setup.py install +# command to run tests, e.g. python setup.py test script: # - coverage run --source autosklearn setup.py test - - cd test && nosetests -v --with-coverage + - cd test && nosetests -v --with-coverage --cover-package=autosklearn -after_success: coveralls \ No newline at end of file +after_success: coveralls diff --git a/CHANGES.md b/CHANGES.md new file mode 100644 index 0000000000..e69de29bb2 diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000000..e1f17e32cb --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,24 @@ +Copyright (c) 2014, Matthias Feurer +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of the <organization> nor the + names of its contributors may be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/autosklearn/automl.py b/autosklearn/automl.py index 4b7f363db9..bc014878ec 100644 --- a/autosklearn/automl.py +++ b/autosklearn/automl.py @@ -26,7 +26,7 @@ convert_conf2smac_string from autosklearn.evaluation import calculate_score from autosklearn.util import StopWatch, get_logger, setup_logger, \ - get_auto_seed, set_auto_seed, del_auto_seed, submit_process, paramsklearn, \ + get_auto_seed, set_auto_seed, del_auto_seed, submit_process, pipeline, \ Backend from autosklearn.util.smac import run_smac @@ -76,7 +76,7 @@ def _create_search_space(tmp_dir, data_info, backend, watcher, logger, task_name = 'CreateConfigSpace' watcher.start_task(task_name) configspace_path = os.path.join(tmp_dir, 'space.pcs') - configuration_space = paramsklearn.get_configuration_space( + configuration_space = pipeline.get_configuration_space( data_info, include_estimators=include_estimators, include_preprocessors=include_preprocessors) @@ -614,7 +614,11 @@ def _load_models(self): seed) def score(self, X, y): + # fix: Consider only index 1 of second dimension + # Don't know if the reshaping should be done there or in calculate_score prediction = self.predict(X) + if self._task == BINARY_CLASSIFICATION: + prediction = prediction[:, 1].reshape((-1, 1)) return calculate_score(y, prediction, self._task, self._metric, self._label_num, logger=self._logger) @@ -695,4 +699,4 @@ def _delete_output_directories(self): pass else: print("Could not delete tmp dir: %s" % - self._tmp_dir) \ No newline at end of file + self._tmp_dir) diff --git a/autosklearn/cli/base_interface.py b/autosklearn/cli/base_interface.py index da724bdbaa..a4f8bb831e 100644 --- a/autosklearn/cli/base_interface.py +++ b/autosklearn/cli/base_interface.py @@ -11,7 +11,7 @@ from autosklearn.data.competition_data_manager import CompetitionDataManager from autosklearn.evaluation import CVEvaluator, HoldoutEvaluator, \ NestedCVEvaluator, TestEvaluator, get_new_run_num -from autosklearn.util.paramsklearn import get_configuration_space +from autosklearn.util.pipeline import get_configuration_space from autosklearn.util import Backend diff --git a/autosklearn/constants.py b/autosklearn/constants.py index 3fbbcf0230..141e8e6929 100644 --- a/autosklearn/constants.py +++ b/autosklearn/constants.py @@ -32,8 +32,8 @@ R2_METRIC = 10 A_METRIC = 11 -REGRESSION_METRIC = [R2_METRIC, A_METRIC] -METRIC = CLASSIFICATION_METRICS + REGRESSION_METRIC +REGRESSION_METRICS = [R2_METRIC, A_METRIC] +METRIC = CLASSIFICATION_METRICS + REGRESSION_METRICS STRING_TO_METRIC = { 'acc': ACC_METRIC, 'acc_metric': ACC_METRIC, diff --git a/autosklearn/data/abstract_data_manager.py b/autosklearn/data/abstract_data_manager.py index bc60fc90d8..63355105d4 100644 --- a/autosklearn/data/abstract_data_manager.py +++ b/autosklearn/data/abstract_data_manager.py @@ -5,7 +5,7 @@ import numpy as np import scipy.sparse -from ParamSklearn.implementations.OneHotEncoder import OneHotEncoder +from autosklearn.pipeline.implementations.OneHotEncoder import OneHotEncoder from autosklearn.util import predict_RAM_usage diff --git a/autosklearn/ensemble_selection_script.py b/autosklearn/ensemble_selection_script.py index b6bed5987f..1488729967 100644 --- a/autosklearn/ensemble_selection_script.py +++ b/autosklearn/ensemble_selection_script.py @@ -58,15 +58,20 @@ def get_predictions(dir_path, dir_path_list, include_num_runs, match = model_and_automl_re.search(model_name) automl_seed = int(match.group(1)) num_run = int(match.group(2)) + + if model_name.endswith("/"): + model_name = model_name[:-1] + 
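# (Not part of the patch: a minimal sketch of why automl.score above
# slices out index 1 for binary classification. predict() returns one
# probability column per class, while calculate_score expects only the
# positive-class column; numbers are made up for illustration.)
import numpy as np
proba = np.array([[0.9, 0.1], [0.2, 0.8]])   # shape (n_samples, 2)
positive = proba[:, 1].reshape((-1, 1))      # shape (n_samples, 1)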
basename = os.path.basename(model_name) + if (automl_seed, num_run) in include_num_runs: if precision == "16": - predictions = np.load(os.path.join(dir_path, model_name)).astype(dtype=np.float16) + predictions = np.load(os.path.join(dir_path, basename)).astype(dtype=np.float16) elif precision == "32": - predictions = np.load(os.path.join(dir_path, model_name)).astype(dtype=np.float32) + predictions = np.load(os.path.join(dir_path, basename)).astype(dtype=np.float32) elif precision == "64": - predictions = np.load(os.path.join(dir_path, model_name)).astype(dtype=np.float64) + predictions = np.load(os.path.join(dir_path, basename)).astype(dtype=np.float64) else: - predictions = np.load(os.path.join(dir_path, model_name)) + predictions = np.load(os.path.join(dir_path, basename)) result.append(predictions) return result @@ -249,7 +254,10 @@ def main(autosklearn_tmp_dir, dir_ensemble_list_mtimes = [] for dir_ensemble_file in dir_ensemble_list: - dir_ensemble_file = os.path.join(dir_ensemble, dir_ensemble_file) + if dir_ensemble_file.endswith("/"): + dir_ensemble_file = dir_ensemble_file[:-1] + basename = os.path.basename(dir_ensemble_file) + dir_ensemble_file = os.path.join(dir_ensemble, basename) mtime = os.path.getmtime(dir_ensemble_file) dir_ensemble_list_mtimes.append(mtime) @@ -285,14 +293,18 @@ def main(autosklearn_tmp_dir, model_idx = 0 for model_name in dir_ensemble_list: + if model_name.endswith("/"): + model_name = model_name[:-1] + basename = os.path.basename(model_name) + if precision is "16": - predictions = np.load(os.path.join(dir_ensemble, model_name)).astype(dtype=np.float16) + predictions = np.load(os.path.join(dir_ensemble, basename)).astype(dtype=np.float16) elif precision is "32": - predictions = np.load(os.path.join(dir_ensemble, model_name)).astype(dtype=np.float32) + predictions = np.load(os.path.join(dir_ensemble, basename)).astype(dtype=np.float32) elif precision is "64": - predictions = np.load(os.path.join(dir_ensemble, model_name)).astype(dtype=np.float64) + predictions = np.load(os.path.join(dir_ensemble, basename)).astype(dtype=np.float64) else: - predictions = np.load(os.path.join(dir_ensemble, model_name)) + predictions = np.load(os.path.join(dir_ensemble, basename)) score = calculate_score(targets_ensemble, predictions, task_type, metric, predictions.shape[1]) diff --git a/autosklearn/evaluation/abstract_evaluator.py b/autosklearn/evaluation/abstract_evaluator.py index 566bcf84a5..833c3bf14c 100644 --- a/autosklearn/evaluation/abstract_evaluator.py +++ b/autosklearn/evaluation/abstract_evaluator.py @@ -7,8 +7,8 @@ import numpy as np import lockfile -from ParamSklearn.classification import ParamSklearnClassifier -from ParamSklearn.regression import ParamSklearnRegressor +from autosklearn.pipeline.classification import SimpleClassificationPipeline +from autosklearn.pipeline.regression import SimpleRegressionPipeline from sklearn.dummy import DummyClassifier, DummyRegressor from autosklearn.constants import * @@ -106,13 +106,13 @@ def __init__(self, Datamanager, configuration=None, if self.configuration is None: self.model_class = MyDummyRegressor else: - self.model_class = ParamSklearnRegressor + self.model_class = SimpleRegressionPipeline self.predict_function = self.predict_regression else: if self.configuration is None: self.model_class = MyDummyClassifier else: - self.model_class = ParamSklearnClassifier + self.model_class = SimpleClassificationPipeline self.predict_function = self.predict_proba if num_run is None: diff --git a/autosklearn/evaluation/util.py 
b/autosklearn/evaluation/util.py index acaa63aca4..1bc73a616b 100644 --- a/autosklearn/evaluation/util.py +++ b/autosklearn/evaluation/util.py @@ -47,7 +47,7 @@ def calculate_score(solution, prediction, task_type, metric, num_classes, score = dict() if task_type in REGRESSION_TASKS: cprediction = sanitize_array(prediction) - for metric_ in REGRESSION_METRIC: + for metric_ in REGRESSION_METRICS: score[metric_] = regression_metrics.calculate_score(metric_, solution, cprediction) diff --git a/autosklearn/metalearning/metafeatures/metafeatures.py b/autosklearn/metalearning/metafeatures/metafeatures.py index 75bdd47709..f1b0a02a93 100644 --- a/autosklearn/metalearning/metafeatures/metafeatures.py +++ b/autosklearn/metalearning/metafeatures/metafeatures.py @@ -13,9 +13,9 @@ from sklearn.utils import check_array from sklearn.multiclass import OneVsRestClassifier -from ParamSklearn.implementations.Imputation import Imputer -from ParamSklearn.implementations.OneHotEncoder import OneHotEncoder -from ParamSklearn.implementations.StandardScaler import StandardScaler +from autosklearn.pipeline.implementations.Imputation import Imputer +from autosklearn.pipeline.implementations.OneHotEncoder import OneHotEncoder +from autosklearn.pipeline.implementations.StandardScaler import StandardScaler from autosklearn.util.logging_ import get_logger from .metafeature import MetaFeature, HelperFunction, DatasetMetafeatures, \ diff --git a/autosklearn/pipeline/__init__.py b/autosklearn/pipeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/autosklearn/pipeline/base.py b/autosklearn/pipeline/base.py new file mode 100644 index 0000000000..1aa94770b6 --- /dev/null +++ b/autosklearn/pipeline/base.py @@ -0,0 +1,346 @@ +from abc import ABCMeta +from collections import defaultdict + +import numpy as np + +from sklearn.base import BaseEstimator +from sklearn.pipeline import Pipeline +from sklearn.utils.validation import check_random_state, check_is_fitted + +from autosklearn.pipeline import components as components +import autosklearn.pipeline.create_searchspace_util + + +class BasePipeline(BaseEstimator): + """Base class for all pipeline objects. + + Notes + ----- + This class should not be instantiated, only subclassed.""" + __metaclass__ = ABCMeta + + def __init__(self, configuration, random_state=None): + self.configuration = configuration + + if random_state is None: + self.random_state = check_random_state(1) + else: + self.random_state = check_random_state(random_state) + + def fit(self, X, y, fit_params=None, init_params=None): + """Fit the selected algorithm to the training data. + + Parameters + ---------- + X : array-like or sparse, shape = (n_samples, n_features) + Training data. The preferred type of the matrix (dense or sparse) + depends on the estimator selected. + + y : array-like + Targets + + fit_params : dict + See the documentation of sklearn.pipeline.Pipeline for formatting + instructions. + + init_params : dict + Pass arguments to the constructors of single methods. To pass + arguments to only one of the methods (lets says the + OneHotEncoder), seperate the class name from the argument by a ':'. + + Returns + ------- + self : returns an instance of self. + + Raises + ------ + NoModelException + NoModelException is raised if fit() is called without specifying + a classification algorithm first. 
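A hypothetical call illustrating the ':' convention described above (the key and value are made up for illustration; the same format is produced by the balancing component shown further below):

    init_params = {'classifier:class_weight': 'auto'}
    pipeline.fit(X, y, init_params=init_params)
    # split at ':' and passed as class_weight='auto' when the
    # classifier step is instantiated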
+ """ + X, fit_params = self.pre_transform(X, y, fit_params=fit_params, + init_params=init_params) + self.fit_estimator(X, y, fit_params=fit_params) + return self + + def pre_transform(self, X, y, fit_params=None, init_params=None): + + # Save all transformation object in a list to create a pipeline object + steps = [] + + # seperate the init parameters for the single methods + init_params_per_method = defaultdict(dict) + if init_params is not None and len(init_params) != 0: + for init_param, value in init_params.items(): + method, param = init_param.split(":") + init_params_per_method[method][param] = value + + # List of preprocessing steps (and their order) + preprocessors_names = [preprocessor[0] for + preprocessor in self._get_pipeline()[:-1]] + + for preproc_name in preprocessors_names: + preproc_params = {} + for instantiated_hyperparameter in self.configuration: + if not instantiated_hyperparameter.startswith( + preproc_name + ":"): + continue + if self.configuration[instantiated_hyperparameter] is None: + continue + + name_ = instantiated_hyperparameter.split(":")[-1] + preproc_params[name_] = self.configuration[ + instantiated_hyperparameter] + + if preproc_name in \ + components.feature_preprocessing_components._preprocessors: + _preprocessors = components.feature_preprocessing_components._preprocessors + elif preproc_name in \ + components.data_preprocessing_components._preprocessors: + _preprocessors = components.data_preprocessing_components._preprocessors + else: + raise ValueError(preproc_name) + + preprocessor_object = _preprocessors[preproc_name]( + random_state=self.random_state, **preproc_params) + + # Ducktyping... + if hasattr(preprocessor_object, 'get_components'): + preprocessor_object = preprocessor_object.choice + + steps.append((preproc_name, preprocessor_object)) + + # Extract Estimator Hyperparameters from the configuration object + estimator_name = self._get_pipeline()[-1][0] + estimator_object = self._get_pipeline()[-1][1] + estimator_parameters = {} + for instantiated_hyperparameter in self.configuration: + if not instantiated_hyperparameter.startswith(estimator_name): + continue + if self.configuration[instantiated_hyperparameter] is None: + continue + + name_ = instantiated_hyperparameter.split(":")[-1] + estimator_parameters[name_] = self.configuration[ + instantiated_hyperparameter] + + estimator_parameters.update(init_params_per_method[estimator_name]) + estimator_object = estimator_object(random_state=self.random_state, + **estimator_parameters) + + # Ducktyping... 
+ if hasattr(estimator_object, 'get_components'): + estimator_object = estimator_object.choice + + steps.append((estimator_name, estimator_object)) + + self.pipeline_ = Pipeline(steps) + if fit_params is None or not isinstance(fit_params, dict): + fit_params = dict() + else: + fit_params = {key.replace(":", "__"): value for key, value in + fit_params.items()} + X, fit_params = self.pipeline_._pre_transform(X, y, **fit_params) + return X, fit_params + + def fit_estimator(self, X, y, fit_params=None): + check_is_fitted(self, 'pipeline_') + if fit_params is None: + fit_params = {} + self.pipeline_.steps[-1][-1].fit(X, y, **fit_params) + return self + + def iterative_fit(self, X, y, fit_params=None, n_iter=1): + check_is_fitted(self, 'pipeline_') + if fit_params is None: + fit_params = {} + self.pipeline_.steps[-1][-1].iterative_fit(X, y, n_iter=n_iter, + **fit_params) + + def estimator_supports_iterative_fit(self): + return hasattr(self.pipeline_.steps[-1][-1], 'iterative_fit') + + def configuration_fully_fitted(self): + check_is_fitted(self, 'pipeline_') + return self.pipeline_.steps[-1][-1].configuration_fully_fitted() + + def predict(self, X, batch_size=None): + """Predict the classes using the selected model. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + batch_size: int or None, defaults to None + batch_size controls whether the pipeline will be + called on small chunks of the data. Useful when calling the + predict method on the whole array X results in a MemoryError. + + Returns + ------- + array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) + Returns the predicted values""" + # TODO check if fit() was called before... + + if batch_size is None: + return self.pipeline_.predict(X) + else: + if type(batch_size) is not int or batch_size <= 0: + raise Exception("batch_size must be a positive integer") + + else: + if self.num_targets == 1: + y = np.zeros((X.shape[0],)) + else: + y = np.zeros((X.shape[0], self.num_targets)) + + # Copied and adapted from the scikit-learn GP code + for k in range(max(1, int(np.ceil(float(X.shape[0]) / + batch_size)))): + batch_from = k * batch_size + batch_to = min([(k + 1) * batch_size, X.shape[0]]) + y[batch_from:batch_to] = \ + self.predict(X[batch_from:batch_to], batch_size=None) + + return y + + @classmethod + def get_hyperparameter_search_space(cls, include=None, exclude=None, + dataset_properties=None): + """Return the configuration space for the CASH problem. + + This method should be called by the method + get_hyperparameter_search_space of a subclass. After the subclass + assembles a list of available estimators and preprocessor components, + _get_hyperparameter_search_space can be called to do the work of + creating the actual + HPOlibConfigSpace.configuration_space.ConfigurationSpace object. + + Parameters + ---------- + estimator_name : str + Name of the estimator hyperparameter which will be used in the + configuration space. For a classification task, this would be + 'classifier'. + + estimator_components : dict {name: component} + Dictionary with all estimator components to be included in the + configuration space. + + preprocessor_components : dict {name: component} + Dictionary with all preprocessor components to be included in the + configuration space. . + + always_active : list of str + A list of components which will always be active in the pipeline. + This is useful for components like imputation which have + hyperparameters to be configured, but which do not have any parent. 
+ + default_estimator : str + Default value for the estimator hyperparameter. + + Returns + ------- + cs : HPOlibConfigSpace.configuration_space.ConfigurationSpace + The configuration space describing the AutoSklearnClassifier. + + """ + raise NotImplementedError() + + @classmethod + def _get_hyperparameter_search_space(cls, cs, dataset_properties, exclude, + include, pipeline): + if include is None: + include = {} + + keys = [pair[0] for pair in pipeline] + for key in include: + if key not in keys: + raise ValueError('Invalid key in include: %s; should be one ' + 'of %s' % (key, keys)) + + if exclude is None: + exclude = {} + + keys = [pair[0] for pair in pipeline] + for key in exclude: + if key not in keys: + raise ValueError('Invalid key in exclude: %s; should be one ' + 'of %s' % (key, keys)) + + if 'sparse' not in dataset_properties: + # This dataset is probably dense + dataset_properties['sparse'] = False + if 'signed' not in dataset_properties: + # This dataset probably contains unsigned data + dataset_properties['signed'] = False + + matches = autosklearn.pipeline.create_searchspace_util.get_match_array( + pipeline, dataset_properties, include=include, exclude=exclude) + + # Now we have only legal combinations at this step of the pipeline + # Simple sanity checks + assert np.sum(matches) != 0, "No valid pipeline found." + + assert np.sum(matches) <= np.size(matches), \ + "'matches' is not binary; %s <= %d, %s" % \ + (str(np.sum(matches)), np.size(matches), str(matches.shape)) + + # Iterate each dimension of the matches array (each step of the + # pipeline) to see if we can add a hyperparameter for that step + for node_idx, n_ in enumerate(pipeline): + node_name, node = n_ + is_choice = hasattr(node, "get_available_components") + + # if the node isn't a choice we can add it immediately because it + # must be active (if it were not, np.sum(matches) would be zero) + if not is_choice: + cs.add_configuration_space(node_name, + node.get_hyperparameter_search_space(dataset_properties)) + # If the node is a choice, we have to figure out which of its + # choices are actually legal choices + else: + choices_list = autosklearn.pipeline.create_searchspace_util.\ + find_active_choices(matches, node, node_idx, + dataset_properties, + include.get(node_name), + exclude.get(node_name)) + cs.add_configuration_space(node_name, + node.get_hyperparameter_search_space( + dataset_properties, include=choices_list)) + # And now add forbidden parameter configurations + # According to matches + if np.sum(matches) < np.size(matches): + cs = autosklearn.pipeline.create_searchspace_util.add_forbidden( + conf_space=cs, pipeline=pipeline, matches=matches, + dataset_properties=dataset_properties, include=include, + exclude=exclude) + + return cs + + def __repr__(self): + class_name = self.__class__.__name__ + + configuration = {} + self.configuration._populate_values() + for hp_name in self.configuration: + if self.configuration[hp_name] is not None: + configuration[hp_name] = self.configuration[hp_name] + + configuration_string = ''.join( + ['configuration={\n ', + ',\n '.join(["'%s': %s" % (hp_name, repr(configuration[hp_name])) + for hp_name in sorted(configuration)]), + '}']) + + return '%s(%s)' % (class_name, configuration_string) + + @classmethod + def _get_pipeline(cls): + if cls == BasePipeline: + return [] + raise NotImplementedError() + + def _get_estimator_hyperparameter_name(self): + raise NotImplementedError() + diff --git a/autosklearn/pipeline/classification.py 
b/autosklearn/pipeline/classification.py new file mode 100644 index 0000000000..a41cc49125 --- /dev/null +++ b/autosklearn/pipeline/classification.py @@ -0,0 +1,297 @@ +import copy +from itertools import product + +import numpy as np + +from sklearn.base import ClassifierMixin + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction + +from autosklearn.pipeline import components as components +from autosklearn.pipeline.base import BasePipeline +from autosklearn.pipeline.constants import SPARSE +from autosklearn.pipeline.components.data_preprocessing.balancing import Balancing + + +class SimpleClassificationPipeline(ClassifierMixin, BasePipeline): + """This class implements the classification task. + + It implements a pipeline, which includes one preprocessing step and one + classification algorithm. It can render a search space including all known + classification and preprocessing algorithms. + + Contrary to the sklearn API it is not possible to enumerate the + possible parameters in the __init__ function because we only know the + available classifiers at runtime. For this reason the user must + specifiy the parameters by passing an instance of + HPOlibConfigSpace.configuration_space.Configuration. + + Parameters + ---------- + configuration : HPOlibConfigSpace.configuration_space.Configuration + The configuration to evaluate. + + random_state : int, RandomState instance or None, optional (default=None) + If int, random_state is the seed used by the random number generator; + If RandomState instance, random_state is the random number generator; + If None, the random number generator is the RandomState instance + used by `np.random`. + + Attributes + ---------- + _estimator : The underlying scikit-learn classification model. This + variable is assigned after a call to the + :meth:`autosklearn.pipeline.classification.SimpleClassificationPipeline + .fit` method. + + _preprocessor : The underlying scikit-learn preprocessing algorithm. This + variable is only assigned if a preprocessor is specified and + after a call to the + :meth:`autosklearn.pipeline.classification.SimpleClassificationPipeline + .fit` method. + + See also + -------- + + References + ---------- + + Examples + -------- + + """ + + def pre_transform(self, X, y, fit_params=None, init_params=None): + self.num_targets = 1 if len(y.shape) == 1 else y.shape[1] + + # Weighting samples has to be done here, not in the components + if self.configuration['balancing:strategy'] == 'weighting': + balancing = Balancing(strategy='weighting') + init_params, fit_params = balancing.get_weights( + y, self.configuration['classifier:__choice__'], + self.configuration['preprocessor:__choice__'], + init_params, fit_params) + + X, fit_params = super(SimpleClassificationPipeline, self).pre_transform( + X, y, fit_params=fit_params, init_params=init_params) + + return X, fit_params + + def predict_proba(self, X, batch_size=None): + """predict_proba. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + batch_size: int or None, defaults to None + batch_size controls whether the pipeline will be + called on small chunks of the data. Useful when calling the + predict method on the whole array X results in a MemoryError. 
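For example (hypothetical sizes), a call such as

    proba = pipeline.predict_proba(X_test, batch_size=1000)

on 2500 samples issues three internal calls covering rows [0:1000],
[1000:2000] and [2000:2500], then stitches the results into one array.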
+ + Returns + ------- + array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) + """ + if batch_size is None: + Xt = X + for name, transform in self.pipeline_.steps[:-1]: + Xt = transform.transform(Xt) + + return self.pipeline_.steps[-1][-1].predict_proba(Xt) + + else: + if type(batch_size) is not int or batch_size <= 0: + raise Exception("batch_size must be a positive integer") + + else: + # Probe for the target array dimensions + target = self.predict_proba(X[0].copy()) + + # Binary or Multiclass + if len(target) == 1: + y = np.zeros((X.shape[0], target.shape[1])) + + for k in range(max(1, int(np.ceil(float(X.shape[0]) / + batch_size)))): + batch_from = k * batch_size + batch_to = min([(k + 1) * batch_size, X.shape[0]]) + y[batch_from:batch_to] = \ + self.predict_proba(X[batch_from:batch_to], + batch_size=None) + + elif len(target) > 1: + y = [np.zeros((X.shape[0], target[i].shape[1])) + for i in range(len(target))] + + for k in range(max(1, int(np.ceil(float(X.shape[0]) / + batch_size)))): + batch_from = k * batch_size + batch_to = min([(k + 1) * batch_size, X.shape[0]]) + predictions = \ + self.predict_proba(X[batch_from:batch_to], + batch_size=None) + + for i in range(len(target)): + y[i][batch_from:batch_to] = predictions[i] + + return y + + @classmethod + def get_hyperparameter_search_space(cls, include=None, exclude=None, + dataset_properties=None): + """Create the hyperparameter configuration space. + + Parameters + ---------- + include : dict (optional, default=None) + + Returns + ------- + """ + cs = ConfigurationSpace() + + if dataset_properties is None or not isinstance(dataset_properties, dict): + dataset_properties = dict() + if not 'target_type' in dataset_properties: + dataset_properties['target_type'] = 'classification' + if dataset_properties['target_type'] != 'classification': + dataset_properties['target_type'] = 'classification' + + pipeline = cls._get_pipeline() + cs = cls._get_hyperparameter_search_space(cs, dataset_properties, + exclude, include, pipeline) + + classifiers = cs.get_hyperparameter('classifier:__choice__').choices + preprocessors = cs.get_hyperparameter('preprocessor:__choice__').choices + available_classifiers = pipeline[-1][1].get_available_components( + dataset_properties) + available_preprocessors = pipeline[-2][1].get_available_components( + dataset_properties) + + possible_default_classifier = copy.copy(list( + available_classifiers.keys())) + default = cs.get_hyperparameter('classifier:__choice__').default + del possible_default_classifier[possible_default_classifier.index(default)] + + # A classifier which can handle sparse data after the densifier is + # forbidden for memory issues + for key in classifiers: + if SPARSE in available_classifiers[key].get_properties()['input']: + if 'densifier' in preprocessors: + while True: + try: + cs.add_forbidden_clause( + ForbiddenAndConjunction( + ForbiddenEqualsClause( + cs.get_hyperparameter( + 'classifier:__choice__'), key), + ForbiddenEqualsClause( + cs.get_hyperparameter( + 'preprocessor:__choice__'), 'densifier') + )) + # Success + break + except ValueError: + # Change the default and try again + try: + default = possible_default_classifier.pop() + except IndexError: + raise ValueError("Cannot find a legal default configuration.") + cs.get_hyperparameter( + 'classifier:__choice__').default = default + + # which would take too long + # Combinations of non-linear models with feature learning: + classifiers_ = ["adaboost", "decision_tree", "extra_trees", + "gradient_boosting", 
"k_nearest_neighbors", + "libsvm_svc", "random_forest", "gaussian_nb", + "decision_tree"] + feature_learning = ["kitchen_sinks", "nystroem_sampler"] + + for c, f in product(classifiers_, feature_learning): + if c not in classifiers: + continue + if f not in preprocessors: + continue + while True: + try: + cs.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(cs.get_hyperparameter( + "classifier:__choice__"), c), + ForbiddenEqualsClause(cs.get_hyperparameter( + "preprocessor:__choice__"), f))) + break + except KeyError: + break + except ValueError as e: + # Change the default and try again + try: + default = possible_default_classifier.pop() + except IndexError: + raise ValueError( + "Cannot find a legal default configuration.") + cs.get_hyperparameter( + 'classifier:__choice__').default = default + + # Won't work + # Multinomial NB etc don't use with features learning, pca etc + classifiers_ = ["multinomial_nb"] + preproc_with_negative_X = ["kitchen_sinks", "pca", "truncatedSVD", + "fast_ica", "kernel_pca", "nystroem_sampler"] + + for c, f in product(classifiers_, preproc_with_negative_X): + if c not in classifiers: + continue + if f not in preprocessors: + continue + while True: + try: + cs.add_forbidden_clause(ForbiddenAndConjunction( + ForbiddenEqualsClause(cs.get_hyperparameter( + "preprocessor:__choice__"), f), + ForbiddenEqualsClause(cs.get_hyperparameter( + "classifier:__choice__"), c))) + break + except KeyError: + break + except ValueError: + # Change the default and try again + try: + default = possible_default_classifier.pop() + except IndexError: + raise ValueError( + "Cannot find a legal default configuration.") + cs.get_hyperparameter( + 'classifier:__choice__').default = default + + return cs + + @classmethod + def _get_pipeline(cls): + steps = [] + + # Add the always active preprocessing components + steps.extend( + [["one_hot_encoding", + components.data_preprocessing._preprocessors['one_hot_encoding']], + ["imputation", + components.data_preprocessing._preprocessors['imputation']], + ["rescaling", + components.data_preprocessing._preprocessors['rescaling']], + ["balancing", + components.data_preprocessing._preprocessors['balancing']]]) + + # Add the preprocessing component + steps.append(['preprocessor', + components.feature_preprocessing._preprocessors['preprocessor']]) + + # Add the classification component + steps.append(['classifier', + components.classification_components._classifiers['classifier']]) + return steps + + def _get_estimator_hyperparameter_name(self): + return "classifier" + diff --git a/autosklearn/pipeline/components/__init__.py b/autosklearn/pipeline/components/__init__.py new file mode 100644 index 0000000000..3312b4d12a --- /dev/null +++ b/autosklearn/pipeline/components/__init__.py @@ -0,0 +1,46 @@ +"""auto-sklearn can be easily extended with new classification and +preprocessing methods. At import time, auto-sklearn checks the directory +``autosklearn/pipeline/components/classification`` for classification +algorithms and ``autosklearn/pipeline/components/preprocessing`` for +preprocessing algorithms. To be found, the algorithm must be provide a class +implementing one of the given +interfaces. + +Coding Guidelines +================= +Please try to adhere to the `scikit-learn coding guidelines `_. + +Own Implementation of Algorithms +================================ +When adding new algorithms, it is possible to implement it directly in the +fit/predict/transform method of a component. 
We do not recommend this, +but rather recommend implementing the algorithm in a scikit-learn compatible +way (`see here `_). +Such an implementation should then be put into the `implementations` directory +and can then be easily wrapped to become a component in auto-sklearn. + +Classification +============== + +The SimpleClassificationPipeline provides an interface for +Classification Algorithms inside auto-sklearn. It provides four important +functions. Two of them, +:meth:`get_hyperparameter_search_space() ` +and +:meth:`get_properties() ` +are used to +automatically create a valid configuration space. The other two, +:meth:`fit() ` and +:meth:`predict() ` +are an implementation of the `scikit-learn predictor API `_. + +Preprocessing +=============""" + +from . import classification as classification_components +from . import regression as regression_components +from . import feature_preprocessing as feature_preprocessing_components +from . import data_preprocessing as data_preprocessing_components + + + diff --git a/autosklearn/pipeline/components/base.py b/autosklearn/pipeline/components/base.py new file mode 100644 index 0000000000..ea1df4b719 --- /dev/null +++ b/autosklearn/pipeline/components/base.py @@ -0,0 +1,360 @@ +class AutoSklearnClassificationAlgorithm(object): + """Provide an abstract interface for classification algorithms in + auto-sklearn. + + Make a subclass of this and put it into the directory + `autosklearn/pipeline/components/classification` to make it available.""" + + def __init__(self): + self.estimator = None + self.properties = None + + @staticmethod + def get_properties(dataset_properties=None): + """Get the properties of the underlying algorithm. These are: + + * Short name + * Full name + * Can the algorithm handle missing values? + (handles_missing_values : {True, False}) + * Can the algorithm handle nominal features? + (handles_nominal_features : {True, False}) + * Can the algorithm handle numerical features? + (handles_numerical_features : {True, False}) + * Does the algorithm prefer data scaled in [0,1]? + (prefers_data_scaled : {True, False}) + * Does the algorithm prefer data normalized to 0-mean, 1std? + (prefers_data_normalized : {True, False}) + * Can the algorithm handle multiclass-classification problems? + (handles_multiclass : {True, False}) + * Can the algorithm handle multilabel-classification problems? + (handles_multilabel : {True, False}) + * Is the algorithm deterministic for a given seed? + (is_deterministic : {True, False}) + * Can the algorithm handle sparse data? + (handles_sparse : {True, False}) + * What are the preferred types of the data array? + (preferred_dtype : list of tuples) + + Returns + ------- + dict + """ + raise NotImplementedError() + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + """Return the configuration space of this classification algorithm. + + Returns + ------- + HPOlibConfigSpace.configuration_space.ConfigurationSpace + The configuration space of this classification algorithm. + """ + raise NotImplementedError() + + def fit(self, X, y): + """The fit function calls the fit function of the underlying + scikit-learn model and returns `self`. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + Training data + + y : array-like, shape = [n_samples] + Targets + + Returns + ------- + self : returns an instance of self.
+ + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" + raise NotImplementedError() + + def predict(self, X): + """The predict function calls the predict function of the + underlying scikit-learn model and returns an array with the predictions. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape = (n_samples,) + Returns the predicted values + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" + raise NotImplementedError() + + def predict_proba(self, X): + """Predict probabilities. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) + """ + raise NotImplementedError() + + def get_estimator(self): + """Return the underlying estimator object. + + Returns + ------- + estimator : the underlying estimator object + """ + return self.estimator + + def __str__(self): + name = self.get_properties()['name'] + return "autosklearn.pipeline %s" % name + + +class AutoSklearnPreprocessingAlgorithm(object): + """Provide an abstract interface for preprocessing algorithms in + auto-sklearn. + + Make a subclass of this and put it into the directory + `autosklearn/pipeline/components/preprocessing` to make it available.""" + + def __init__(self): + self.preprocessor = None + + @staticmethod + def get_properties(dataset_properties=None): + """Get the properties of the underlying algorithm. These are: + + * Short name + * Full name + * Can the algorithm handle missing values? + (handles_missing_values : {True, False}) + * Can the algorithm handle nominal features? + (handles_nominal_features : {True, False}) + * Can the algorithm handle numerical features? + (handles_numerical_features : {True, False}) + * Does the algorithm prefer data scaled in [0,1]? + (prefers_data_scaled : {True, False}) + * Does the algorithm prefer data normalized to 0-mean, 1std? + (prefers_data_normalized : {True, False}) + * Can preprocess regression data? + (handles_regression : {True, False}) + * Can preprocess classification data? + (handles_classification : {True, False}) + * Can the algorithm handle multiclass-classification problems? + (handles_multiclass : {True, False}) + * Can the algorithm handle multilabel-classification problems? + (handles_multilabel : {True, False}) + * Is the algorithm deterministic for a given seed? + (is_deterministic : {True, False}) + * Can the algorithm handle sparse data? + (handles_sparse : {True, False}) + * What are the preferred types of the data array? + (preferred_dtype : list of tuples) + + Returns + ------- + dict + """ + raise NotImplementedError() + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + """Return the configuration space of this preprocessing algorithm. + + Returns + ------- + HPOlibConfigSpace.configuration_space.ConfigurationSpace + The configuration space of this preprocessing algorithm. + """ + raise NotImplementedError() + + def fit(self, X, Y): + """The fit function calls the fit function of the underlying + scikit-learn preprocessing algorithm and returns `self`. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + Training data + + y : array-like, shape = [n_samples] + + Returns + ------- + self : returns an instance of self.
+ + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" + raise NotImplementedError() + + def transform(self, X): + """The transform function calls the transform function of the + underlying scikit-learn model and returns the transformed array. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + X : array + Return the transformed training data + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" + raise NotImplementedError() + + def get_preprocessor(self): + """Return the underlying preprocessor object. + + Returns + ------- + preprocessor : the underlying preprocessor object + """ + return self.preprocessor + + def __str__(self): + name = self.get_properties()['name'] + return "autosklearn.pipeline %s" % name + + +class AutoSklearnRegressionAlgorithm(object): + """Provide an abstract interface for regression algorithms in + auto-sklearn. + + Make a subclass of this and put it into the directory + `autosklearn/pipeline/components/regression` to make it available.""" + + def __init__(self): + self.estimator = None + self.properties = None + + @staticmethod + def get_properties(dataset_properties=None): + """Get the properties of the underlying algorithm. These are: + + * Short name + * Full name + * Can the algorithm handle missing values? + (handles_missing_values : {True, False}) + * Can the algorithm handle nominal features? + (handles_nominal_features : {True, False}) + * Can the algorithm handle numerical features? + (handles_numerical_features : {True, False}) + * Does the algorithm prefer data scaled in [0,1]? + (prefers_data_scaled : {True, False}) + * Does the algorithm prefer data normalized to 0-mean, 1std? + (prefers_data_normalized : {True, False}) + * Is the algorithm deterministic for a given seed? + (is_deterministic : {True, False}) + * Can the algorithm handle sparse data? + (handles_sparse : {True, False}) + * What are the preferred types of the data array? + (preferred_dtype : list of tuples) + + Returns + ------- + dict + """ + raise NotImplementedError() + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + """Return the configuration space of this regression algorithm. + + Returns + ------- + HPOlibConfigSpace.configuration_space.ConfigurationSpace + The configuration space of this regression algorithm. + """ + raise NotImplementedError() + + def fit(self, X, y): + """The fit function calls the fit function of the underlying + scikit-learn model and returns `self`. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + Training data + + y : array-like, shape = [n_samples] + Targets + + Returns + ------- + self : returns an instance of self. + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" + raise NotImplementedError() + + def predict(self, X): + """The predict function calls the predict function of the + underlying scikit-learn model and returns an array with the predictions. + + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape = (n_samples,) + Returns the predicted values + + Notes + ----- + Please see the `scikit-learn API documentation + `_ for further information.""" + raise NotImplementedError() + + def predict_proba(self, X): + """Predict probabilities.
+ + Parameters + ---------- + X : array-like, shape = (n_samples, n_features) + + Returns + ------- + array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes) + """ + raise NotImplementedError() + + def get_estimator(self): + """Return the underlying estimator object. + + Returns + ------- + estimator : the underlying estimator object + """ + return self.estimator + + def __str__(self): + name = self.get_properties()['name'] + return "autosklearn.pipeline %s" % name + + diff --git a/autosklearn/pipeline/components/classification/__init__.py b/autosklearn/pipeline/components/classification/__init__.py new file mode 100644 index 0000000000..6b62ed19b9 --- /dev/null +++ b/autosklearn/pipeline/components/classification/__init__.py @@ -0,0 +1,169 @@ +__author__ = 'feurerm' + +from collections import OrderedDict +import copy +import importlib +import inspect +import os +import pkgutil +import sys + +from ..base import AutoSklearnClassificationAlgorithm +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition + +classifier_directory = os.path.split(__file__)[0] +_classifiers = OrderedDict() + + +for module_loader, module_name, ispkg in pkgutil.iter_modules([classifier_directory]): + full_module_name = "%s.%s" % (__package__, module_name) + if full_module_name not in sys.modules and not ispkg: + module = importlib.import_module(full_module_name) + + for member_name, obj in inspect.getmembers(module): + if inspect.isclass(obj) and AutoSklearnClassificationAlgorithm in obj.__bases__: + # TODO test if the obj implements the interface + # Keep in mind that this only instantiates the ensemble_wrapper, + # but not the real target classifier + classifier = obj + _classifiers[module_name] = classifier + + +class ClassifierChoice(object): + def __init__(self, **params): + choice = params['__choice__'] + del params['__choice__'] + self.choice = self.get_components()[choice](**params) + + @classmethod + def get_components(cls): + return _classifiers + + @classmethod + def get_available_components(cls, data_prop, + include=None, + exclude=None): + available_comp = cls.get_components() + components_dict = OrderedDict() + + if include is not None and exclude is not None: + raise ValueError("The arguments include and exclude cannot be used together.") + + if include is not None: + for incl in include: + if incl not in available_comp: + raise ValueError("Trying to include unknown component: " + "%s" % incl) + + for name in available_comp: + if include is not None and name not in include: + continue + elif exclude is not None and name in exclude: + continue + + entry = available_comp[name] + + # Avoid infinite loop + if entry == ClassifierChoice: + continue + + if entry.get_properties()['handles_classification'] is False: + continue + if data_prop.get('multiclass') is True and entry.get_properties()[ + 'handles_multiclass'] is False: + continue + if data_prop.get('multilabel') is True and available_comp[name].
\ + get_properties()['handles_multilabel'] is False: + continue + components_dict[name] = entry + + return components_dict + + @classmethod + def get_hyperparameter_search_space(cls, dataset_properties, + default=None, + include=None, + exclude=None): + if include is not None and exclude is not None: + raise ValueError("The arguments include_estimators and " + "exclude_estimators cannot be used together.") + + cs = ConfigurationSpace() + + # Compile a list of all estimator objects for this problem + available_estimators = cls.get_available_components( + data_prop=dataset_properties, + include=include, + exclude=exclude) + + if len(available_estimators) == 0: + raise ValueError("No classifiers found") + + if default is None: + defaults = ['random_forest', 'liblinear_svc', 'sgd', + 'libsvm_svc'] + list(available_estimators.keys()) + for default_ in defaults: + if default_ in available_estimators: + if include is not None and default_ not in include: + continue + if exclude is not None and default_ in exclude: + continue + default = default_ + break + + estimator = CategoricalHyperparameter('__choice__', + list(available_estimators.keys()), + default=default) + cs.add_hyperparameter(estimator) + for estimator_name in available_estimators.keys(): + # We have to retrieve the configuration space every time because + # we change the objects it returns. If we reused it, we could not + # retrieve the conditions further down + # TODO implement copy for hyperparameters and forbidden and + # conditions! + + estimator_configuration_space = available_estimators[ + estimator_name]. \ + get_hyperparameter_search_space(dataset_properties) + for parameter in estimator_configuration_space.get_hyperparameters(): + new_parameter = copy.deepcopy(parameter) + new_parameter.name = "%s:%s" % ( + estimator_name, new_parameter.name) + cs.add_hyperparameter(new_parameter) + # We must only add a condition if the hyperparameter is not + # conditional on something else + if len(estimator_configuration_space. + get_parents_of(parameter)) == 0: + condition = EqualsCondition(new_parameter, estimator, + estimator_name) + cs.add_condition(condition) + + for condition in available_estimators[estimator_name]. \ + get_hyperparameter_search_space( + dataset_properties).get_conditions(): + dlcs = condition.get_descendant_literal_conditions() + for dlc in dlcs: + if not dlc.child.name.startswith(estimator_name): + dlc.child.name = "%s:%s" % ( + estimator_name, dlc.child.name) + if not dlc.parent.name.startswith(estimator_name): + dlc.parent.name = "%s:%s" % ( + estimator_name, dlc.parent.name) + cs.add_condition(condition) + + for forbidden_clause in available_estimators[estimator_name]. 
\ + get_hyperparameter_search_space( + dataset_properties).forbidden_clauses: + dlcs = forbidden_clause.get_descendant_literal_clauses() + for dlc in dlcs: + if not dlc.hyperparameter.name.startswith(estimator_name): + dlc.hyperparameter.name = "%s:%s" % (estimator_name, + dlc.hyperparameter.name) + cs.add_forbidden_clause(forbidden_clause) + + return cs + + +_classifiers['classifier'] = ClassifierChoice \ No newline at end of file diff --git a/autosklearn/pipeline/components/classification/adaboost.py b/autosklearn/pipeline/components/classification/adaboost.py new file mode 100644 index 0000000000..abcaf1bc61 --- /dev/null +++ b/autosklearn/pipeline/components/classification/adaboost.py @@ -0,0 +1,97 @@ +import numpy as np + +from autosklearn.pipeline.implementations.MultilabelClassifier import \ + MultilabelClassifier + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter + +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * + + +class AdaboostClassifier(AutoSklearnClassificationAlgorithm): + + def __init__(self, n_estimators, learning_rate, algorithm, max_depth, + random_state=None): + self.n_estimators = int(n_estimators) + self.learning_rate = float(learning_rate) + self.algorithm = algorithm + self.random_state = random_state + self.max_depth = max_depth + self.estimator = None + + def fit(self, X, Y, sample_weight=None): + import sklearn.ensemble + import sklearn.tree + import sklearn.multiclass + + self.n_estimators = int(self.n_estimators) + self.learning_rate = float(self.learning_rate) + self.max_depth = int(self.max_depth) + base_estimator = sklearn.tree.DecisionTreeClassifier(max_depth=self.max_depth) + + estimator = sklearn.ensemble.AdaBoostClassifier( + base_estimator=base_estimator, + n_estimators=self.n_estimators, + learning_rate=self.learning_rate, + algorithm=self.algorithm, + random_state=self.random_state + ) + + if len(Y.shape) == 2 and Y.shape[1] > 1: + estimator = MultilabelClassifier(estimator, n_jobs=1) + estimator.fit(X, Y, sample_weight=sample_weight) + else: + estimator.fit(X, Y, sample_weight=sample_weight) + + self.estimator = estimator + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'AB', + 'name': 'AdaBoost Classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': False, + 'input': (DENSE, SPARSE, UNSIGNED_DATA), + 'output': (PREDICTIONS,), + # TODO find out what is best used here! + # But rather fortran or C-contiguous? 
+ 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + # base_estimator = Constant(name="base_estimator", value="None") + n_estimators = cs.add_hyperparameter(UniformIntegerHyperparameter( + name="n_estimators", lower=50, upper=500, default=50, log=False)) + learning_rate = cs.add_hyperparameter(UniformFloatHyperparameter( + name="learning_rate", lower=0.0001, upper=2, default=0.1, log=True)) + algorithm = cs.add_hyperparameter(CategoricalHyperparameter( + name="algorithm", choices=["SAMME.R", "SAMME"], default="SAMME.R")) + max_depth = cs.add_hyperparameter(UniformIntegerHyperparameter( + name="max_depth", lower=1, upper=10, default=1, log=False)) + return cs + diff --git a/autosklearn/pipeline/components/classification/bernoulli_nb.py b/autosklearn/pipeline/components/classification/bernoulli_nb.py new file mode 100644 index 0000000000..fc4e34f3a7 --- /dev/null +++ b/autosklearn/pipeline/components/classification/bernoulli_nb.py @@ -0,0 +1,113 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter + +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * + + +class BernoulliNB(AutoSklearnClassificationAlgorithm): + def __init__(self, alpha, fit_prior, random_state=None, verbose=0): + self.alpha = alpha + if fit_prior.lower() == "true": + self.fit_prior = True + elif fit_prior.lower() == "false": + self.fit_prior = False + else: + self.fit_prior = fit_prior + + self.random_state = random_state + self.verbose = int(verbose) + self.estimator = None + + def fit(self, X, y): + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1) + return self + + def iterative_fit(self, X, y, n_iter=1, refit=False): + import sklearn.naive_bayes + + if refit: + self.estimator = None + + if self.estimator is None: + self.n_iter = 0 + self.fully_fit_ = False + self.estimator = sklearn.naive_bayes.BernoulliNB( + alpha=self.alpha, fit_prior=self.fit_prior) + self.classes_ = np.unique(y.astype(int)) + + for iter in range(n_iter): + start = min(self.n_iter * 1000, y.shape[0]) + stop = min((self.n_iter + 1) * 1000, y.shape[0]) + # Upper limit, scipy.sparse doesn't seem to handle max > len(matrix) + stop = min(stop, y.shape[0]) + self.estimator.partial_fit(X[start:stop], y[start:stop], self.classes_) + self.n_iter += 1 + + if stop >= len(y): + self.fully_fit_ = True + break + + return self + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + elif not hasattr(self, 'fully_fit_'): + return False + else: + return self.fully_fit_ + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'BernoulliNB', + 'name': 'Bernoulli Naive Bayes classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + # sklearn website says: ... BernoulliNB is designed for + # binary/boolean features. 
+ 'handles_numerical_features': False, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': False, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'input': (DENSE, SPARSE, UNSIGNED_DATA), + 'output': (PREDICTIONS,), + 'preferred_dtype': np.bool} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + # the smoothing parameter is a non-negative float + # I will limit it to 100 and put it on a logarithmic scale. (SF) + # Please adjust that, if you know a proper range, this is just a guess. + alpha = UniformFloatHyperparameter(name="alpha", lower=1e-2, upper=100, + default=1, log=True) + + fit_prior = CategoricalHyperparameter(name="fit_prior", + choices=["True", "False"], + default="True") + + cs.add_hyperparameter(alpha) + cs.add_hyperparameter(fit_prior) + + return cs diff --git a/autosklearn/pipeline/components/classification/decision_tree.py b/autosklearn/pipeline/components/classification/decision_tree.py new file mode 100644 index 0000000000..e0804d555b --- /dev/null +++ b/autosklearn/pipeline/components/classification/decision_tree.py @@ -0,0 +1,108 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from autosklearn.pipeline.components.base import \ + AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * + + +class DecisionTree(AutoSklearnClassificationAlgorithm): + def __init__(self, criterion, splitter, max_features, max_depth, + min_samples_split, min_samples_leaf, min_weight_fraction_leaf, + max_leaf_nodes, class_weight=None, random_state=None): + self.criterion = criterion + self.splitter = splitter + self.max_features = max_features + self.max_depth = max_depth + self.min_samples_split = min_samples_split + self.min_samples_leaf = min_samples_leaf + self.max_leaf_nodes = max_leaf_nodes + self.min_weight_fraction_leaf = min_weight_fraction_leaf + self.random_state = random_state + self.class_weight = class_weight + self.estimator = None + + def fit(self, X, y, sample_weight=None): + from sklearn.tree import DecisionTreeClassifier + + self.max_features = float(self.max_features) + if self.max_depth == "None": + # fix: bind the local variable used below; otherwise this + # branch raised a NameError when building the estimator + max_depth = None + else: + num_features = X.shape[1] + max_depth = max(1, int(np.round(self.max_depth * num_features, 0))) + self.min_samples_split = int(self.min_samples_split) + self.min_samples_leaf = int(self.min_samples_leaf) + if self.max_leaf_nodes == "None": + self.max_leaf_nodes = None + else: + self.max_leaf_nodes = int(self.max_leaf_nodes) + self.min_weight_fraction_leaf = float(self.min_weight_fraction_leaf) + + self.estimator = DecisionTreeClassifier( + criterion=self.criterion, + max_depth=max_depth, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + max_leaf_nodes=self.max_leaf_nodes, + class_weight=self.class_weight, + random_state=self.random_state) + self.estimator.fit(X, y, sample_weight=sample_weight) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + 
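# (Not part of the patch: a worked example of the max_depth handling in
# DecisionTree.fit above. The hyperparameter is a fraction of the number
# of features, so with made-up numbers self.max_depth = 0.5 and
# X.shape[1] = 20, the effective depth is
# max(1, int(np.round(0.5 * 20, 0))) == 10.)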
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'DT',
+                'name': 'Decision Tree Classifier',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': False,
+                # TODO find out if this is good because of sparsity...
+                'prefers_data_normalized': False,
+                'handles_regression': False,
+                'handles_classification': True,
+                'handles_multiclass': True,
+                'handles_multilabel': True,
+                'is_deterministic': True,
+                'handles_sparse': True,
+                'input': (DENSE, SPARSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                # TODO find out what is best used here!
+                # But rather fortran or C-contiguous?
+                'preferred_dtype': np.float32}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        cs = ConfigurationSpace()
+
+        criterion = cs.add_hyperparameter(CategoricalHyperparameter(
+            "criterion", ["gini", "entropy"], default="gini"))
+        splitter = cs.add_hyperparameter(Constant("splitter", "best"))
+        max_features = cs.add_hyperparameter(Constant('max_features', 1.0))
+        max_depth = cs.add_hyperparameter(UniformFloatHyperparameter(
+            'max_depth', 0., 2., default=0.5))
+        min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter(
+            "min_samples_split", 2, 20, default=2))
+        min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter(
+            "min_samples_leaf", 1, 20, default=1))
+        min_weight_fraction_leaf = cs.add_hyperparameter(
+            Constant("min_weight_fraction_leaf", 0.0))
+        max_leaf_nodes = cs.add_hyperparameter(
+            UnParametrizedHyperparameter("max_leaf_nodes", "None"))
+
+        return cs
diff --git a/autosklearn/pipeline/components/classification/extra_trees.py b/autosklearn/pipeline/components/classification/extra_trees.py
new file mode 100644
index 0000000000..e4276a50df
--- /dev/null
+++ b/autosklearn/pipeline/components/classification/extra_trees.py
@@ -0,0 +1,160 @@
+import numpy as np
+
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \
+    UniformIntegerHyperparameter, CategoricalHyperparameter, \
+    UnParametrizedHyperparameter, Constant
+
+from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm
+from autosklearn.pipeline.constants import *
+
+
+class ExtraTreesClassifier(AutoSklearnClassificationAlgorithm):
+
+    def __init__(self, n_estimators, criterion, min_samples_leaf,
+                 min_samples_split, max_features, max_leaf_nodes_or_max_depth="max_depth",
+                 bootstrap=False, max_leaf_nodes=None, max_depth="None",
+                 min_weight_fraction_leaf=0.0,
+                 oob_score=False, n_jobs=1, random_state=None, verbose=0,
+                 class_weight=None):
+
+        self.n_estimators = int(n_estimators)
+        self.estimator_increment = 10
+        if criterion not in ("gini", "entropy"):
+            raise ValueError("'criterion' is not in ('gini', 'entropy'): "
+                             "%s" % criterion)
+        self.criterion = criterion
+
+        if max_leaf_nodes_or_max_depth == "max_depth":
+            self.max_leaf_nodes = None
+            if max_depth == "None":
+                self.max_depth = None
+            else:
+                self.max_depth = int(max_depth)
+            #if use_max_depth == "True":
+            #    self.max_depth = int(max_depth)
+            #elif use_max_depth == "False":
+            #    self.max_depth = None
+        else:
+            if max_leaf_nodes == "None":
+                self.max_leaf_nodes = None
+            else:
+                self.max_leaf_nodes = int(max_leaf_nodes)
+            self.max_depth = None
+
+        self.min_samples_leaf = int(min_samples_leaf)
+        self.min_samples_split = int(min_samples_split)
+
+        self.max_features = float(max_features)
+
+        if bootstrap == "True":
+            self.bootstrap = True
+        elif bootstrap == "False":
+            self.bootstrap = False
+        else:
+            # fix: keep already-boolean values instead of leaving the
+            # attribute unset (mirrors the fit_prior handling in the
+            # naive Bayes components)
+            self.bootstrap = bootstrap
+
+        self.oob_score = oob_score
+        self.n_jobs = int(n_jobs)
+        self.random_state = random_state
+        self.verbose = int(verbose)
+        self.class_weight = class_weight
+        self.estimator = None
+
+    def fit(self, X, y, sample_weight=None, refit=False):
+        if self.estimator is None or refit:
+            self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight,
+                               refit=refit)
+
+        while not self.configuration_fully_fitted():
+            self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight)
+        return self
+
+    def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False):
+        from sklearn.ensemble import ExtraTreesClassifier as ETC
+
+        if refit:
+            self.estimator = None
+
+        if self.estimator is None:
+            num_features = X.shape[1]
+            max_features = int(
+                float(self.max_features) * (np.log(num_features) + 1))
+            # Use at most half of the features
+            max_features = max(1, min(int(X.shape[1] / 2), max_features))
+            self.estimator = ETC(
+                n_estimators=0, criterion=self.criterion,
+                max_depth=self.max_depth, min_samples_split=self.min_samples_split,
+                min_samples_leaf=self.min_samples_leaf, bootstrap=self.bootstrap,
+                max_features=max_features, max_leaf_nodes=self.max_leaf_nodes,
+                oob_score=self.oob_score, n_jobs=self.n_jobs, verbose=self.verbose,
+                random_state=self.random_state,
+                class_weight=self.class_weight,
+                warm_start=True
+            )
+
+        tmp = self.estimator  # TODO copy ?
+        tmp.n_estimators += n_iter
+        tmp.fit(X, y, sample_weight=sample_weight)
+        self.estimator = tmp
+        return self
+
+    def configuration_fully_fitted(self):
+        if self.estimator is None:
+            return False
+        return not len(self.estimator.estimators_) < self.n_estimators
+
+    def predict(self, X):
+        if self.estimator is None:
+            raise NotImplementedError
+        return self.estimator.predict(X)
+
+    def predict_proba(self, X):
+        if self.estimator is None:
+            raise NotImplementedError()
+        return self.estimator.predict_proba(X)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'ET',
+                'name': 'Extra Trees Classifier',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': False,
+                # TODO find out if this is good because of sparsity...
+                'prefers_data_normalized': False,
+                'handles_regression': False,
+                'handles_classification': True,
+                'handles_multiclass': True,
+                'handles_multilabel': True,
+                'is_deterministic': True,
+                'handles_sparse': True,
+                'input': (DENSE, SPARSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                # TODO find out what is best used here!
+                # But rather fortran or C-contiguous?
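+                # Note: iterative_fit() above relies on warm_start=True, so
+                # each call grows estimators_ by n_iter new trees instead of
+                # refitting the whole forest; configuration_fully_fitted()
+                # simply compares len(estimators_) against n_estimators.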
+ 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + n_estimators = cs.add_hyperparameter(Constant("n_estimators", 100)) + criterion = cs.add_hyperparameter(CategoricalHyperparameter( + "criterion", ["gini", "entropy"], default="gini")) + max_features = cs.add_hyperparameter(UniformFloatHyperparameter( + "max_features", 0.5, 5, default=1)) + + max_depth = cs.add_hyperparameter( + UnParametrizedHyperparameter(name="max_depth", value="None")) + + min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_split", 2, 20, default=2)) + min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_leaf", 1, 20, default=1)) + min_weight_fraction_leaf = cs.add_hyperparameter(Constant( + 'min_weight_fraction_leaf', 0.)) + + bootstrap = cs.add_hyperparameter(CategoricalHyperparameter( + "bootstrap", ["True", "False"], default="False")) + + return cs diff --git a/autosklearn/pipeline/components/classification/gaussian_nb.py b/autosklearn/pipeline/components/classification/gaussian_nb.py new file mode 100644 index 0000000000..2c53d158de --- /dev/null +++ b/autosklearn/pipeline/components/classification/gaussian_nb.py @@ -0,0 +1,88 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace + +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * + + +class GaussianNB(AutoSklearnClassificationAlgorithm): + + def __init__(self, random_state=None, verbose=0): + + self.random_state = random_state + self.verbose = int(verbose) + self.estimator = None + + def fit(self, X, y): + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1) + return self + + def iterative_fit(self, X, y, n_iter=1, refit=False): + import sklearn.naive_bayes + + if refit: + self.estimator = None + + if self.estimator is None: + self.n_iter = 0 + self.fully_fit_ = False + self.estimator = sklearn.naive_bayes.GaussianNB() + self.classes_ = np.unique(y.astype(int)) + + for iter in range(n_iter): + start = min(self.n_iter * 1000, y.shape[0]) + stop = min((self.n_iter + 1) * 1000, y.shape[0]) + self.estimator.partial_fit(X[start:stop], y[start:stop], + self.classes_) + self.n_iter += 1 + + if stop >= len(y): + self.fully_fit_ = True + break + + return self + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + elif not hasattr(self, 'fully_fit_'): + return False + else: + return self.fully_fit_ + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'GaussianNB', + 'name': 'Gaussian Naive Bayes classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'input': (DENSE, UNSIGNED_DATA), + 'output': (PREDICTIONS,), + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = 
ConfigurationSpace() + return cs + diff --git a/autosklearn/pipeline/components/classification/gradient_boosting.py b/autosklearn/pipeline/components/classification/gradient_boosting.py new file mode 100644 index 0000000000..cc95870f24 --- /dev/null +++ b/autosklearn/pipeline/components/classification/gradient_boosting.py @@ -0,0 +1,157 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, UnParametrizedHyperparameter, Constant, \ + CategoricalHyperparameter + +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * + + +class GradientBoostingClassifier(AutoSklearnClassificationAlgorithm): + def __init__(self, loss, learning_rate, n_estimators, subsample, + min_samples_split, min_samples_leaf, + min_weight_fraction_leaf, max_depth, max_features, + max_leaf_nodes, init=None, random_state=None, verbose=0): + self.loss = loss + self.learning_rate = learning_rate + self.n_estimators = n_estimators + self.subsample = subsample + self.min_samples_split = min_samples_split + self.min_samples_leaf = min_samples_leaf + self.min_weight_fraction_leaf = min_weight_fraction_leaf + self.max_depth = max_depth + self.max_features = max_features + self.max_leaf_nodes = max_leaf_nodes + self.init = init + self.random_state = random_state + self.verbose = verbose + self.estimator = None + + def fit(self, X, y, sample_weight=None, refit=False): + if self.estimator is None or refit: + self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight, + refit=refit) + + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight) + return self + + def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False): + import sklearn.ensemble + + # Special fix for gradient boosting! + if isinstance(X, np.ndarray): + X = np.ascontiguousarray(X, dtype=X.dtype) + if refit: + self.estimator = None + + if self.estimator is None: + self.learning_rate = float(self.learning_rate) + self.n_estimators = int(self.n_estimators) + self.subsample = float(self.subsample) + self.min_samples_split = int(self.min_samples_split) + self.min_samples_leaf = int(self.min_samples_leaf) + self.min_weight_fraction_leaf = float(self.min_weight_fraction_leaf) + if self.max_depth == "None": + self.max_depth = None + else: + self.max_depth = int(self.max_depth) + num_features = X.shape[1] + max_features = int( + float(self.max_features) * (np.log(num_features) + 1)) + # Use at most half of the features + max_features = max(1, min(int(X.shape[1] / 2), max_features)) + if self.max_leaf_nodes == "None": + self.max_leaf_nodes = None + else: + self.max_leaf_nodes = int(self.max_leaf_nodes) + self.verbose = int(self.verbose) + + self.estimator = sklearn.ensemble.GradientBoostingClassifier( + loss=self.loss, + learning_rate=self.learning_rate, + n_estimators=0, + subsample=self.subsample, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + min_weight_fraction_leaf=self.min_weight_fraction_leaf, + max_depth=self.max_depth, + max_features=max_features, + max_leaf_nodes=self.max_leaf_nodes, + init=self.init, + random_state=self.random_state, + verbose=self.verbose, + warm_start=True, + ) + + tmp = self.estimator # TODO copy ? 
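+        # Note: because the estimator was created with warm_start=True and
+        # n_estimators=0, the fit() call below only trains the n_iter newly
+        # added boosting stages; stages already in estimators_ are kept, so
+        # two calls with n_iter=10 cost about the same as one with n_iter=20.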
+        tmp.n_estimators += n_iter
+        tmp.fit(X, y, sample_weight=sample_weight)
+        self.estimator = tmp
+
+        return self
+
+
+    def configuration_fully_fitted(self):
+        if self.estimator is None:
+            return False
+        return not len(self.estimator.estimators_) < self.n_estimators
+
+    def predict(self, X):
+        if self.estimator is None:
+            raise NotImplementedError
+        return self.estimator.predict(X)
+
+    def predict_proba(self, X):
+        if self.estimator is None:
+            raise NotImplementedError()
+        return self.estimator.predict_proba(X)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'GB',
+                'name': 'Gradient Boosting Classifier',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': False,
+                # TODO find out if this is good because of sparsity...
+                'prefers_data_normalized': False,
+                'handles_regression': False,
+                'handles_classification': True,
+                'handles_multiclass': True,
+                'handles_multilabel': False,
+                'is_deterministic': True,
+                'handles_sparse': False,
+                'input': (DENSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                # TODO find out what is best used here!
+                # But rather fortran or C-contiguous?
+                'preferred_dtype': np.float32}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        cs = ConfigurationSpace()
+        loss = cs.add_hyperparameter(Constant("loss", "deviance"))
+        learning_rate = cs.add_hyperparameter(UniformFloatHyperparameter(
+            name="learning_rate", lower=0.0001, upper=1, default=0.1, log=True))
+        n_estimators = cs.add_hyperparameter(Constant("n_estimators", 100))
+        max_depth = cs.add_hyperparameter(UniformIntegerHyperparameter(
+            name="max_depth", lower=1, upper=10, default=3))
+        min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter(
+            name="min_samples_split", lower=2, upper=20, default=2, log=False))
+        min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter(
+            name="min_samples_leaf", lower=1, upper=20, default=1, log=False))
+        min_weight_fraction_leaf = cs.add_hyperparameter(
+            UnParametrizedHyperparameter("min_weight_fraction_leaf", 0.))
+        subsample = cs.add_hyperparameter(UniformFloatHyperparameter(
+            name="subsample", lower=0.01, upper=1.0, default=1.0, log=False))
+        max_features = cs.add_hyperparameter(UniformFloatHyperparameter(
+            "max_features", 0.5, 5, default=1))
+        max_leaf_nodes = cs.add_hyperparameter(UnParametrizedHyperparameter(
+            name="max_leaf_nodes", value="None"))
+
+        return cs
+
diff --git a/autosklearn/pipeline/components/classification/k_nearest_neighbors.py b/autosklearn/pipeline/components/classification/k_nearest_neighbors.py
new file mode 100644
index 0000000000..f0631b9eb4
--- /dev/null
+++ b/autosklearn/pipeline/components/classification/k_nearest_neighbors.py
@@ -0,0 +1,76 @@
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \
+    Constant, UniformIntegerHyperparameter
+
+from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm
+from autosklearn.pipeline.constants import *
+
+
+class KNearestNeighborsClassifier(AutoSklearnClassificationAlgorithm):
+
+    def __init__(self, n_neighbors, weights, p, random_state=None):
+        self.n_neighbors = n_neighbors
+        self.weights = weights
+        self.p = p
+        self.random_state = random_state
+        # fix: initialize so predict()/predict_proba() can detect an
+        # unfitted estimator instead of raising an AttributeError
+        self.estimator = None
+
+    def fit(self, X, Y):
+        import sklearn.neighbors
+        import sklearn.multiclass
+
+        estimator = \
sklearn.neighbors.KNeighborsClassifier(n_neighbors=self.n_neighbors, + weights=self.weights, + p=self.p) + + if len(Y.shape) == 2 and Y.shape[1] > 1: + self.estimator = sklearn.multiclass.OneVsRestClassifier(estimator, n_jobs=1) + else: + self.estimator = estimator + + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'KNN', + 'name': 'K-Nearest Neighbor Classification', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # Find out if this is good because of sparsity + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': True, + 'input': (DENSE, SPARSE, UNSIGNED_DATA), + 'output': (PREDICTIONS,), + # TODO find out what is best used here! + 'preferred_dtype' : None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + n_neighbors = cs.add_hyperparameter(UniformIntegerHyperparameter( + name="n_neighbors", lower=1, upper=100, log=True, default=1)) + weights = cs.add_hyperparameter(CategoricalHyperparameter( + name="weights", choices=["uniform", "distance"], default="uniform")) + p = cs.add_hyperparameter(CategoricalHyperparameter( + name="p", choices=[1, 2], default=2)) + + return cs diff --git a/autosklearn/pipeline/components/classification/lda.py b/autosklearn/pipeline/components/classification/lda.py new file mode 100644 index 0000000000..1802e642bf --- /dev/null +++ b/autosklearn/pipeline/components/classification/lda.py @@ -0,0 +1,98 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition + +from autosklearn.pipeline.components.base import \ + AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * +from autosklearn.pipeline.implementations.util import softmax + + +class LDA(AutoSklearnClassificationAlgorithm): + def __init__(self, shrinkage, n_components, tol, shrinkage_factor=0.5, + random_state=None): + self.shrinkage = shrinkage + self.n_components = n_components + self.tol = tol + self.shrinkage_factor = shrinkage_factor + self.estimator = None + + def fit(self, X, Y): + import sklearn.lda + import sklearn.multiclass + + if self.shrinkage == "None": + self.shrinkage = None + solver = 'svd' + elif self.shrinkage == "auto": + solver = 'lsqr' + elif self.shrinkage == "manual": + self.shrinkage = float(self.shrinkage_factor) + solver = 'lsqr' + else: + raise ValueError(self.shrinkage) + + self.n_components = int(self.n_components) + self.tol = float(self.tol) + + estimator = sklearn.lda.LDA(n_components=self.n_components, + shrinkage=self.shrinkage, + tol=self.tol, + solver=solver) + + if len(Y.shape) == 2 and Y.shape[1] > 1: + self.estimator = sklearn.multiclass.OneVsRestClassifier(estimator, n_jobs=1) + else: + self.estimator = estimator + + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is 
None: + raise NotImplementedError() + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + + df = self.estimator.predict_proba(X) + return softmax(df) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'LDA', + 'name': 'Linear Discriminant Analysis', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # Find out if this is good because of sparsity + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': False, + 'input': (DENSE, UNSIGNED_DATA), + 'output': (PREDICTIONS,), + # TODO find out what is best used here! + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + shrinkage = cs.add_hyperparameter(CategoricalHyperparameter( + "shrinkage", ["None", "auto", "manual"], default="None")) + shrinkage_factor = cs.add_hyperparameter(UniformFloatHyperparameter( + "shrinkage_factor", 0., 1., 0.5)) + n_components = cs.add_hyperparameter(UniformIntegerHyperparameter( + 'n_components', 1, 250, default=10)) + tol = cs.add_hyperparameter(UniformFloatHyperparameter( + "tol", 1e-5, 1e-1, default=1e-4, log=True)) + + cs.add_condition(EqualsCondition(shrinkage_factor, shrinkage, "manual")) + return cs diff --git a/autosklearn/pipeline/components/classification/liblinear_svc.py b/autosklearn/pipeline/components/classification/liblinear_svc.py new file mode 100644 index 0000000000..3b66ccde59 --- /dev/null +++ b/autosklearn/pipeline/components/classification/liblinear_svc.py @@ -0,0 +1,130 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter, Constant +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ + ForbiddenAndConjunction + +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.implementations.util import softmax +from autosklearn.pipeline.constants import * + + +class LibLinear_SVC(AutoSklearnClassificationAlgorithm): + # Liblinear is not deterministic as it uses a RNG inside + def __init__(self, penalty, loss, dual, tol, C, multi_class, + fit_intercept, intercept_scaling, class_weight=None, + random_state=None): + self.penalty = penalty + self.loss = loss + self.dual = dual + self.tol = tol + self.C = C + self.multi_class = multi_class + self.fit_intercept = fit_intercept + self.intercept_scaling = intercept_scaling + self.class_weight = class_weight + self.random_state = random_state + self.estimator = None + + def fit(self, X, Y): + import sklearn.svm + import sklearn.multiclass + + self.C = float(self.C) + self.tol = float(self.tol) + + self.dual = self.dual == 'True' + self.fit_intercept = self.fit_intercept == 'True' + self.intercept_scaling = float(self.intercept_scaling) + + if self.class_weight == "None": + self.class_weight = None + + estimator = sklearn.svm.LinearSVC(penalty=self.penalty, + loss=self.loss, + dual=self.dual, + tol=self.tol, + C=self.C, + class_weight=self.class_weight, + fit_intercept=self.fit_intercept, + intercept_scaling=self.intercept_scaling, + multi_class=self.multi_class, + random_state=self.random_state) + + if len(Y.shape) == 2 and Y.shape[1] > 
1: + self.estimator = sklearn.multiclass.OneVsRestClassifier(estimator, n_jobs=1) + else: + self.estimator = estimator + + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + + df = self.estimator.decision_function(X) + return softmax(df) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'Liblinear-SVC', + 'name': 'Liblinear Support Vector Classification', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # Find out if this is good because of sparsity + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': False, + 'handles_sparse': True, + 'input': (SPARSE, DENSE, UNSIGNED_DATA), + 'output': (PREDICTIONS,), + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + penalty = cs.add_hyperparameter(CategoricalHyperparameter( + "penalty", ["l1", "l2"], default="l2")) + loss = cs.add_hyperparameter(CategoricalHyperparameter( + "loss", ["hinge", "squared_hinge"], default="squared_hinge")) + dual = cs.add_hyperparameter(Constant("dual", "False")) + # This is set ad-hoc + tol = cs.add_hyperparameter(UniformFloatHyperparameter( + "tol", 1e-5, 1e-1, default=1e-4, log=True)) + C = cs.add_hyperparameter(UniformFloatHyperparameter( + "C", 0.03125, 32768, log=True, default=1.0)) + multi_class = cs.add_hyperparameter(Constant("multi_class", "ovr")) + # These are set ad-hoc + fit_intercept = cs.add_hyperparameter(Constant("fit_intercept", "True")) + intercept_scaling = cs.add_hyperparameter(Constant( + "intercept_scaling", 1)) + + penalty_and_loss = ForbiddenAndConjunction( + ForbiddenEqualsClause(penalty, "l1"), + ForbiddenEqualsClause(loss, "hinge") + ) + constant_penalty_and_loss = ForbiddenAndConjunction( + ForbiddenEqualsClause(dual, "False"), + ForbiddenEqualsClause(penalty, "l2"), + ForbiddenEqualsClause(loss, "hinge") + ) + penalty_and_dual = ForbiddenAndConjunction( + ForbiddenEqualsClause(dual, "False"), + ForbiddenEqualsClause(penalty, "l1") + ) + cs.add_forbidden_clause(penalty_and_loss) + cs.add_forbidden_clause(constant_penalty_and_loss) + cs.add_forbidden_clause(penalty_and_dual) + return cs diff --git a/autosklearn/pipeline/components/classification/libsvm_svc.py b/autosklearn/pipeline/components/classification/libsvm_svc.py new file mode 100644 index 0000000000..67d5058348 --- /dev/null +++ b/autosklearn/pipeline/components/classification/libsvm_svc.py @@ -0,0 +1,201 @@ +import resource + +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.conditions import EqualsCondition, InCondition +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter + +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * +from autosklearn.pipeline.implementations.util import softmax + + +# From the scikit-learn master branch. 
Will hopefully be there in sklearn 0.17 +def _ovr_decision_function(predictions, confidences, n_classes): + """Compute a continuous, tie-breaking ovr decision function. + It is important to include a continuous value, not only votes, + to make computing AUC or calibration meaningful. + Parameters + ---------- + predictions : array-like, shape (n_samples, n_classifiers) + Predicted classes for each binary classifier. + confidences : array-like, shape (n_samples, n_classifiers) + Decision functions or predicted probabilities for positive class + for each binary classifier. + n_classes : int + Number of classes. n_classifiers must be + ``n_classes * (n_classes - 1 ) / 2`` + """ + n_samples = predictions.shape[0] + votes = np.zeros((n_samples, n_classes)) + sum_of_confidences = np.zeros((n_samples, n_classes)) + + k = 0 + for i in range(n_classes): + for j in range(i + 1, n_classes): + sum_of_confidences[:, i] -= confidences[:, k] + sum_of_confidences[:, j] += confidences[:, k] + votes[predictions[:, k] == 0, i] += 1 + votes[predictions[:, k] == 1, j] += 1 + k += 1 + + max_confidences = sum_of_confidences.max() + min_confidences = sum_of_confidences.min() + + if max_confidences == min_confidences: + return votes + + # Scale the sum_of_confidences to (-0.5, 0.5) and add it with votes. + # The motivation is to use confidence levels as a way to break ties in + # the votes without switching any decision made based on a difference + # of 1 vote. + eps = np.finfo(sum_of_confidences.dtype).eps + max_abs_confidence = max(abs(max_confidences), abs(min_confidences)) + scale = (0.5 - eps) / max_abs_confidence + return votes + sum_of_confidences * scale + + +class LibSVM_SVC(AutoSklearnClassificationAlgorithm): + def __init__(self, C, kernel, gamma, shrinking, tol, max_iter, + class_weight=None, degree=3, coef0=0, random_state=None): + self.C = C + self.kernel = kernel + self.degree = degree + self.gamma = gamma + self.coef0 = coef0 + self.shrinking = shrinking + self.tol = tol + self.class_weight = class_weight + self.max_iter = max_iter + self.random_state = random_state + self.estimator = None + + def fit(self, X, Y): + import sklearn.svm + + try: + soft, hard = resource.getrlimit(resource.RLIMIT_AS) + if soft > 0: + soft /= 1024 * 1024 + maxrss = resource.getrusage(resource.RUSAGE_SELF)[2] / 1024 + cache_size = (soft - maxrss) / 1.5 + else: + cache_size = 200 + except Exception: + cache_size = 200 + + self.C = float(self.C) + if self.degree is None: + self.degree = 3 + else: + self.degree = int(self.degree) + if self.gamma is None: + self.gamma = 0.0 + else: + self.gamma = float(self.gamma) + if self.coef0 is None: + self.coef0 = 0.0 + else: + self.coef0 = float(self.coef0) + self.tol = float(self.tol) + self.max_iter = float(self.max_iter) + self.shrinking = self.shrinking == 'True' + + if self.class_weight == "None": + self.class_weight = None + + self.estimator = sklearn.svm.SVC(C=self.C, + kernel=self.kernel, + degree=self.degree, + gamma=self.gamma, + coef0=self.coef0, + shrinking=self.shrinking, + tol=self.tol, + class_weight=self.class_weight, + max_iter=self.max_iter, + random_state=self.random_state, + cache_size=cache_size) + # probability=True) + self.estimator.fit(X, Y) + return self + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + # return self.estimator.predict_proba(X) + decision = self.estimator.decision_function(X) + if 
len(self.estimator.classes_) > 2:
+            decision = _ovr_decision_function(decision < 0, decision,
+                                              len(self.estimator.classes_))
+        return softmax(decision)
+
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'LibSVM-SVC',
+                'name': 'LibSVM Support Vector Classification',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': True,
+                # TODO find out if this is good because of sparsity...
+                'prefers_data_normalized': False,
+                'handles_regression': False,
+                'handles_classification': True,
+                'handles_multiclass': True,
+                'handles_multilabel': False,
+                'is_deterministic': True,
+                # TODO find out if this is right!
+                # this here suggests so http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use
+                'handles_sparse': True,
+                'input': (DENSE, SPARSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                # TODO find out what is best used here!
+                # C-contiguous and double precision...
+                'preferred_dtype': None}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        C = UniformFloatHyperparameter("C", 0.03125, 32768, log=True,
+                                       default=1.0)
+        # No linear kernel here, because we have liblinear
+        kernel = CategoricalHyperparameter(name="kernel",
+                                           choices=["rbf", "poly", "sigmoid"],
+                                           default="rbf")
+        degree = UniformIntegerHyperparameter("degree", 1, 5, default=3)
+        gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8,
+                                           log=True, default=0.1)
+        # TODO this is totally ad-hoc
+        coef0 = UniformFloatHyperparameter("coef0", -1, 1, default=0)
+        # probability is no hyperparameter, but an argument to the SVM algo
+        shrinking = CategoricalHyperparameter("shrinking", ["True", "False"],
+                                              default="True")
+        tol = UniformFloatHyperparameter("tol", 1e-5, 1e-1, default=1e-4,
+                                         log=True)
+        # cache size is not a hyperparameter, but an argument to the program!
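+        # Note: max_iter=-1 below is sklearn's convention for "no internal
+        # iteration cap", so the running time is bounded by the surrounding
+        # resource limits rather than by libsvm itself.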
+ max_iter = UnParametrizedHyperparameter("max_iter", -1) + + cs = ConfigurationSpace() + cs.add_hyperparameter(C) + cs.add_hyperparameter(kernel) + cs.add_hyperparameter(degree) + cs.add_hyperparameter(gamma) + cs.add_hyperparameter(coef0) + cs.add_hyperparameter(shrinking) + cs.add_hyperparameter(tol) + cs.add_hyperparameter(max_iter) + + degree_depends_on_poly = EqualsCondition(degree, kernel, "poly") + coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"]) + cs.add_condition(degree_depends_on_poly) + cs.add_condition(coef0_condition) + + return cs diff --git a/autosklearn/pipeline/components/classification/multinomial_nb.py b/autosklearn/pipeline/components/classification/multinomial_nb.py new file mode 100644 index 0000000000..bc144676b4 --- /dev/null +++ b/autosklearn/pipeline/components/classification/multinomial_nb.py @@ -0,0 +1,123 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter + +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * + + +class MultinomialNB(AutoSklearnClassificationAlgorithm): + + def __init__(self, alpha, fit_prior, random_state=None, verbose=0): + self.alpha = alpha + if fit_prior.lower() == "true": + self.fit_prior = True + elif fit_prior.lower() == "false": + self.fit_prior = False + else: + self.fit_prior = fit_prior + + self.random_state = random_state + self.verbose = int(verbose) + self.estimator = None + + def fit(self, X, y): + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1) + return self + + def iterative_fit(self, X, y, n_iter=1, refit=False): + import sklearn.naive_bayes + import scipy.sparse + + if refit: + self.estimator = None + + if self.estimator is None: + self.n_iter = 0 + self.fully_fit_ = False + self.estimator = sklearn.naive_bayes.MultinomialNB( + alpha=self.alpha, fit_prior=self.fit_prior) + self.classes_ = np.unique(y.astype(int)) + + # Because the pipeline guarantees that each feature is positive, + # clip all values below zero to zero + if scipy.sparse.issparse(X): + X.data[X.data < 0] = 0.0 + else: + X[X < 0] = 0.0 + + for iter in range(n_iter): + start = min(self.n_iter * 1000, y.shape[0]) + stop = min((self.n_iter + 1) * 1000, y.shape[0]) + self.estimator.partial_fit(X[start:stop], y[start:stop], + self.classes_) + self.n_iter += 1 + + if stop >= len(y): + self.fully_fit_ = True + break + + return self + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + elif not hasattr(self, 'fully_fit_'): + return False + else: + return self.fully_fit_ + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'MultinomialNB', + 'name': 'Multinomial Naive Bayes classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + # sklearn website says: The multinomial distribution normally + # requires integer feature counts. However, in practice, + # fractional counts such as tf-idf may also work. 
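+                # e.g. (illustrative) the fractional output of
+                #     sklearn.feature_extraction.text.TfidfVectorizer
+                # is an acceptable input in practice.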
+ 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'input': (DENSE, SPARSE, SIGNED_DATA), + 'output': (PREDICTIONS,), + 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + # the smoothing parameter is a non-negative float + # I will limit it to 100 and put it on a logarithmic scale. (SF) + # Please adjust that, if you know a proper range, this is just a guess. + alpha = UniformFloatHyperparameter(name="alpha", lower=1e-2, upper=100, + default=1, log=True) + + fit_prior = CategoricalHyperparameter(name="fit_prior", + choices=["True", "False"], + default="True") + + cs.add_hyperparameter(alpha) + cs.add_hyperparameter(fit_prior) + + return cs + diff --git a/autosklearn/pipeline/components/classification/passive_aggressive.py b/autosklearn/pipeline/components/classification/passive_aggressive.py new file mode 100644 index 0000000000..9b9da05d2c --- /dev/null +++ b/autosklearn/pipeline/components/classification/passive_aggressive.py @@ -0,0 +1,100 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter, UnParametrizedHyperparameter, \ + UniformIntegerHyperparameter + +from autosklearn.pipeline.components.base import \ + AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * +from autosklearn.pipeline.implementations.util import softmax + + +class PassiveAggressive(AutoSklearnClassificationAlgorithm): + def __init__(self, C, fit_intercept, n_iter, loss, random_state=None): + self.C = float(C) + self.fit_intercept = fit_intercept == 'True' + self.n_iter = int(n_iter) + self.loss = loss + self.random_state = random_state + self.estimator = None + + def fit(self, X, y): + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1) + + return self + + def iterative_fit(self, X, y, n_iter=1, refit=False): + from sklearn.linear_model.passive_aggressive import \ + PassiveAggressiveClassifier + + if refit: + self.estimator = None + + if self.estimator is None: + self.estimator = PassiveAggressiveClassifier( + C=self.C, fit_intercept=self.fit_intercept, n_iter=1, + loss=self.loss, shuffle=True, random_state=self.random_state, + warm_start=True) + self.classes_ = np.unique(y.astype(int)) + + self.estimator.n_iter += n_iter + self.estimator.fit(X, y) + + return self + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + return not self.estimator.n_iter < self.n_iter + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + + df = self.estimator.decision_function(X) + return softmax(df) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'PassiveAggressive Classifier', + 'name': 'Passive Aggressive Stochastic Gradient Descent ' + 'Classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': False, + 'handles_classification': 
True,
+                'handles_multiclass': True,
+                'handles_multilabel': False,
+                'is_deterministic': True,
+                'handles_sparse': True,
+                'input': (DENSE, SPARSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                # TODO find out what is best used here!
+                'preferred_dtype': None}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        loss = CategoricalHyperparameter("loss",
+                                         ["hinge", "squared_hinge"],
+                                         default="hinge")
+        fit_intercept = UnParametrizedHyperparameter("fit_intercept", "True")
+        n_iter = UniformIntegerHyperparameter("n_iter", 5, 1000, default=20,
+                                              log=True)
+        C = UniformFloatHyperparameter("C", 1e-5, 10, 1, log=True)
+        cs = ConfigurationSpace()
+        cs.add_hyperparameter(loss)
+        cs.add_hyperparameter(fit_intercept)
+        cs.add_hyperparameter(n_iter)
+        cs.add_hyperparameter(C)
+        return cs
diff --git a/autosklearn/pipeline/components/classification/proj_logit.py b/autosklearn/pipeline/components/classification/proj_logit.py
new file mode 100644
index 0000000000..c9c4d1b4be
--- /dev/null
+++ b/autosklearn/pipeline/components/classification/proj_logit.py
@@ -0,0 +1,60 @@
+import numpy as np
+
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \
+    UniformIntegerHyperparameter, CategoricalHyperparameter, \
+    UnParametrizedHyperparameter, Constant
+
+from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm
+from autosklearn.pipeline.constants import *
+from autosklearn.pipeline.implementations import ProjLogit
+
+
+class ProjLogitClassifier(AutoSklearnClassificationAlgorithm):
+
+    def __init__(self, max_epochs=2, random_state=None, n_jobs=1):
+        self.max_epochs = max_epochs
+        self.estimator = None
+
+    def fit(self, X, Y):
+        self.estimator = ProjLogit.ProjLogit(max_epochs=int(self.max_epochs))
+        self.estimator.fit(X, Y)
+        return self
+
+    def predict(self, X):
+        if self.estimator is None:
+            raise NotImplementedError
+        return self.estimator.predict(X)
+
+    def predict_proba(self, X):
+        if self.estimator is None:
+            raise NotImplementedError()
+        return self.estimator.predict_proba(X)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'PLogit',
+                'name': 'Logistic Regression using Least Squares',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': False,
+                'prefers_data_normalized': True,
+                'handles_regression': False,
+                'handles_classification': True,
+                'handles_multiclass': True,
+                'handles_multilabel': False,
+                'is_deterministic': True,
+                'handles_sparse': False,
+                'input': (DENSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                'preferred_dtype': np.float32}
+
+
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        max_epochs = UniformIntegerHyperparameter("max_epochs", 1, 20, default=2)
+        cs = ConfigurationSpace()
+        cs.add_hyperparameter(max_epochs)
+        return cs
diff --git a/autosklearn/pipeline/components/classification/qda.py b/autosklearn/pipeline/components/classification/qda.py
new file mode 100644
index 0000000000..ed9a99326b
--- /dev/null
+++ b/autosklearn/pipeline/components/classification/qda.py
@@ -0,0 +1,68 @@
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter
+
+from autosklearn.pipeline.components.base import \
+    AutoSklearnClassificationAlgorithm
+from autosklearn.pipeline.constants import *
+from autosklearn.pipeline.implementations.util import softmax
+
+
+class QDA(AutoSklearnClassificationAlgorithm):
+
+    def __init__(self, reg_param, random_state=None):
+        self.reg_param = float(reg_param)
+        self.estimator = None
+
+    def fit(self, X, Y):
+        import sklearn.qda
+        # fix: needed by the multilabel branch below, which referenced
+        # sklearn.multiclass without importing it
+        import sklearn.multiclass
+
+        estimator = sklearn.qda.QDA(self.reg_param)
+
+        if len(Y.shape) == 2 and Y.shape[1] > 1:
+            self.estimator = sklearn.multiclass.OneVsRestClassifier(estimator, n_jobs=1)
+        else:
+            self.estimator = estimator
+
+        self.estimator.fit(X, Y)
+        return self
+
+    def predict(self, X):
+        if self.estimator is None:
+            raise NotImplementedError()
+        return self.estimator.predict(X)
+
+    def predict_proba(self, X):
+        if self.estimator is None:
+            raise NotImplementedError()
+
+        df = self.estimator.predict_proba(X)
+        return softmax(df)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'QDA',
+                'name': 'Quadratic Discriminant Analysis',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': True,
+                # Find out if this is good because of sparsity
+                'prefers_data_normalized': False,
+                'handles_regression': False,
+                'handles_classification': True,
+                'handles_multiclass': True,
+                'handles_multilabel': True,
+                'is_deterministic': True,
+                'handles_sparse': False,
+                'input': (DENSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                # TODO find out what is best used here!
+                'preferred_dtype': None}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        reg_param = UniformFloatHyperparameter('reg_param', 0.0, 10.0,
+                                               default=0.5)
+        cs = ConfigurationSpace()
+        cs.add_hyperparameter(reg_param)
+        return cs
diff --git a/autosklearn/pipeline/components/classification/random_forest.py b/autosklearn/pipeline/components/classification/random_forest.py
new file mode 100644
index 0000000000..9a0ad37eb6
--- /dev/null
+++ b/autosklearn/pipeline/components/classification/random_forest.py
@@ -0,0 +1,146 @@
+import numpy as np
+
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \
+    UniformIntegerHyperparameter, CategoricalHyperparameter, \
+    UnParametrizedHyperparameter, Constant
+
+from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm
+from autosklearn.pipeline.constants import *
+
+
+class RandomForest(AutoSklearnClassificationAlgorithm):
+    def __init__(self, n_estimators, criterion, max_features,
+                 max_depth, min_samples_split, min_samples_leaf,
+                 min_weight_fraction_leaf, bootstrap, max_leaf_nodes,
+                 random_state=None, n_jobs=1, class_weight=None):
+        self.n_estimators = n_estimators
+        self.estimator_increment = 10
+        self.criterion = criterion
+        self.max_features = max_features
+        self.max_depth = max_depth
+        self.min_samples_split = min_samples_split
+        self.min_samples_leaf = min_samples_leaf
+        self.min_weight_fraction_leaf = min_weight_fraction_leaf
+        self.bootstrap = bootstrap
+        self.max_leaf_nodes = max_leaf_nodes
+        self.random_state = random_state
+        self.n_jobs = n_jobs
+        self.class_weight = class_weight
+        self.estimator = None
+
+    def fit(self, X, y, sample_weight=None, refit=False):
+        if self.estimator is None or refit:
+            self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight,
+                               refit=refit)
+
+        while not self.configuration_fully_fitted():
+            self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight)
+        return self
+
+    def iterative_fit(self, X, y, sample_weight=None, n_iter=1,
refit=False): + from sklearn.ensemble import RandomForestClassifier + + if refit: + self.estimator = None + + if self.estimator is None: + self.n_estimators = int(self.n_estimators) + if self.max_depth == "None": + self.max_depth = None + else: + self.max_depth = int(self.max_depth) + self.min_samples_split = int(self.min_samples_split) + self.min_samples_leaf = int(self.min_samples_leaf) + self.min_weight_fraction_leaf = float(self.min_weight_fraction_leaf) + if self.max_features not in ("sqrt", "log2", "auto"): + num_features = X.shape[1] + max_features = int(float(self.max_features) * (np.log(num_features) + 1)) + # Use at most half of the features + max_features = max(1, min(int(X.shape[1] / 2), max_features)) + else: + max_features = self.max_features + if self.bootstrap == "True": + self.bootstrap = True + else: + self.bootstrap = False + if self.max_leaf_nodes == "None": + self.max_leaf_nodes = None + + # initial fit of only increment trees + self.estimator = RandomForestClassifier( + n_estimators=0, + criterion=self.criterion, + max_features=max_features, + max_depth=self.max_depth, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + min_weight_fraction_leaf=self.min_weight_fraction_leaf, + bootstrap=self.bootstrap, + max_leaf_nodes=self.max_leaf_nodes, + random_state=self.random_state, + n_jobs=self.n_jobs, + class_weight=self.class_weight, + warm_start=True) + + tmp = self.estimator + tmp.n_estimators += n_iter + tmp.fit(X, y, sample_weight=sample_weight) + self.estimator = tmp + return self + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + + return not len(self.estimator.estimators_) < self.n_estimators + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict_proba(X) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'RF', + 'name': 'Random Forest Classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': True, + 'input': (DENSE, SPARSE, UNSIGNED_DATA), + 'output': (PREDICTIONS,), + # TODO find out what is best used here! + # But rather fortran or C-contiguous? 
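+                # Note: when max_features is numeric (not "sqrt"/"log2"/
+                # "auto"), iterative_fit() rescales it, e.g. with 100 input
+                # features and max_features=1.0 the forest samples
+                #     int(1.0 * (np.log(100) + 1)) == 5
+                # columns per split (capped at half of the features).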
+ 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + cs.add_hyperparameter(Constant("n_estimators", 100)) + cs.add_hyperparameter(CategoricalHyperparameter( + "criterion", ["gini", "entropy"], default="gini")) + cs.add_hyperparameter(UniformFloatHyperparameter( + "max_features", 0.5, 5, default=1)) + cs.add_hyperparameter(UnParametrizedHyperparameter("max_depth", "None")) + cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_split", 2, 20, default=2)) + cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_leaf", 1, 20, default=1)) + cs.add_hyperparameter(UnParametrizedHyperparameter("min_weight_fraction_leaf", 0.)) + cs.add_hyperparameter(UnParametrizedHyperparameter("max_leaf_nodes", "None")) + cs.add_hyperparameter(CategoricalHyperparameter( + "bootstrap", ["True", "False"], default="True")) + return cs diff --git a/autosklearn/pipeline/components/classification/sgd.py b/autosklearn/pipeline/components/classification/sgd.py new file mode 100644 index 0000000000..217f2dccc5 --- /dev/null +++ b/autosklearn/pipeline/components/classification/sgd.py @@ -0,0 +1,158 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter, UnParametrizedHyperparameter, \ + UniformIntegerHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition + +from autosklearn.pipeline.components.base import AutoSklearnClassificationAlgorithm +from autosklearn.pipeline.constants import * +from autosklearn.pipeline.implementations.util import softmax + + +class SGD(AutoSklearnClassificationAlgorithm): + def __init__(self, loss, penalty, alpha, fit_intercept, n_iter, + learning_rate, class_weight=None, l1_ratio=0.15, epsilon=0.1, + eta0=0.01, power_t=0.5, average=False, random_state=None): + self.loss = loss + self.penalty = penalty + self.alpha = alpha + self.fit_intercept = fit_intercept + self.n_iter = n_iter + self.learning_rate = learning_rate + self.class_weight = class_weight + self.l1_ratio = l1_ratio + self.epsilon = epsilon + self.eta0 = eta0 + self.power_t = power_t + self.random_state = random_state + self.average = average + self.estimator = None + + def fit(self, X, y): + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1) + + return self + + def iterative_fit(self, X, y, n_iter=1, refit=False): + from sklearn.linear_model.stochastic_gradient import SGDClassifier + + if refit: + self.estimator = None + + if self.estimator is None: + self.alpha = float(self.alpha) + self.fit_intercept = self.fit_intercept == 'True' + self.n_iter = int(self.n_iter) + if self.class_weight == "None": + self.class_weight = None + self.l1_ratio = float(self.l1_ratio) if self.l1_ratio is not None else 0.15 + self.epsilon = float(self.epsilon) if self.epsilon is not None else 0.1 + self.eta0 = float(self.eta0) + self.power_t = float(self.power_t) if self.power_t is not None else 0.25 + self.average = self.average == 'True' + self.estimator = SGDClassifier(loss=self.loss, + penalty=self.penalty, + alpha=self.alpha, + fit_intercept=self.fit_intercept, + n_iter=self.n_iter, + learning_rate=self.learning_rate, + class_weight=self.class_weight, + l1_ratio=self.l1_ratio, + epsilon=self.epsilon, + eta0=self.eta0, + power_t=self.power_t, + shuffle=True, + average=self.average, + random_state=self.random_state) + + self.estimator.n_iter += n_iter + 
self.estimator.fit(X, y) + return self + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + return not self.estimator.n_iter < self.n_iter + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError() + return self.estimator.predict(X) + + def predict_proba(self, X): + if self.estimator is None: + raise NotImplementedError() + + if self.loss in ["log", "modified_huber"]: + return self.estimator.predict_proba(X) + else: + df = self.estimator.decision_function(X) + return softmax(df) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'SGD Classifier', + 'name': 'Stochastic Gradient Descent Classifier', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': True, + 'input': (DENSE, SPARSE, UNSIGNED_DATA), + 'output': (PREDICTIONS,), + # TODO find out what is best used here! + 'preferred_dtype' : None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + loss = cs.add_hyperparameter(CategoricalHyperparameter("loss", + ["hinge", "log", "modified_huber", "squared_hinge", "perceptron"], + default="log")) + penalty = cs.add_hyperparameter(CategoricalHyperparameter( + "penalty", ["l1", "l2", "elasticnet"], default="l2")) + alpha = cs.add_hyperparameter(UniformFloatHyperparameter( + "alpha", 10e-7, 1e-1, log=True, default=0.0001)) + l1_ratio = cs.add_hyperparameter(UniformFloatHyperparameter( + "l1_ratio", 1e-9, 1, log=True, default=0.15)) + fit_intercept = cs.add_hyperparameter(UnParametrizedHyperparameter( + "fit_intercept", "True")) + n_iter = cs.add_hyperparameter(UniformIntegerHyperparameter( + "n_iter", 5, 1000, log=True, default=20)) + epsilon = cs.add_hyperparameter(UniformFloatHyperparameter( + "epsilon", 1e-5, 1e-1, default=1e-4, log=True)) + learning_rate = cs.add_hyperparameter(CategoricalHyperparameter( + "learning_rate", ["optimal", "invscaling", "constant"], + default="optimal")) + eta0 = cs.add_hyperparameter(UniformFloatHyperparameter( + "eta0", 10**-7, 0.1, default=0.01)) + power_t = cs.add_hyperparameter(UniformFloatHyperparameter( + "power_t", 1e-5, 1, default=0.25)) + average = cs.add_hyperparameter(CategoricalHyperparameter( + "average", ["False", "True"], default="False")) + + # TODO add passive/aggressive here, although not properly documented? 
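+        # Note: l1_ratio only takes effect under penalty="elasticnet" (see
+        # the condition below); sklearn blends the penalty between L2
+        # (l1_ratio=0) and L1 (l1_ratio=1), so the default of 0.15 is mostly
+        # L2 with a small L1 component.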
+ elasticnet = EqualsCondition(l1_ratio, penalty, "elasticnet") + epsilon_condition = EqualsCondition(epsilon, loss, "modified_huber") + # eta0 seems to be always active according to the source code; when + # learning_rate is set to optimial, eta0 is the starting value: + # https://github.com/scikit-learn/scikit-learn/blob/0.15.X/sklearn/linear_model/sgd_fast.pyx + #eta0_and_inv = EqualsCondition(eta0, learning_rate, "invscaling") + #eta0_and_constant = EqualsCondition(eta0, learning_rate, "constant") + #eta0_condition = OrConjunction(eta0_and_inv, eta0_and_constant) + power_t_condition = EqualsCondition(power_t, learning_rate, "invscaling") + + cs.add_condition(elasticnet) + cs.add_condition(epsilon_condition) + cs.add_condition(power_t_condition) + + return cs + diff --git a/autosklearn/pipeline/components/data_preprocessing/__init__.py b/autosklearn/pipeline/components/data_preprocessing/__init__.py new file mode 100644 index 0000000000..68728d3bef --- /dev/null +++ b/autosklearn/pipeline/components/data_preprocessing/__init__.py @@ -0,0 +1,30 @@ +import collections +import importlib +import inspect +import os +import pkgutil +import sys + +from ..base import AutoSklearnPreprocessingAlgorithm +from .rescaling import RescalingChoice + + +preprocessors_directory = os.path.split(__file__)[0] +_preprocessors = collections.OrderedDict() + +for module_loader, module_name, ispkg in pkgutil.iter_modules( + [preprocessors_directory]): + full_module_name = "%s.%s" % (__package__, module_name) + if full_module_name not in sys.modules and not ispkg: + module = importlib.import_module(full_module_name) + + for member_name, obj in inspect.getmembers(module): + if inspect.isclass( + obj) and AutoSklearnPreprocessingAlgorithm in obj.__bases__: + # TODO test if the obj implements the interface + # Keep in mind that this only instantiates the ensemble_wrapper, + # but not the real target classifier + preprocessor = obj + _preprocessors[module_name] = preprocessor + +_preprocessors['rescaling'] = RescalingChoice \ No newline at end of file diff --git a/autosklearn/pipeline/components/data_preprocessing/balancing.py b/autosklearn/pipeline/components/data_preprocessing/balancing.py new file mode 100644 index 0000000000..4b2f64a2c4 --- /dev/null +++ b/autosklearn/pipeline/components/data_preprocessing/balancing.py @@ -0,0 +1,113 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter + +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * + + +class Balancing(AutoSklearnPreprocessingAlgorithm): + def __init__(self, strategy, random_state=None): + self.strategy = strategy + + def fit(self, X, y=None): + return self + + def transform(self, X): + return X + + def get_weights(self, Y, classifier, preprocessor, init_params, fit_params): + if init_params is None: + init_params = {} + + if fit_params is None: + fit_params = {} + + # Classifiers which require sample weights: + # We can have adaboost in here, because in the fit method, + # the sample weights are normalized: + # https://github.com/scikit-learn/scikit-learn/blob/0.15.X/sklearn/ensemble/weight_boosting.py#L121 + # Have RF and ET in here because they emit a warning if class_weights + # are used together with warmstarts + clf_ = ['adaboost', 'gradient_boosting', 'random_forest', 'extra_trees'] + pre_ = [] + if classifier in clf_ or preprocessor in pre_: + if 
len(Y.shape) > 1: + offsets = [2 ** i for i in range(Y.shape[1])] + Y_ = np.sum(Y * offsets, axis=1) + else: + Y_ = Y + + unique, counts = np.unique(Y_, return_counts=True) + cw = 1. / counts + cw = cw / np.mean(cw) + + sample_weights = np.ones(Y_.shape) + + for i, ue in enumerate(unique): + mask = Y_ == ue + sample_weights[mask] *= cw[i] + + if classifier in clf_: + fit_params['classifier:sample_weight'] = sample_weights + if preprocessor in pre_: + fit_params['preprocessor:sample_weight'] = sample_weights + + # Classifiers which can adjust sample weights themselves via the + # argument `class_weight` + clf_ = ['decision_tree', 'liblinear_svc', + 'libsvm_svc', 'sgd'] + pre_ = ['liblinear_svc_preprocessor', + 'extra_trees_preproc_for_classification'] + if classifier in clf_: + init_params['classifier:class_weight'] = 'auto' + if preprocessor in pre_: + init_params['preprocessor:class_weight'] = 'auto' + + clf_ = ['ridge'] + if classifier in clf_: + class_weights = {} + + unique, counts = np.unique(Y, return_counts=True) + cw = 1. / counts + cw = cw / np.mean(cw) + + for i, ue in enumerate(unique): + class_weights[ue] = cw[i] + + if classifier in clf_: + init_params['classifier:class_weight'] = class_weights + + return init_params, fit_params + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'Balancing', + 'name': 'Balancing Imbalanced Class Distributions', + 'handles_missing_values': True, + 'handles_nominal_values': True, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (DENSE, SPARSE, UNSIGNED_DATA, SIGNED_DATA), + 'output': (INPUT,), + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + # TODO add replace by zero! 
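+ # 'none' leaves the data untouched; 'weighting' makes get_weights() above
+ # compute inverse class-frequency weights. Rough worked example of the
+ # formula cw = (1 / counts) / mean(1 / counts), with invented labels:
+ # for Y = [0, 0, 0, 1], counts = [3, 1], so cw = [0.5, 1.5] and the
+ # minority class gets three times the weight of the majority class.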
+ strategy = CategoricalHyperparameter( + "strategy", ["none", "weighting"], default="none") + cs = ConfigurationSpace() + cs.add_hyperparameter(strategy) + return cs + diff --git a/autosklearn/pipeline/components/data_preprocessing/imputation.py b/autosklearn/pipeline/components/data_preprocessing/imputation.py new file mode 100644 index 0000000000..70dcfef71f --- /dev/null +++ b/autosklearn/pipeline/components/data_preprocessing/imputation.py @@ -0,0 +1,54 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter + +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * + + +class Imputation(AutoSklearnPreprocessingAlgorithm): + def __init__(self, strategy, random_state=None): + # TODO pay attention to the cases when a copy is made (CSR matrices) + self.strategy = strategy + + def fit(self, X, y=None): + import sklearn.preprocessing + + self.preprocessor = sklearn.preprocessing.Imputer( + strategy=self.strategy, copy=False) + self.preprocessor = self.preprocessor.fit(X) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'Imputation', + 'name': 'Imputation', + 'handles_missing_values': True, + 'handles_nominal_values': True, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + # TODO find out if this is right! + 'handles_sparse': True, + 'handles_dense': True, + 'input': (DENSE, SPARSE, UNSIGNED_DATA), + 'output': (INPUT,), + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + # TODO add replace by zero!
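+ # The three strategies below map directly onto
+ # sklearn.preprocessing.Imputer: fill missing entries with the column
+ # mean, the column median, or the most frequent value of the column.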
+ strategy = CategoricalHyperparameter( + "strategy", ["mean", "median", "most_frequent"], default="mean") + cs = ConfigurationSpace() + cs.add_hyperparameter(strategy) + return cs diff --git a/autosklearn/pipeline/components/data_preprocessing/one_hot_encoding.py b/autosklearn/pipeline/components/data_preprocessing/one_hot_encoding.py new file mode 100644 index 0000000000..24d49c21b0 --- /dev/null +++ b/autosklearn/pipeline/components/data_preprocessing/one_hot_encoding.py @@ -0,0 +1,85 @@ +import numpy as np + +import autosklearn.pipeline.implementations.OneHotEncoder + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ + UniformFloatHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition + +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * + + +class OneHotEncoder(AutoSklearnPreprocessingAlgorithm): + def __init__(self, use_minimum_fraction, minimum_fraction=None, + categorical_features=None, random_state=None): + # TODO pay attention to the cases when a copy is made (CSR matrices) + self.use_minimum_fraction = use_minimum_fraction + self.minimum_fraction = minimum_fraction + self.categorical_features = categorical_features + + def fit(self, X, y=None): + if self.use_minimum_fraction is None or \ + self.use_minimum_fraction.lower() == 'false': + self.minimum_fraction = None + else: + self.minimum_fraction = float(self.minimum_fraction) + + if self.categorical_features is None: + categorical_features = [] + else: + categorical_features = self.categorical_features + + self.preprocessor = autosklearn.pipeline.implementations.OneHotEncoder\ + .OneHotEncoder(minimum_fraction=self.minimum_fraction, + categorical_features=categorical_features) + + self.preprocessor = self.preprocessor.fit(X) + return self + + def transform(self, X): + import scipy.sparse + + is_sparse = scipy.sparse.issparse(X) + if self.preprocessor is None: + raise NotImplementedError() + X = self.preprocessor.transform(X) + if is_sparse: + return X + elif isinstance(X, np.ndarray): + return X + else: + return X.toarray() + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': '1Hot', + 'name': 'One Hot Encoder', + 'handles_missing_values': True, + 'handles_nominal_values': True, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + # TODO find out if this is right!
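+ # transform() above returns sparse input as sparse and anything else
+ # as a dense array, so both formats are declared as supported: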
+ 'handles_sparse': True, + 'handles_dense': True, + 'input': (DENSE, SPARSE, UNSIGNED_DATA), + 'output': (INPUT,), + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + use_minimum_fraction = cs.add_hyperparameter(CategoricalHyperparameter( + "use_minimum_fraction", ["True", "False"], default="True")) + minimum_fraction = cs.add_hyperparameter(UniformFloatHyperparameter( + "minimum_fraction", lower=.0001, upper=0.5, default=0.01, log=True)) + cs.add_condition(EqualsCondition(minimum_fraction, + use_minimum_fraction, 'True')) + return cs diff --git a/autosklearn/pipeline/components/data_preprocessing/rescaling.py b/autosklearn/pipeline/components/data_preprocessing/rescaling.py new file mode 100644 index 0000000000..30d402a9b9 --- /dev/null +++ b/autosklearn/pipeline/components/data_preprocessing/rescaling.py @@ -0,0 +1,245 @@ +from collections import OrderedDict +import copy + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition, AbstractConjunction + +from autosklearn.pipeline.constants import * + + +class Rescaling(object): + def fit(self, X, y=None): + self.preprocessor.fit(X) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + return cs + + +class NoRescalingComponent(Rescaling): + def __init__(self, random_state): + pass + + def fit(self, X, y=None): + return self + + def transform(self, X): + return X + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'NoRescaling', + 'name': 'No Rescaling', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + # TODO find out if this is right! + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE, UNSIGNED_DATA), + 'output': (INPUT,), + 'preferred_dtype': None} + + +class MinMaxScalerComponent(Rescaling): + def __init__(self, random_state): + from autosklearn.pipeline.implementations.MinMaxScaler import \ + MinMaxScaler + self.preprocessor = MinMaxScaler() + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'MinMaxScaler', + 'name': 'MinMaxScaler', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + # TODO find out if this is right!
+ 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE, UNSIGNED_DATA), + 'output': (INPUT, SIGNED_DATA), + 'preferred_dtype': None} + + +class StandardScalerComponent(Rescaling): + def __init__(self, random_state): + from autosklearn.pipeline.implementations.StandardScaler import \ + StandardScaler + self.preprocessor = StandardScaler() + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'StandardScaler', + 'name': 'StandardScaler', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + # TODO find out if this is right! + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE, UNSIGNED_DATA), + 'output': (INPUT,), + 'preferred_dtype': None} + + +class NormalizerComponent(Rescaling): + def __init__(self, random_state): + from autosklearn.pipeline.implementations.Normalizer import Normalizer + self.preprocessor = Normalizer() + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'Normalizer', + 'name': 'Normalizer', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + # TODO find out if this is right! + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE, UNSIGNED_DATA), + 'output': (INPUT,), + 'preferred_dtype': None} + + +class RescalingChoice(object): + def __init__(self, **params): + choice = params['__choice__'] + del params['__choice__'] + self.choice = self.get_components()[choice](**params) + + @classmethod + def get_components(cls): + return OrderedDict((('none', NoRescalingComponent), + ('min/max', MinMaxScalerComponent), + ('standardize', StandardScalerComponent), + ('normalize', NormalizerComponent))) + + @classmethod + def get_available_components(cls, data_prop=None, + include=None, + exclude=None): + if include is not None and exclude is not None: + raise ValueError( + "The arguments include and exclude cannot be used together.") + + available_comp = cls.get_components() + + components_dict = OrderedDict() + for name in available_comp: + if include is not None and name not in include: + continue + elif exclude is not None and name in exclude: + continue + entry = available_comp[name] + + components_dict[name] = entry + + return components_dict + + @classmethod + def get_hyperparameter_search_space(cls, dataset_properties=None, + default=None, + include=None, + exclude=None): + cs = ConfigurationSpace() + + # Compile a list of legal preprocessors for this problem + available_preprocessors = cls.get_available_components( + data_prop=dataset_properties, + include=include, exclude=exclude) + + if len(available_preprocessors) == 0: + raise ValueError( + "No rescaling algorithm found.") + + if default is None: + defaults = ['min/max', 'standardize', 'none', 'normalize'] + for default_ in defaults: + if default_ in available_preprocessors: + default = default_ + break + + preprocessor = CategoricalHyperparameter('__choice__', + list( + available_preprocessors.keys()), + default=default) + 
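+ # The loop below copies every child component's hyperparameters into
+ # this space under the namespaced name '<component>:<hyperparameter>'
+ # and makes each top-level one conditional on __choice__ selecting that
+ # component; e.g. a hypothetical 'standardize:copy' parameter would only
+ # be active while __choice__ == 'standardize'.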
cs.add_hyperparameter(preprocessor) + for name in available_preprocessors: + preprocessor_configuration_space = available_preprocessors[name]. \ + get_hyperparameter_search_space(dataset_properties) + for parameter in preprocessor_configuration_space.get_hyperparameters(): + new_parameter = copy.deepcopy(parameter) + new_parameter.name = "%s:%s" % (name, new_parameter.name) + cs.add_hyperparameter(new_parameter) + # We must only add a condition if the hyperparameter is not + # conditional on something else + if len(preprocessor_configuration_space. + get_parents_of(parameter)) == 0: + condition = EqualsCondition(new_parameter, preprocessor, + name) + cs.add_condition(condition) + + for condition in available_preprocessors[name]. \ + get_hyperparameter_search_space( + dataset_properties).get_conditions(): + if not isinstance(condition, AbstractConjunction): + dlcs = [condition] + else: + dlcs = condition.get_descendent_literal_conditions() + for dlc in dlcs: + if not dlc.child.name.startswith(name): + dlc.child.name = "%s:%s" % (name, dlc.child.name) + if not dlc.parent.name.startswith(name): + dlc.parent.name = "%s:%s" % (name, dlc.parent.name) + cs.add_condition(condition) + + for forbidden_clause in available_preprocessors[name]. \ + get_hyperparameter_search_space( + dataset_properties).forbidden_clauses: + dlcs = forbidden_clause.get_descendant_literal_clauses() + for dlc in dlcs: + if not dlc.hyperparameter.name.startswith(name): + dlc.hyperparameter.name = "%s:%s" % (name, + dlc.hyperparameter.name) + cs.add_forbidden_clause(forbidden_clause) + + return cs + diff --git a/autosklearn/pipeline/components/feature_preprocessing/__init__.py b/autosklearn/pipeline/components/feature_preprocessing/__init__.py new file mode 100644 index 0000000000..a4ce03c5af --- /dev/null +++ b/autosklearn/pipeline/components/feature_preprocessing/__init__.py @@ -0,0 +1,167 @@ +from collections import OrderedDict +import copy +import importlib +import inspect +import os +import pkgutil +import sys + +from ..base import AutoSklearnPreprocessingAlgorithm +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition, AbstractConjunction + + +preprocessors_directory = os.path.split(__file__)[0] +_preprocessors = OrderedDict() + + +for module_loader, module_name, ispkg in pkgutil.iter_modules([preprocessors_directory]): + full_module_name = "%s.%s" % (__package__, module_name) + if full_module_name not in sys.modules and not ispkg: + module = importlib.import_module(full_module_name) + + for member_name, obj in inspect.getmembers(module): + if inspect.isclass(obj) and AutoSklearnPreprocessingAlgorithm in obj.__bases__: + # TODO test if the obj implements the interface + # Keep in mind that this only instantiates the ensemble_wrapper, + # but not the real target classifier + preprocessor = obj + _preprocessors[module_name] = preprocessor + + +class FeaturePreprocessorChoice(object): + def __init__(self, **params): + choice = params['__choice__'] + del params['__choice__'] + self.choice = self.get_components()[choice](**params) + + @classmethod + def get_components(cls): + return _preprocessors + + @classmethod + def get_available_components(cls, data_prop, + include=None, + exclude=None): + if include is not None and exclude is not None: + raise ValueError( + "The arguments include and exclude cannot be used together.") + + available_comp = cls.get_components() + + if include
is not None: + for incl in include: + if incl not in available_comp: + raise ValueError("Trying to include unknown component: " + "%s" % incl) + + # TODO check for task type classification and/or regression! + + components_dict = OrderedDict() + for name in available_comp: + if include is not None and name not in include: + continue + elif exclude is not None and name in exclude: + continue + + entry = available_comp[name] + + # Exclude itself to avoid infinite loop + if entry == FeaturePreprocessorChoice or hasattr(entry, 'get_components'): + continue + + target_type = data_prop['target_type'] + if target_type == 'classification': + if entry.get_properties()['handles_classification'] is False: + continue + if data_prop.get('multiclass') is True and \ + entry.get_properties()['handles_multiclass'] is False: + continue + if data_prop.get('multilabel') is True and \ + entry.get_properties()['handles_multilabel'] is False: + continue + + elif target_type == 'regression': + if entry.get_properties()['handles_regression'] is False: + continue + + else: + raise ValueError('Unknown target type %s' % target_type) + + components_dict[name] = entry + + return components_dict + + @classmethod + def get_hyperparameter_search_space(cls, dataset_properties, + default=None, + include=None, + exclude=None): + cs = ConfigurationSpace() + + # Compile a list of legal preprocessors for this problem + available_preprocessors = cls.get_available_components( + data_prop=dataset_properties, + include=include, exclude=exclude) + + if len(available_preprocessors) == 0: + raise ValueError( + "No preprocessors found, please add NoPreprocessing") + + if default is None: + defaults = ['no_preprocessing', 'select_percentile', 'pca', + 'truncatedSVD'] + for default_ in defaults: + if default_ in available_preprocessors: + default = default_ + break + + preprocessor = CategoricalHyperparameter('__choice__', + list( + available_preprocessors.keys()), + default=default) + cs.add_hyperparameter(preprocessor) + for name in available_preprocessors: + preprocessor_configuration_space = available_preprocessors[name]. \ + get_hyperparameter_search_space(dataset_properties) + for parameter in preprocessor_configuration_space.get_hyperparameters(): + new_parameter = copy.deepcopy(parameter) + new_parameter.name = "%s:%s" % (name, new_parameter.name) + cs.add_hyperparameter(new_parameter) + # We must only add a condition if the hyperparameter is not + # conditional on something else + if len(preprocessor_configuration_space. + get_parents_of(parameter)) == 0: + condition = EqualsCondition(new_parameter, preprocessor, + name) + cs.add_condition(condition) + + for condition in available_preprocessors[name]. \ + get_hyperparameter_search_space( + dataset_properties).get_conditions(): + if not isinstance(condition, AbstractConjunction): + dlcs = [condition] + else: + dlcs = condition.get_descendent_literal_conditions() + for dlc in dlcs: + if not dlc.child.name.startswith(name): + dlc.child.name = "%s:%s" % (name, dlc.child.name) + if not dlc.parent.name.startswith(name): + dlc.parent.name = "%s:%s" % (name, dlc.parent.name) + cs.add_condition(condition) + + for forbidden_clause in available_preprocessors[name]. 
\ + get_hyperparameter_search_space( + dataset_properties).forbidden_clauses: + dlcs = forbidden_clause.get_descendant_literal_clauses() + for dlc in dlcs: + if not dlc.hyperparameter.name.startswith(name): + dlc.hyperparameter.name = "%s:%s" % (name, + dlc.hyperparameter.name) + cs.add_forbidden_clause(forbidden_clause) + + return cs + + +_preprocessors['preprocessor'] = FeaturePreprocessorChoice \ No newline at end of file diff --git a/autosklearn/pipeline/components/feature_preprocessing/densifier.py b/autosklearn/pipeline/components/feature_preprocessing/densifier.py new file mode 100644 index 0000000000..893c768ee9 --- /dev/null +++ b/autosklearn/pipeline/components/feature_preprocessing/densifier.py @@ -0,0 +1,46 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace + +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * + + +class Densifier(AutoSklearnPreprocessingAlgorithm): + def __init__(self, random_state=None): + pass + + def fit(self, X, y=None): + return self + + def transform(self, X): + from scipy import sparse + if sparse.issparse(X): + return X.todense().getA() + else: + return X + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'Densifier', + 'name': 'Densifier', + 'handles_missing_values': True, + 'handles_nominal_values': True, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': True, + 'handles_dense': False, + 'input': (SPARSE, UNSIGNED_DATA), + 'output': (DENSE, INPUT), + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + return cs + diff --git a/autosklearn/pipeline/components/feature_preprocessing/extra_trees_preproc_for_classification.py b/autosklearn/pipeline/components/feature_preprocessing/extra_trees_preproc_for_classification.py new file mode 100644 index 0000000000..6bed2c257c --- /dev/null +++ b/autosklearn/pipeline/components/feature_preprocessing/extra_trees_preproc_for_classification.py @@ -0,0 +1,139 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * + + +class ExtraTreesPreprocessor(AutoSklearnPreprocessingAlgorithm): + def __init__(self, n_estimators, criterion, min_samples_leaf, + min_samples_split, max_features, + max_leaf_nodes_or_max_depth="max_depth", + bootstrap=False, max_leaf_nodes=None, max_depth="None", + min_weight_fraction_leaf=0.0, + oob_score=False, n_jobs=1, random_state=None, verbose=0, + class_weight=None): + + self.n_estimators = int(n_estimators) + self.estimator_increment = 10 + if criterion not in ("gini", "entropy"): + raise ValueError("'criterion' is not in ('gini', 'entropy'): " + "%s" % criterion) + self.criterion = criterion + + if max_leaf_nodes_or_max_depth == "max_depth": + self.max_leaf_nodes = None + if max_depth == "None": + self.max_depth = None + else: + self.max_depth = int(max_depth) + # if
use_max_depth == "True": + # self.max_depth = int(max_depth) + #elif use_max_depth == "False": + # self.max_depth = None + else: + if max_leaf_nodes == "None": + self.max_leaf_nodes = None + else: + self.max_leaf_nodes = int(max_leaf_nodes) + self.max_depth = None + + self.min_samples_leaf = int(min_samples_leaf) + self.min_samples_split = int(min_samples_split) + + self.max_features = float(max_features) + + if bootstrap == "True": + self.bootstrap = True + elif bootstrap == "False": + self.bootstrap = False + + self.oob_score = oob_score + self.n_jobs = int(n_jobs) + self.random_state = random_state + self.verbose = int(verbose) + self.class_weight = class_weight + self.preprocessor = None + + def fit(self, X, Y, sample_weight=None): + from sklearn.ensemble import ExtraTreesClassifier + + num_features = X.shape[1] + max_features = int( + float(self.max_features) * (np.log(num_features) + 1)) + # Use at most half of the features + max_features = max(1, min(int(X.shape[1] / 2), max_features)) + self.preprocessor = ExtraTreesClassifier( + n_estimators=0, criterion=self.criterion, + max_depth=self.max_depth, min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, bootstrap=self.bootstrap, + max_features=max_features, max_leaf_nodes=self.max_leaf_nodes, + oob_score=self.oob_score, n_jobs=self.n_jobs, verbose=self.verbose, + random_state=self.random_state, class_weight=self.class_weight, + warm_start=True + ) + # JTS TODO: I think we might have to copy here if we want self.estimator + # to always be consistent on sigabort + while len(self.preprocessor.estimators_) < self.n_estimators: + tmp = self.preprocessor # TODO copy ? + tmp.n_estimators += self.estimator_increment + tmp.fit(X, Y, sample_weight=sample_weight) + self.preprocessor = tmp + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'ET', + 'name': 'Extra Trees Classifier Preprocessing', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + # TODO find out if this is good because of sparcity... + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': False, + 'input': (DENSE, SPARSE, UNSIGNED_DATA), + 'output': (INPUT,), + # TODO find out what is best used here! + # But rather fortran or C-contiguous? 
+ 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + n_estimators = cs.add_hyperparameter(Constant("n_estimators", 100)) + criterion = cs.add_hyperparameter(CategoricalHyperparameter( + "criterion", ["gini", "entropy"], default="gini")) + max_features = cs.add_hyperparameter(UniformFloatHyperparameter( + "max_features", 0.5, 5, default=1)) + + max_depth = cs.add_hyperparameter( + UnParametrizedHyperparameter(name="max_depth", value="None")) + + min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_split", 2, 20, default=2)) + min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter( + "min_samples_leaf", 1, 20, default=1)) + min_weight_fraction_leaf = cs.add_hyperparameter(Constant( + 'min_weight_fraction_leaf', 0.)) + + bootstrap = cs.add_hyperparameter(CategoricalHyperparameter( + "bootstrap", ["True", "False"], default="False")) + + return cs diff --git a/autosklearn/pipeline/components/feature_preprocessing/fast_ica.py b/autosklearn/pipeline/components/feature_preprocessing/fast_ica.py new file mode 100644 index 0000000000..01009dd5c9 --- /dev/null +++ b/autosklearn/pipeline/components/feature_preprocessing/fast_ica.py @@ -0,0 +1,86 @@ +import warnings + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ + UniformIntegerHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition + +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * + + +class FastICA(AutoSklearnPreprocessingAlgorithm): + def __init__(self, algorithm, whiten, fun, n_components=None, + random_state=None): + self.n_components = None if n_components is None else int(n_components) + self.algorithm = algorithm + self.whiten = whiten == 'True' + self.fun = fun + self.random_state = random_state + + def fit(self, X, Y=None): + import sklearn.decomposition + + self.preprocessor = sklearn.decomposition.FastICA( + n_components=self.n_components, algorithm=self.algorithm, + fun=self.fun, whiten=self.whiten, random_state=self.random_state + ) + # Make the RuntimeWarning an Exception! 
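+ # FastICA signals failure to converge via a RuntimeWarning; escalating
+ # warnings to errors here presumably makes such configurations fail
+ # fast instead of silently returning a poor transformation.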
+ with warnings.catch_warnings(): + warnings.filterwarnings("error") + try: + self.preprocessor.fit(X) + except ValueError as e: + if str(e) == 'array must not contain infs or NaNs': + raise ValueError("Bug in scikit-learn: https://github.com/scikit-learn/scikit-learn/pull/2738") + else: + import traceback + raise ValueError(traceback.format_exc()) + + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'FastICA', + 'name': 'Fast Independent Component Analysis', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': False, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (DENSE, UNSIGNED_DATA), + 'output': (INPUT, UNSIGNED_DATA), + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + n_components = cs.add_hyperparameter(UniformIntegerHyperparameter( + "n_components", 10, 2000, default=100)) + algorithm = cs.add_hyperparameter(CategoricalHyperparameter('algorithm', + ['parallel', 'deflation'], 'parallel')) + whiten = cs.add_hyperparameter(CategoricalHyperparameter('whiten', + ['False', 'True'], 'False')) + fun = cs.add_hyperparameter(CategoricalHyperparameter( + 'fun', ['logcosh', 'exp', 'cube'], 'logcosh')) + + cs.add_condition(EqualsCondition(n_components, whiten, "True")) + + return cs + + diff --git a/autosklearn/pipeline/components/feature_preprocessing/feature_agglomeration.py b/autosklearn/pipeline/components/feature_preprocessing/feature_agglomeration.py new file mode 100644 index 0000000000..92ff1f0c75 --- /dev/null +++ b/autosklearn/pipeline/components/feature_preprocessing/feature_agglomeration.py @@ -0,0 +1,82 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ + UniformIntegerHyperparameter +from HPOlibConfigSpace.forbidden import ForbiddenInClause, \ + ForbiddenAndConjunction, ForbiddenEqualsClause + +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * + + +class FeatureAgglomeration(AutoSklearnPreprocessingAlgorithm): + def __init__(self, n_clusters, affinity, linkage, pooling_func, + random_state=None): + self.n_clusters = int(n_clusters) + self.affinity = affinity + self.linkage = linkage + self.pooling_func = pooling_func + self.random_state = random_state + + self.pooling_func_mapping = dict(mean=np.mean, + median=np.median, + max=np.max) + + def fit(self, X, Y=None): + import sklearn.cluster + + n_clusters = min(self.n_clusters, X.shape[1]) + if not callable(self.pooling_func): + self.pooling_func = self.pooling_func_mapping[self.pooling_func] + + self.preprocessor = sklearn.cluster.FeatureAgglomeration( + n_clusters=n_clusters, affinity=self.affinity, + linkage=self.linkage, pooling_func=self.pooling_func) + self.preprocessor.fit(X) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def
get_properties(dataset_properties=None): + return {'shortname': 'Feature Agglomeration', + 'name': 'Feature Agglomeration', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (DENSE, UNSIGNED_DATA), + 'output': (INPUT,), + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + n_clusters = cs.add_hyperparameter(UniformIntegerHyperparameter( + "n_clusters", 2, 400, 25)) + affinity = cs.add_hyperparameter(CategoricalHyperparameter( + "affinity", ["euclidean", "manhattan", "cosine"], "euclidean")) + linkage = cs.add_hyperparameter(CategoricalHyperparameter( + "linkage", ["ward", "complete", "average"], "ward")) + pooling_func = cs.add_hyperparameter(CategoricalHyperparameter( + "pooling_func", ["mean", "median", "max"])) + + affinity_and_linkage = ForbiddenAndConjunction( + ForbiddenInClause(affinity, ["manhattan", "cosine"]), + ForbiddenEqualsClause(linkage, "ward")) + cs.add_forbidden_clause(affinity_and_linkage) + return cs + diff --git a/autosklearn/pipeline/components/feature_preprocessing/gem.py b/autosklearn/pipeline/components/feature_preprocessing/gem.py new file mode 100644 index 0000000000..e3cbdff135 --- /dev/null +++ b/autosklearn/pipeline/components/feature_preprocessing/gem.py @@ -0,0 +1,52 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter, UniformFloatHyperparameter + +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.implementations.gem import GEM as GEMImpl +from autosklearn.pipeline.constants import * + +class GEM(AutoSklearnPreprocessingAlgorithm): + + def __init__(self, N, precond, random_state=None): + self.N = N + self.precond = precond + + def fit(self, X, Y): + self.preprocessor = GEMImpl(self.N, self.precond) + self.preprocessor.fit(X, Y) + return self + + + def transform(self, X): + return self.preprocessor.transform(X) + + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'GEM', + 'name': 'Generalized Eigenvector extraction', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': False, + 'handles_dense': True, + 'input': (DENSE, UNSIGNED_DATA), + 'output': (INPUT, UNSIGNED_DATA), + 'preferred_dtype': None} + + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + N = UniformIntegerHyperparameter("N", 5, 20, default=10) + precond = UniformFloatHyperparameter("precond", 0, 0.5, default=0.1) + cs = ConfigurationSpace() + cs.add_hyperparameter(N) + cs.add_hyperparameter(precond) + return cs diff --git a/autosklearn/pipeline/components/feature_preprocessing/kernel_pca.py b/autosklearn/pipeline/components/feature_preprocessing/kernel_pca.py new file mode 100644 index 0000000000..d7eddf86d6 --- /dev/null +++ 
b/autosklearn/pipeline/components/feature_preprocessing/kernel_pca.py @@ -0,0 +1,94 @@ +import warnings + +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ + UniformIntegerHyperparameter, UniformFloatHyperparameter +from HPOlibConfigSpace.conditions import EqualsCondition, InCondition + +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * + + +class KernelPCA(AutoSklearnPreprocessingAlgorithm): + def __init__(self, n_components, kernel, degree=3, gamma=0.25, coef0=0.0, + random_state=None): + self.n_components = int(n_components) + self.kernel = kernel + self.degree = int(degree) + self.gamma = float(gamma) + self.coef0 = float(coef0) + self.random_state = random_state + + def fit(self, X, Y=None): + import scipy.sparse + import sklearn.decomposition + + self.preprocessor = sklearn.decomposition.KernelPCA( + n_components=self.n_components, kernel=self.kernel, + degree=self.degree, gamma=self.gamma, coef0=self.coef0, + remove_zero_eig=True) + # Make the RuntimeWarning an Exception! + if scipy.sparse.issparse(X): + X = X.astype(np.float64) + with warnings.catch_warnings(): + warnings.filterwarnings("error") + self.preprocessor.fit(X) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + with warnings.catch_warnings(): + warnings.filterwarnings("error") + X_new = self.preprocessor.transform(X) + return X_new + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'KernelPCA', + 'name': 'Kernel Principal Component Analysis', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': False, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (DENSE, SPARSE, UNSIGNED_DATA), + 'output': (DENSE, UNSIGNED_DATA), + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + n_components = UniformIntegerHyperparameter( + "n_components", 10, 2000, default=100) + kernel = CategoricalHyperparameter('kernel', + ['poly', 'rbf', 'sigmoid', 'cosine'], 'rbf') + degree = UniformIntegerHyperparameter('degree', 2, 5, 3) + gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8, + log=True, default=1.0) + coef0 = UniformFloatHyperparameter("coef0", -1, 1, default=0) + cs = ConfigurationSpace() + cs.add_hyperparameter(n_components) + cs.add_hyperparameter(kernel) + cs.add_hyperparameter(degree) + cs.add_hyperparameter(gamma) + cs.add_hyperparameter(coef0) + + degree_depends_on_poly = EqualsCondition(degree, kernel, "poly") + coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"]) + gamma_condition = InCondition(gamma, kernel, ["poly", "rbf"]) + cs.add_condition(degree_depends_on_poly) + cs.add_condition(coef0_condition) + cs.add_condition(gamma_condition) + return cs + + diff --git a/autosklearn/pipeline/components/feature_preprocessing/kitchen_sinks.py b/autosklearn/pipeline/components/feature_preprocessing/kitchen_sinks.py new file mode 100644 index 0000000000..d95568ddea --- /dev/null +++ b/autosklearn/pipeline/components/feature_preprocessing/kitchen_sinks.py @@ -0,0 +1,64 @@ +from 
HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter + +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * + +class RandomKitchenSinks(AutoSklearnPreprocessingAlgorithm): + + def __init__(self, gamma, n_components, random_state=None): + """ Parameters: + gamma: float + Parameter of the rbf kernel to be approximated exp(-gamma * x^2) + + n_components: int + Number of components (output dimensionality) used to approximate the kernel + """ + self.gamma = gamma + self.n_components = n_components + self.random_state = random_state + + def fit(self, X, Y=None): + import sklearn.kernel_approximation + + self.preprocessor = sklearn.kernel_approximation.RBFSampler( + self.gamma, self.n_components, self.random_state) + self.preprocessor.fit(X) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'KitchenSink', + 'name': 'Random Kitchen Sinks', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE, UNSIGNED_DATA), + 'output': (INPUT, UNSIGNED_DATA), + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + gamma = UniformFloatHyperparameter( + "gamma", 0.3, 2., default=1.0) + n_components = UniformIntegerHyperparameter( + "n_components", 50, 10000, default=100, log=True) + cs = ConfigurationSpace() + cs.add_hyperparameter(gamma) + cs.add_hyperparameter(n_components) + return cs diff --git a/autosklearn/pipeline/components/feature_preprocessing/liblinear_svc_preprocessor.py b/autosklearn/pipeline/components/feature_preprocessing/liblinear_svc_preprocessor.py new file mode 100644 index 0000000000..61071f1727 --- /dev/null +++ b/autosklearn/pipeline/components/feature_preprocessing/liblinear_svc_preprocessor.py @@ -0,0 +1,105 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter, Constant, UnParametrizedHyperparameter +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \ + ForbiddenAndConjunction + +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * + + +class LibLinear_Preprocessor(AutoSklearnPreprocessingAlgorithm): + # Liblinear is not deterministic as it uses a RNG inside + def __init__(self, penalty, loss, dual, tol, C, multi_class, + fit_intercept, intercept_scaling, class_weight=None, + random_state=None): + self.penalty = penalty + self.loss = loss + self.dual = dual + self.tol = tol + self.C = C + self.multi_class = multi_class + self.fit_intercept = fit_intercept + self.intercept_scaling = intercept_scaling + self.class_weight = class_weight + self.random_state = random_state + self.preprocessor = None + + def fit(self, X, Y): + import sklearn.svm + + self.C = float(self.C) + self.tol = float(self.tol) + + self.dual 
= self.dual == 'True' + self.fit_intercept = self.fit_intercept == 'True' + self.intercept_scaling = float(self.intercept_scaling) + + if self.class_weight == "None": + self.class_weight = None + + self.preprocessor = sklearn.svm.LinearSVC(penalty=self.penalty, + loss=self.loss, + dual=self.dual, + tol=self.tol, + C=self.C, + class_weight=self.class_weight, + fit_intercept=self.fit_intercept, + intercept_scaling=self.intercept_scaling, + multi_class=self.multi_class, + random_state=self.random_state) + self.preprocessor.fit(X, Y) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'Liblinear-Preprocessor', + 'name': 'Liblinear Support Vector Preprocessing', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # Find out if this is good because of sparsity + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': False, + 'handles_sparse': True, + 'input': (SPARSE, DENSE, UNSIGNED_DATA), + 'output': (INPUT,), + # TODO find out what is best used here! + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + penalty = cs.add_hyperparameter(Constant("penalty", "l1")) + loss = cs.add_hyperparameter(CategoricalHyperparameter( + "loss", ["hinge", "squared_hinge"], default="squared_hinge")) + dual = cs.add_hyperparameter(Constant("dual", "False")) + # This is set ad-hoc + tol = cs.add_hyperparameter(UniformFloatHyperparameter( + "tol", 1e-5, 1e-1, default=1e-4, log=True)) + C = cs.add_hyperparameter(UniformFloatHyperparameter( + "C", 0.03125, 32768, log=True, default=1.0)) + multi_class = cs.add_hyperparameter(Constant("multi_class", "ovr")) + # These are set ad-hoc + fit_intercept = cs.add_hyperparameter(Constant("fit_intercept", "True")) + intercept_scaling = cs.add_hyperparameter(Constant( + "intercept_scaling", 1)) + + penalty_and_loss = ForbiddenAndConjunction( + ForbiddenEqualsClause(penalty, "l1"), + ForbiddenEqualsClause(loss, "hinge") + ) + cs.add_forbidden_clause(penalty_and_loss) + return cs diff --git a/autosklearn/pipeline/components/feature_preprocessing/no_preprocessing.py b/autosklearn/pipeline/components/feature_preprocessing/no_preprocessing.py new file mode 100644 index 0000000000..0caeb4e6ca --- /dev/null +++ b/autosklearn/pipeline/components/feature_preprocessing/no_preprocessing.py @@ -0,0 +1,45 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace + +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * + + +class NoPreprocessing(AutoSklearnPreprocessingAlgorithm): + + def __init__(self, random_state): + """ This preprocessor does not change the data """ + self.preprocessor = None + + def fit(self, X, Y=None): + self.preprocessor = 0 + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return X + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'no', + 'name': 'NoPreprocessing', + 'handles_missing_values': True, + 'handles_nominal_values': True, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 
'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE, UNSIGNED_DATA), + 'output': (INPUT,), + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + return cs diff --git a/autosklearn/pipeline/components/feature_preprocessing/nystroem_sampler.py b/autosklearn/pipeline/components/feature_preprocessing/nystroem_sampler.py new file mode 100644 index 0000000000..216017b362 --- /dev/null +++ b/autosklearn/pipeline/components/feature_preprocessing/nystroem_sampler.py @@ -0,0 +1,121 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + UniformIntegerHyperparameter, CategoricalHyperparameter +from HPOlibConfigSpace.conditions import InCondition, EqualsCondition, AndConjunction + +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * + + +class Nystroem(AutoSklearnPreprocessingAlgorithm): + def __init__(self, kernel, n_components, gamma=1.0, degree=3, + coef0=1, random_state=None): + self.kernel = kernel + self.n_components = int(n_components) + self.gamma = float(gamma) + self.degree = int(degree) + self.coef0 = float(coef0) + self.random_state = random_state + + def fit(self, X, Y=None): + import scipy.sparse + import sklearn.kernel_approximation + + self.preprocessor = sklearn.kernel_approximation.Nystroem( + kernel=self.kernel, n_components=self.n_components, + gamma=self.gamma, degree=self.degree, coef0=self.coef0, + random_state=self.random_state) + + # Because the pipeline guarantees that each feature is positive, + # clip all values below zero to zero + if self.kernel == 'chi2': + if scipy.sparse.issparse(X): + X.data[X.data < 0] = 0.0 + else: + X[X < 0] = 0.0 + + self.preprocessor.fit(X.astype(np.float64)) + return self + + def transform(self, X): + import scipy.sparse + + # Because the pipeline guarantees that each feature is positive, + # clip all values below zero to zero + if self.kernel == 'chi2': + if scipy.sparse.issparse(X): + X.data[X.data < 0] = 0.0 + else: + X[X < 0] = 0.0 + + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(dataset_properties=None): + data_type = UNSIGNED_DATA + + if dataset_properties is not None: + signed = dataset_properties.get('signed') + if signed is not None: + data_type = SIGNED_DATA if signed is True else UNSIGNED_DATA + return {'shortname': 'Nystroem', + 'name': 'Nystroem kernel approximation', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE, data_type), + 'output': (INPUT, UNSIGNED_DATA), + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + if dataset_properties is not None and \ + (dataset_properties.get("sparse") is True or + dataset_properties.get("signed") is False): + 
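+ # The chi2 kernel is only defined for non-negative feature values
+ # (hence the clipping in fit/transform above), so it is withheld
+ # whenever the dataset properties do not guarantee suitable input.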
allow_chi2 = False + else: + allow_chi2 = True + + possible_kernels = ['poly', 'rbf', 'sigmoid', 'cosine'] + if allow_chi2: + possible_kernels.append("chi2") + kernel = CategoricalHyperparameter('kernel', possible_kernels, 'rbf') + degree = UniformIntegerHyperparameter('degree', 2, 5, 3) + gamma = UniformFloatHyperparameter("gamma", 3.0517578125e-05, 8, + log=True, default=0.1) + coef0 = UniformFloatHyperparameter("coef0", -1, 1, default=0) + n_components = UniformIntegerHyperparameter( + "n_components", 50, 10000, default=100, log=True) + + cs = ConfigurationSpace() + cs.add_hyperparameter(kernel) + cs.add_hyperparameter(degree) + cs.add_hyperparameter(gamma) + cs.add_hyperparameter(coef0) + cs.add_hyperparameter(n_components) + + degree_depends_on_poly = EqualsCondition(degree, kernel, "poly") + coef0_condition = InCondition(coef0, kernel, ["poly", "sigmoid"]) + + gamma_kernels = ["poly", "rbf", "sigmoid"] + if allow_chi2: + gamma_kernels.append("chi2") + gamma_condition = InCondition(gamma, kernel, gamma_kernels) + cs.add_condition(degree_depends_on_poly) + cs.add_condition(coef0_condition) + cs.add_condition(gamma_condition) + return cs diff --git a/autosklearn/pipeline/components/feature_preprocessing/pca.py b/autosklearn/pipeline/components/feature_preprocessing/pca.py new file mode 100644 index 0000000000..26362ffc29 --- /dev/null +++ b/autosklearn/pipeline/components/feature_preprocessing/pca.py @@ -0,0 +1,68 @@ +import numpy as np + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter + +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * + + +class PCA(AutoSklearnPreprocessingAlgorithm): + def __init__(self, keep_variance, whiten, random_state=None): + self.keep_variance = keep_variance + self.whiten = whiten + self.random_state = random_state + + def fit(self, X, Y=None): + import sklearn.decomposition + n_components = float(self.keep_variance) + self.preprocessor = sklearn.decomposition.PCA(n_components=n_components, + whiten=self.whiten, + copy=True) + self.preprocessor.fit(X) + + if not np.isfinite(self.preprocessor.components_).all(): + raise ValueError("PCA found non-finite components.") + + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'PCA', + 'name': 'Principal Component Analysis', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + # TODO write a test to make sure that the PCA scales data itself + 'prefers_data_scaled': False, + # TODO find out if this is good because of sparsity... + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + # TODO document that we have to be very careful + 'is_deterministic': False, + 'handles_sparse': False, + 'handles_dense': True, + 'input': (DENSE, UNSIGNED_DATA), + 'output': (DENSE, UNSIGNED_DATA), + # TODO find out what is best used here!
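+ # PCA centers the data, which would destroy sparsity; this is why
+ # only dense input is declared above.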
+ 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + keep_variance = UniformFloatHyperparameter( + "keep_variance", 0.5, 0.9999, default=0.9999) + whiten = CategoricalHyperparameter( + "whiten", ["False", "True"], default="False") + cs = ConfigurationSpace() + cs.add_hyperparameter(keep_variance) + cs.add_hyperparameter(whiten) + return cs diff --git a/autosklearn/pipeline/components/feature_preprocessing/polynomial.py b/autosklearn/pipeline/components/feature_preprocessing/polynomial.py new file mode 100644 index 0000000000..9596427801 --- /dev/null +++ b/autosklearn/pipeline/components/feature_preprocessing/polynomial.py @@ -0,0 +1,69 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \ + UniformIntegerHyperparameter + +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * + + +class PolynomialFeatures(AutoSklearnPreprocessingAlgorithm): + def __init__(self, degree, interaction_only, include_bias, random_state=None): + self.degree = int(degree) + self.interaction_only = interaction_only.lower() == 'true' + self.include_bias = include_bias.lower() == 'true' + self.random_state = random_state + self.preprocessor = None + + def fit(self, X, Y): + import sklearn.preprocessing + + self.preprocessor = sklearn.preprocessing.PolynomialFeatures( + degree=self.degree, interaction_only=self.interaction_only, + include_bias=self.include_bias) + self.preprocessor.fit(X, Y) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'PolynomialFeatures', + 'name': 'PolynomialFeatures', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + # Find out if this is good because of sparsity + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + # TODO find out if this is right! + # this page suggests so: http://scikit-learn.org/stable/modules/svm.html#tips-on-practical-use + 'handles_sparse': True, + 'input': (DENSE, UNSIGNED_DATA), + 'output': (INPUT,), + # TODO find out what is best used here! + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + # More than degree 3 is too expensive!
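+ # Worked example of the blow-up (illustrative numbers): with
+ # include_bias=True and interaction_only=False, n input features expand
+ # to C(n + d, d) outputs; for n = 100 and degree d = 3 that is already
+ # C(103, 3) = 176,851 features, hence the cap at degree 3 below.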
+ degree = UniformIntegerHyperparameter("degree", 2, 3, 2) + interaction_only = CategoricalHyperparameter("interaction_only", + ["False", "True"], "False") + include_bias = CategoricalHyperparameter("include_bias", + ["True", "False"], "True") + + cs = ConfigurationSpace() + cs.add_hyperparameter(degree) + cs.add_hyperparameter(interaction_only) + cs.add_hyperparameter(include_bias) + + return cs diff --git a/autosklearn/pipeline/components/feature_preprocessing/random_trees_embedding.py b/autosklearn/pipeline/components/feature_preprocessing/random_trees_embedding.py new file mode 100644 index 0000000000..9fe95e577b --- /dev/null +++ b/autosklearn/pipeline/components/feature_preprocessing/random_trees_embedding.py @@ -0,0 +1,98 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter, \ + UnParametrizedHyperparameter, Constant + +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from autosklearn.pipeline.constants import * + + +class RandomTreesEmbedding(AutoSklearnPreprocessingAlgorithm): + + def __init__(self, n_estimators, max_depth, min_samples_split, + min_samples_leaf, min_weight_fraction_leaf, max_leaf_nodes, + sparse_output=True, n_jobs=1, random_state=None): + self.n_estimators = n_estimators + self.max_depth = max_depth + self.min_samples_split = min_samples_split + self.min_samples_leaf = min_samples_leaf + self.max_leaf_nodes = max_leaf_nodes + self.min_weight_fraction_leaf = min_weight_fraction_leaf + self.sparse_output = sparse_output + self.n_jobs = n_jobs + self.random_state = random_state + + def fit(self, X, Y=None): + import sklearn.ensemble + + if self.max_depth == "None": + self.max_depth = None + else: + self.max_depth = int(self.max_depth) + if self.max_leaf_nodes == "None": + self.max_leaf_nodes = None + else: + self.max_leaf_nodes = int(self.max_leaf_nodes) + + self.preprocessor = sklearn.ensemble.RandomTreesEmbedding( + n_estimators=self.n_estimators, + max_depth=self.max_depth, + min_samples_split=self.min_samples_split, + min_samples_leaf=self.min_samples_leaf, + max_leaf_nodes=self.max_leaf_nodes, + sparse_output=self.sparse_output, + n_jobs=self.n_jobs, + random_state=self.random_state + ) + self.preprocessor.fit(X, Y) + return self + + def transform(self, X): + if self.preprocessor is None: + raise NotImplementedError() + return self.preprocessor.transform(X) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'RandomTreesEmbedding', + 'name': 'Random Trees Embedding', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': True, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': True, + 'is_deterministic': True, + 'handles_sparse': False, + 'handles_dense': True, + 'input': (DENSE, SPARSE, UNSIGNED_DATA), + 'output': (SPARSE, SIGNED_DATA), + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + n_estimators = UniformIntegerHyperparameter(name="n_estimators", + lower=10, upper=100, + default=10) + max_depth = UniformIntegerHyperparameter(name="max_depth", + lower=2, upper=10, + default=5) + min_samples_split = UniformIntegerHyperparameter(name="min_samples_split", + lower=2, upper=20, + default=2) + min_samples_leaf = 
+            name="min_samples_leaf", lower=1, upper=20, default=1)
+        # Note: this constant (1.0) lies outside sklearn's valid range of
+        # [0, 0.5] and is therefore not forwarded to the estimator in fit()
+        min_weight_fraction_leaf = Constant('min_weight_fraction_leaf', 1.0)
+        max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes",
+                                                      value="None")
+        cs = ConfigurationSpace()
+        cs.add_hyperparameter(n_estimators)
+        cs.add_hyperparameter(max_depth)
+        cs.add_hyperparameter(min_samples_split)
+        cs.add_hyperparameter(min_samples_leaf)
+        cs.add_hyperparameter(min_weight_fraction_leaf)
+        cs.add_hyperparameter(max_leaf_nodes)
+        return cs
diff --git a/autosklearn/pipeline/components/feature_preprocessing/select_percentile.py b/autosklearn/pipeline/components/feature_preprocessing/select_percentile.py
new file mode 100644
index 0000000000..c928e2f471
--- /dev/null
+++ b/autosklearn/pipeline/components/feature_preprocessing/select_percentile.py
@@ -0,0 +1,19 @@
+class SelectPercentileBase(object):
+
+    def fit(self, X, y):
+        import sklearn.feature_selection
+
+        self.preprocessor = sklearn.feature_selection.SelectPercentile(
+            score_func=self.score_func,
+            percentile=self.percentile)
+
+        self.preprocessor.fit(X, y)
+        return self
+
+    def transform(self, X):
+        if self.preprocessor is None:
+            raise NotImplementedError()
+        Xt = self.preprocessor.transform(X)
+        if Xt.shape[1] == 0:
+            raise ValueError("%s removed all features." % self.__class__.__name__)
+        return Xt
diff --git a/autosklearn/pipeline/components/feature_preprocessing/select_percentile_classification.py b/autosklearn/pipeline/components/feature_preprocessing/select_percentile_classification.py
new file mode 100644
index 0000000000..a5548c102b
--- /dev/null
+++ b/autosklearn/pipeline/components/feature_preprocessing/select_percentile_classification.py
@@ -0,0 +1,114 @@
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, CategoricalHyperparameter, Constant
+
+from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm
+from autosklearn.pipeline.components.feature_preprocessing.select_percentile import SelectPercentileBase
+from autosklearn.pipeline.constants import *
+
+
+class SelectPercentileClassification(SelectPercentileBase,
+                                     AutoSklearnPreprocessingAlgorithm):
+
+    def __init__(self, percentile, score_func="chi2", random_state=None):
+        """ Parameters:
+        random_state : ignored
+
+        score_func : callable, Function taking two arrays X and y, and
+                     returning a pair of arrays (scores, pvalues).
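+
+        Illustrative usage (a sketch, not part of the original docs):
+            selector = SelectPercentileClassification(percentile=50)
+            X_reduced = selector.fit(X, y).transform(X)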
+ """ + import sklearn.feature_selection + + self.random_state = random_state # We don't use this + self.percentile = int(float(percentile)) + if score_func == "chi2": + self.score_func = sklearn.feature_selection.chi2 + elif score_func == "f_classif": + self.score_func = sklearn.feature_selection.f_classif + else: + raise ValueError("score_func must be in ('chi2, 'f_classif'), " + "but is: %s" % score_func) + + def fit(self, X, y): + import scipy.sparse + import sklearn.feature_selection + + self.preprocessor = sklearn.feature_selection.SelectPercentile( + score_func=self.score_func, + percentile=self.percentile) + + # Because the pipeline guarantees that each feature is positive, + # clip all values below zero to zero + if self.score_func == sklearn.feature_selection.chi2: + if scipy.sparse.issparse(X): + X.data[X.data<0] = 0.0 + else: + X[X<0] = 0.0 + + self.preprocessor.fit(X, y) + return self + + def transform(self, X): + import scipy.sparse + import sklearn.feature_selection + + # Because the pipeline guarantees that each feature is positive, + # clip all values below zero to zero + if self.score_func == sklearn.feature_selection.chi2: + if scipy.sparse.issparse(X): + X.data[X.data < 0] = 0.0 + else: + X[X < 0] = 0.0 + + if self.preprocessor is None: + raise NotImplementedError() + Xt = self.preprocessor.transform(X) + if Xt.shape[1] == 0: + raise ValueError( + "%s removed all features." % self.__class__.__name__) + return Xt + + + @staticmethod + def get_properties(dataset_properties=None): + data_type = UNSIGNED_DATA + if dataset_properties is not None: + signed = dataset_properties.get('signed') + if signed is not None: + data_type = SIGNED_DATA if signed is True else UNSIGNED_DATA + + return {'shortname': 'SPC', + 'name': 'Select Percentile Classification', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': False, + 'prefers_data_normalized': False, + 'handles_regression': False, + 'handles_classification': True, + 'handles_multiclass': True, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': True, + 'handles_dense': True, + 'input': (SPARSE, DENSE, data_type), + 'output': (INPUT,), + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + percentile = UniformFloatHyperparameter( + name="percentile", lower=1, upper=99, default=50) + + score_func = CategoricalHyperparameter( + name="score_func", choices=["chi2", "f_classif"], default="chi2") + if dataset_properties is not None: + # Chi2 can handle sparse data, so we respect this + if 'sparse' in dataset_properties and dataset_properties['sparse']: + score_func = Constant( + name="score_func", value="chi2") + + cs = ConfigurationSpace() + cs.add_hyperparameter(percentile) + cs.add_hyperparameter(score_func) + + return cs diff --git a/autosklearn/pipeline/components/feature_preprocessing/select_percentile_regression.py b/autosklearn/pipeline/components/feature_preprocessing/select_percentile_regression.py new file mode 100644 index 0000000000..ba96074889 --- /dev/null +++ b/autosklearn/pipeline/components/feature_preprocessing/select_percentile_regression.py @@ -0,0 +1,59 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, UnParametrizedHyperparameter + +from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm +from 
+from autosklearn.pipeline.constants import *
+
+
+class SelectPercentileRegression(SelectPercentileBase,
+                                 AutoSklearnPreprocessingAlgorithm):
+
+    def __init__(self, percentile, score_func="f_regression", random_state=None):
+        """ Parameters:
+        random_state : ignored
+
+        score_func : callable, Function taking two arrays X and y, and
+                     returning a pair of arrays (scores, pvalues).
+        """
+        import sklearn.feature_selection
+
+        self.random_state = random_state  # We don't use this
+        self.percentile = int(float(percentile))
+        # The default must be "f_regression": it is the only accepted value
+        # and the only one the search space below can produce
+        if score_func == "f_regression":
+            self.score_func = sklearn.feature_selection.f_regression
+        else:
+            raise ValueError("Don't know this scoring function: %s" % score_func)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'SPR',
+                'name': 'Select Percentile Regression',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': False,
+                'prefers_data_normalized': False,
+                'handles_regression': True,
+                'handles_classification': False,
+                'handles_multiclass': False,
+                'handles_multilabel': False,
+                'is_deterministic': True,
+                'handles_sparse': False,
+                'handles_dense': True,
+                'input': (DENSE, UNSIGNED_DATA),
+                'output': (INPUT,),
+                'preferred_dtype': None}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        percentile = UniformFloatHyperparameter(
+            "percentile", lower=1, upper=99, default=50)
+
+        score_func = UnParametrizedHyperparameter(
+            name="score_func", value="f_regression")
+
+        cs = ConfigurationSpace()
+        cs.add_hyperparameter(percentile)
+        cs.add_hyperparameter(score_func)
+        return cs
diff --git a/autosklearn/pipeline/components/feature_preprocessing/select_rates.py b/autosklearn/pipeline/components/feature_preprocessing/select_rates.py
new file mode 100644
index 0000000000..243fa88e8b
--- /dev/null
+++ b/autosklearn/pipeline/components/feature_preprocessing/select_rates.py
@@ -0,0 +1,122 @@
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \
+    CategoricalHyperparameter, Constant
+
+from autosklearn.pipeline.components.base import \
+    AutoSklearnPreprocessingAlgorithm
+from autosklearn.pipeline.constants import *
+
+
+class SelectRates(AutoSklearnPreprocessingAlgorithm):
+    def __init__(self, alpha, mode='fpr',
+                 score_func="chi2", random_state=None):
+        import sklearn.feature_selection
+
+        self.random_state = random_state  # We don't use this
+        self.alpha = float(alpha)
+
+        if score_func == "chi2":
+            self.score_func = sklearn.feature_selection.chi2
+        elif score_func == "f_classif":
+            self.score_func = sklearn.feature_selection.f_classif
+        else:
+            raise ValueError("score_func must be in ('chi2', 'f_classif'), "
+                             "but is: %s" % score_func)
+
+        self.mode = mode
+
+    def fit(self, X, y):
+        import scipy.sparse
+        import sklearn.feature_selection
+
+        self.preprocessor = sklearn.feature_selection.GenericUnivariateSelect(
+            score_func=self.score_func, param=self.alpha, mode=self.mode)
+
+        # Because the pipeline guarantees that each feature is positive,
+        # clip all values below zero to zero
+        if self.score_func == sklearn.feature_selection.chi2:
+            if scipy.sparse.issparse(X):
+                X.data[X.data < 0] = 0.0
+            else:
+                X[X < 0] = 0.0
+
+        self.preprocessor.fit(X, y)
+        return self
+
+    def transform(self, X):
+        import scipy.sparse
+        import sklearn.feature_selection
+
+        # Because the pipeline guarantees that each feature is positive,
+        # clip all values below zero to zero
+        if self.score_func == sklearn.feature_selection.chi2:
+            if scipy.sparse.issparse(X):
+                X.data[X.data < 0] = 0.0
+            else:
+                X[X < 0] = 0.0
+
+        if self.preprocessor is None:
+            raise NotImplementedError()
+        try:
+            Xt = self.preprocessor.transform(X)
+        except ValueError as e:
+            if "zero-size array to reduction operation maximum which has no " \
+                    "identity" in str(e):
+                raise ValueError(
+                    "%s removed all features." % self.__class__.__name__)
+            else:
+                raise e
+
+        if Xt.shape[1] == 0:
+            raise ValueError(
+                "%s removed all features." % self.__class__.__name__)
+        return Xt
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        data_type = UNSIGNED_DATA
+
+        if dataset_properties is not None:
+            signed = dataset_properties.get('signed')
+            if signed is not None:
+                data_type = SIGNED_DATA if signed is True else UNSIGNED_DATA
+
+        return {'shortname': 'SR',
+                'name': 'Univariate Feature Selection based on rates',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': False,
+                'prefers_data_normalized': False,
+                'handles_regression': False,
+                'handles_classification': True,
+                'handles_multiclass': True,
+                'handles_multilabel': False,
+                'is_deterministic': True,
+                'handles_sparse': True,
+                'handles_dense': True,
+                'input': (SPARSE, DENSE, data_type),
+                'output': (INPUT,),
+                'preferred_dtype': None}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        alpha = UniformFloatHyperparameter(
+            name="alpha", lower=0.01, upper=0.5, default=0.1)
+
+        score_func = CategoricalHyperparameter(
+            name="score_func", choices=["chi2", "f_classif"], default="chi2")
+        if dataset_properties is not None:
+            # Chi2 can handle sparse data, so we respect this
+            if 'sparse' in dataset_properties and dataset_properties['sparse']:
+                score_func = Constant(
+                    name="score_func", value="chi2")
+
+        mode = CategoricalHyperparameter('mode', ['fpr', 'fdr', 'fwe'], 'fpr')
+
+        cs = ConfigurationSpace()
+        cs.add_hyperparameter(alpha)
+        cs.add_hyperparameter(score_func)
+        cs.add_hyperparameter(mode)
+
+        return cs
diff --git a/autosklearn/pipeline/components/feature_preprocessing/truncatedSVD.py b/autosklearn/pipeline/components/feature_preprocessing/truncatedSVD.py
new file mode 100644
index 0000000000..9108eee2c3
--- /dev/null
+++ b/autosklearn/pipeline/components/feature_preprocessing/truncatedSVD.py
@@ -0,0 +1,61 @@
+import numpy as np
+
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import UniformIntegerHyperparameter
+
+from autosklearn.pipeline.components.base import AutoSklearnPreprocessingAlgorithm
+from autosklearn.pipeline.constants import *
+
+
+class TruncatedSVD(AutoSklearnPreprocessingAlgorithm):
+    def __init__(self, target_dim, random_state=None):
+        self.target_dim = int(target_dim)
+        self.random_state = random_state
+        self.preprocessor = None
+
+    def fit(self, X, Y):
+        import sklearn.decomposition
+
+        target_dim = min(self.target_dim, X.shape[1] - 1)
+        self.preprocessor = sklearn.decomposition.TruncatedSVD(
+            target_dim, algorithm='randomized')
+        # TODO: remove when migrating to sklearn 0.16
+        # Circumvents a bug in sklearn
+        # https://github.com/scikit-learn/scikit-learn/commit/f08b8c8e52663167819f242f605db39f3b5a6d0c
+        # X = X.astype(np.float64)
+        self.preprocessor.fit(X, Y)
+
+        return self
+
+    def transform(self, X):
+        if self.preprocessor is None:
+            raise NotImplementedError()
+        return self.preprocessor.transform(X)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'TSVD',
+                'name': 'Truncated Singular Value Decomposition',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': False,
+                'prefers_data_normalized': False,
+                'handles_regression': True,
+                'handles_classification': True,
+                'handles_multiclass': True,
+                'handles_multilabel': True,
+                'is_deterministic': True,
+                'handles_sparse': True,
+                'handles_dense': False,
+                'input': (SPARSE, UNSIGNED_DATA),
+                'output': (DENSE, INPUT),
+                'preferred_dtype': np.float32}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        target_dim = UniformIntegerHyperparameter(
+            "target_dim", 10, 256, default=128)
+        cs = ConfigurationSpace()
+        cs.add_hyperparameter(target_dim)
+        return cs
diff --git a/autosklearn/pipeline/components/regression/__init__.py b/autosklearn/pipeline/components/regression/__init__.py
new file mode 100644
index 0000000000..b1c488acb1
--- /dev/null
+++ b/autosklearn/pipeline/components/regression/__init__.py
@@ -0,0 +1,162 @@
+from collections import OrderedDict
+import copy
+import importlib
+import inspect
+import os
+import pkgutil
+import sys
+
+from ..base import AutoSklearnRegressionAlgorithm
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter
+from HPOlibConfigSpace.conditions import EqualsCondition
+
+regressor_directory = os.path.split(__file__)[0]
+_regressors = OrderedDict()
+
+
+for module_loader, module_name, ispkg in pkgutil.iter_modules([regressor_directory]):
+    full_module_name = "%s.%s" % (__package__, module_name)
+    if full_module_name not in sys.modules and not ispkg:
+        module = importlib.import_module(full_module_name)
+
+        for member_name, obj in inspect.getmembers(module):
+            if inspect.isclass(obj) and AutoSklearnRegressionAlgorithm in obj.__bases__:
+                # TODO test if the obj implements the interface
+                # Keep in mind that this only registers the component class,
+                # but does not instantiate the underlying regressor
+                regressor = obj
+                _regressors[module_name] = regressor
+
+
+class RegressorChoice(object):
+    def __init__(self, **params):
+        choice = params['__choice__']
+        del params['__choice__']
+        self.choice = self.get_components()[choice](**params)
+
+    @classmethod
+    def get_components(cls):
+        return _regressors
+
+    @classmethod
+    def get_available_components(cls, data_prop,
+                                 include=None,
+                                 exclude=None):
+        available_comp = cls.get_components()
+        components_dict = OrderedDict()
+
+        if include is not None and exclude is not None:
+            raise ValueError(
+                "The arguments include and exclude cannot be used together.")
+
+        if include is not None:
+            for incl in include:
+                if incl not in available_comp:
+                    raise ValueError("Trying to include unknown component: "
+                                     "%s" % incl)
+
+        for name in available_comp:
+            if include is not None and name not in include:
+                continue
+            elif exclude is not None and name in exclude:
+                continue
+
+            entry = available_comp[name]
+
+            # Avoid infinite loop
+            if entry == RegressorChoice:
+                continue
+
+            if entry.get_properties()['handles_regression'] is False:
+                continue
+            components_dict[name] = entry
+
+        return components_dict
+
+    @classmethod
+    def get_hyperparameter_search_space(cls, dataset_properties,
+                                        default=None,
+                                        include=None,
+                                        exclude=None):
+        if include is not None and exclude is not None:
+            raise ValueError(
+                "The arguments include and exclude cannot be used together.")
+
+        cs = ConfigurationSpace()
+
+        # Compile a list of all estimator objects for this problem
+        available_estimators = cls.get_available_components(
+            data_prop=dataset_properties,
+            include=include,
+            exclude=exclude)
+
+        if len(available_estimators) == 0:
+            raise ValueError("No regressors found")
+
+        if default is None:
+            defaults = ['random_forest', 'support_vector_regression'] + \
+                list(available_estimators.keys())
+            for default_ in defaults:
+                if default_ in available_estimators:
+                    if include is not None and default_ not in include:
+                        continue
+                    if exclude is not None and default_ in exclude:
+                        continue
+                    default = default_
+                    break
+
+        estimator = CategoricalHyperparameter('__choice__',
+                                              list(available_estimators.keys()),
+                                              default=default)
+        cs.add_hyperparameter(estimator)
+        for estimator_name in available_estimators.keys():
+
+            # We have to retrieve the configuration space every time because
+            # we change the objects it returns. If we reused it, we could not
+            # retrieve the conditions further down
+            # TODO implement copy for hyperparameters and forbidden and
+            # conditions!
+
+            estimator_configuration_space = available_estimators[
+                estimator_name]. \
+                get_hyperparameter_search_space(dataset_properties)
+            for parameter in estimator_configuration_space.get_hyperparameters():
+                new_parameter = copy.deepcopy(parameter)
+                new_parameter.name = "%s:%s" % (
+                    estimator_name, new_parameter.name)
+                cs.add_hyperparameter(new_parameter)
+                # We must only add a condition if the hyperparameter is not
+                # conditional on something else
+                if len(estimator_configuration_space.
+                        get_parents_of(parameter)) == 0:
+                    condition = EqualsCondition(new_parameter, estimator,
+                                                estimator_name)
+                    cs.add_condition(condition)
+
+            for condition in available_estimators[estimator_name]. \
+                    get_hyperparameter_search_space(
+                    dataset_properties).get_conditions():
+                dlcs = condition.get_descendant_literal_conditions()
+                for dlc in dlcs:
+                    if not dlc.child.name.startswith(estimator_name):
+                        dlc.child.name = "%s:%s" % (
+                            estimator_name, dlc.child.name)
+                    if not dlc.parent.name.startswith(estimator_name):
+                        dlc.parent.name = "%s:%s" % (
+                            estimator_name, dlc.parent.name)
+                cs.add_condition(condition)
+
+            for forbidden_clause in available_estimators[estimator_name]. \
+                    get_hyperparameter_search_space(
+                    dataset_properties).forbidden_clauses:
+                dlcs = forbidden_clause.get_descendant_literal_clauses()
+                for dlc in dlcs:
+                    if not dlc.hyperparameter.name.startswith(estimator_name):
+                        dlc.hyperparameter.name = "%s:%s" % (estimator_name,
+                                                             dlc.hyperparameter.name)
+                cs.add_forbidden_clause(forbidden_clause)
+
+        return cs
+
+
+_regressors['regressor'] = RegressorChoice
\ No newline at end of file
diff --git a/autosklearn/pipeline/components/regression/adaboost.py b/autosklearn/pipeline/components/regression/adaboost.py
new file mode 100644
index 0000000000..c6b06e99c8
--- /dev/null
+++ b/autosklearn/pipeline/components/regression/adaboost.py
@@ -0,0 +1,83 @@
+import numpy as np
+
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \
+    UniformIntegerHyperparameter, CategoricalHyperparameter
+
+from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm
+from autosklearn.pipeline.constants import *
+
+
+class AdaboostRegressor(AutoSklearnRegressionAlgorithm):
+    def __init__(self, n_estimators, learning_rate, loss, max_depth,
+                 random_state=None):
+        self.n_estimators = int(n_estimators)
+        self.learning_rate = float(learning_rate)
+        self.loss = loss
+        self.random_state = random_state
+        self.max_depth = max_depth
+        self.estimator = None
+
+    def fit(self, X, Y):
+        import sklearn.ensemble
+        import sklearn.tree
+
+        self.n_estimators = int(self.n_estimators)
+        self.learning_rate = float(self.learning_rate)
+        self.max_depth = int(self.max_depth)
+        # AdaBoostRegressor needs a regression tree as its base estimator;
+        # a classification tree cannot be boosted on continuous targets
+        base_estimator = sklearn.tree.DecisionTreeRegressor(
+            max_depth=self.max_depth)
+
+        self.estimator = sklearn.ensemble.AdaBoostRegressor(
+            base_estimator=base_estimator,
+            n_estimators=self.n_estimators,
+            learning_rate=self.learning_rate,
+            loss=self.loss,
+            random_state=self.random_state
+        )
+        self.estimator.fit(X, Y)
+        return self
+
+    def predict(self, X):
+        if self.estimator is None:
+            raise NotImplementedError
+        return self.estimator.predict(X)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'AB',
+                'name': 'AdaBoost Regressor',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': False,
+                # TODO find out if this is good because of sparsity...
+                'prefers_data_normalized': False,
+                'handles_regression': True,
+                'handles_classification': False,
+                'handles_multiclass': False,
+                'handles_multilabel': False,
+                'is_deterministic': True,
+                'handles_sparse': False,
+                'input': (DENSE, SPARSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS, ),
+                # TODO find out what is best used here!
+                # But rather fortran or C-contiguous?
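+                # Assumption, not verified here: sklearn's tree code works on
+                # float32 internally, so float32 input halves memory and may
+                # spare an internal conversion copy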
+                'preferred_dtype': np.float32}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        cs = ConfigurationSpace()
+
+        # base_estimator = Constant(name="base_estimator", value="None")
+        n_estimators = cs.add_hyperparameter(UniformIntegerHyperparameter(
+            name="n_estimators", lower=50, upper=500, default=50, log=False))
+        learning_rate = cs.add_hyperparameter(UniformFloatHyperparameter(
+            name="learning_rate", lower=0.0001, upper=2, default=0.1, log=True))
+        loss = cs.add_hyperparameter(CategoricalHyperparameter(
+            name="loss", choices=["linear", "square", "exponential"],
+            default="linear"))
+        max_depth = cs.add_hyperparameter(UniformIntegerHyperparameter(
+            name="max_depth", lower=1, upper=10, default=1, log=False))
+        return cs
diff --git a/autosklearn/pipeline/components/regression/decision_tree.py b/autosklearn/pipeline/components/regression/decision_tree.py
new file mode 100644
index 0000000000..1fa5259aa8
--- /dev/null
+++ b/autosklearn/pipeline/components/regression/decision_tree.py
@@ -0,0 +1,100 @@
+import numpy as np
+
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \
+    UniformIntegerHyperparameter, CategoricalHyperparameter, \
+    UnParametrizedHyperparameter, Constant
+
+from autosklearn.pipeline.components.base import \
+    AutoSklearnRegressionAlgorithm
+from autosklearn.pipeline.constants import *
+
+
+class DecisionTree(AutoSklearnRegressionAlgorithm):
+    def __init__(self, criterion, splitter, max_features, max_depth,
+                 min_samples_split, min_samples_leaf, min_weight_fraction_leaf,
+                 max_leaf_nodes, random_state=None):
+        self.criterion = criterion
+        self.splitter = splitter
+        self.max_features = max_features
+        self.max_depth = max_depth
+        self.min_samples_split = min_samples_split
+        self.min_samples_leaf = min_samples_leaf
+        self.max_leaf_nodes = max_leaf_nodes
+        self.min_weight_fraction_leaf = min_weight_fraction_leaf
+        self.random_state = random_state
+        self.estimator = None
+
+    def fit(self, X, y, sample_weight=None):
+        from sklearn.tree import DecisionTreeRegressor
+
+        self.max_features = float(self.max_features)
+        # The local variable must be set in both branches; otherwise the
+        # estimator construction below raises a NameError for "None"
+        if self.max_depth == "None":
+            max_depth = None
+        else:
+            num_features = X.shape[1]
+            max_depth = max(1, int(np.round(self.max_depth * num_features, 0)))
+        self.min_samples_split = int(self.min_samples_split)
+        self.min_samples_leaf = int(self.min_samples_leaf)
+        if self.max_leaf_nodes == "None":
+            self.max_leaf_nodes = None
+        else:
+            self.max_leaf_nodes = int(self.max_leaf_nodes)
+        self.min_weight_fraction_leaf = float(self.min_weight_fraction_leaf)
+
+        self.estimator = DecisionTreeRegressor(
+            criterion=self.criterion,
+            max_depth=max_depth,
+            min_samples_split=self.min_samples_split,
+            min_samples_leaf=self.min_samples_leaf,
+            max_leaf_nodes=self.max_leaf_nodes,
+            random_state=self.random_state)
+        self.estimator.fit(X, y, sample_weight=sample_weight)
+        return self
+
+    def predict(self, X):
+        if self.estimator is None:
+            raise NotImplementedError
+        return self.estimator.predict(X)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'DT',
+                'name': 'Decision Tree Regressor',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': False,
+                # TODO find out if this is good because of sparsity...
+                'prefers_data_normalized': False,
+                'handles_regression': True,
+                'handles_classification': False,
+                'handles_multiclass': False,
+                'handles_multilabel': False,
+                'is_deterministic': False,
+                'handles_sparse': True,
+                'input': (DENSE, SPARSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                # TODO find out what is best used here!
+                # But rather fortran or C-contiguous?
+                'preferred_dtype': np.float32}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        cs = ConfigurationSpace()
+
+        criterion = cs.add_hyperparameter(Constant('criterion', 'mse'))
+        splitter = cs.add_hyperparameter(Constant("splitter", "best"))
+        max_features = cs.add_hyperparameter(Constant('max_features', 1.0))
+        max_depth = cs.add_hyperparameter(UniformFloatHyperparameter(
+            'max_depth', 0., 2., default=0.5))
+        min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter(
+            "min_samples_split", 2, 20, default=2))
+        min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter(
+            "min_samples_leaf", 1, 20, default=1))
+        min_weight_fraction_leaf = cs.add_hyperparameter(
+            Constant("min_weight_fraction_leaf", 0.0))
+        max_leaf_nodes = cs.add_hyperparameter(
+            UnParametrizedHyperparameter("max_leaf_nodes", "None"))
+
+        return cs
diff --git a/autosklearn/pipeline/components/regression/extra_trees.py b/autosklearn/pipeline/components/regression/extra_trees.py
new file mode 100644
index 0000000000..f62ecb2143
--- /dev/null
+++ b/autosklearn/pipeline/components/regression/extra_trees.py
@@ -0,0 +1,181 @@
+import numpy as np
+
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \
+    UniformIntegerHyperparameter, CategoricalHyperparameter, \
+    UnParametrizedHyperparameter, Constant
+
+from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm
+from autosklearn.pipeline.constants import *
+
+
+class ExtraTreesRegressor(AutoSklearnRegressionAlgorithm):
+    def __init__(self, n_estimators, criterion, min_samples_leaf,
+                 min_samples_split, max_features,
+                 max_leaf_nodes_or_max_depth="max_depth",
+                 bootstrap=False, max_leaf_nodes=None, max_depth="None",
+                 oob_score=False, n_jobs=1, random_state=None, verbose=0):
+
+        self.n_estimators = int(n_estimators)
+        self.estimator_increment = 10
+        # ("mse") is just the string "mse"; a one-element tuple is needed
+        # for a correct membership test
+        if criterion not in ("mse",):
+            raise ValueError("'criterion' is not in ('mse',): "
+                             "%s" % criterion)
+        self.criterion = criterion
+
+        if max_leaf_nodes_or_max_depth == "max_depth":
+            self.max_leaf_nodes = None
+            if max_depth == "None":
+                self.max_depth = None
+            else:
+                self.max_depth = int(max_depth)
+            #if use_max_depth == "True":
+            #    self.max_depth = int(max_depth)
+            #elif use_max_depth == "False":
+            #    self.max_depth = None
+        else:
+            if max_leaf_nodes == "None":
+                self.max_leaf_nodes = None
+            else:
+                self.max_leaf_nodes = int(max_leaf_nodes)
+            self.max_depth = None
+
+        self.min_samples_leaf = int(min_samples_leaf)
+        self.min_samples_split = int(min_samples_split)
+
+        self.max_features = float(max_features)
+
+        # Accept both the strings coming from the configuration space and
+        # the plain boolean default of the signature
+        self.bootstrap = bootstrap == "True" or bootstrap is True
+
+        self.oob_score = oob_score
+        self.n_jobs = int(n_jobs)
+        self.random_state = random_state
+        self.verbose = int(verbose)
+        self.estimator = None
+
+    def fit(self, X, y, refit=False):
+        if self.estimator is None or refit:
+            self.iterative_fit(X, y, n_iter=1, refit=refit)
+
+        while not self.configuration_fully_fitted():
+            self.iterative_fit(X, y, n_iter=1)
+        return self
+
+    def iterative_fit(self, X, y, n_iter=1, refit=False):
+        from sklearn.ensemble import ExtraTreesRegressor as ETR
+
+        if refit:
+            self.estimator = None
+
+        if self.estimator is None:
+            num_features = X.shape[1]
+            max_features = int(
+                float(self.max_features) * (np.log(num_features) + 1))
+            # Use at most half of the features
+            max_features = max(1, min(int(X.shape[1] / 2), max_features))
+            self.estimator = ETR(
+                n_estimators=0, criterion=self.criterion,
+                max_depth=self.max_depth,
+                min_samples_split=self.min_samples_split,
+                min_samples_leaf=self.min_samples_leaf,
+                bootstrap=self.bootstrap,
+                max_features=max_features, max_leaf_nodes=self.max_leaf_nodes,
+                oob_score=self.oob_score, n_jobs=self.n_jobs,
+                verbose=self.verbose,
+                random_state=self.random_state,
+                warm_start=True
+            )
+        tmp = self.estimator  # TODO copy ?
+        tmp.n_estimators += n_iter
+        tmp.fit(X, y,)
+        self.estimator = tmp
+        return self
+
+    def configuration_fully_fitted(self):
+        if self.estimator is None:
+            return False
+        return not len(self.estimator.estimators_) < self.n_estimators
+
+    def predict(self, X):
+        if self.estimator is None:
+            raise NotImplementedError
+        return self.estimator.predict(X)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'ET',
+                'name': 'Extra Trees Regressor',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': False,
+                # TODO find out if this is good because of sparsity...
+                'prefers_data_normalized': False,
+                'handles_regression': True,
+                'handles_classification': False,
+                'handles_multiclass': False,
+                'handles_multilabel': False,
+                'is_deterministic': True,
+                'handles_sparse': True,
+                'input': (DENSE, SPARSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                # TODO find out what is best used here!
+                # But rather fortran or C-contiguous?
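+                # The ensemble is grown incrementally: iterative_fit() above
+                # raises n_estimators by n_iter per call and relies on
+                # sklearn's warm_start to keep the already fitted trees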
+                'preferred_dtype': np.float32}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        cs = ConfigurationSpace()
+
+        n_estimators = cs.add_hyperparameter(Constant("n_estimators", 100))
+        criterion = cs.add_hyperparameter(Constant("criterion", "mse"))
+        max_features = cs.add_hyperparameter(UniformFloatHyperparameter(
+            "max_features", 0.5, 5, default=1))
+
+        max_depth = cs.add_hyperparameter(
+            UnParametrizedHyperparameter(name="max_depth", value="None"))
+
+        min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter(
+            "min_samples_split", 2, 20, default=2))
+        min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter(
+            "min_samples_leaf", 1, 20, default=1))
+
+        # Unparametrized, we use min_samples as regularization
+        # max_leaf_nodes_or_max_depth = UnParametrizedHyperparameter(
+        #     name="max_leaf_nodes_or_max_depth", value="max_depth")
+        # CategoricalHyperparameter("max_leaf_nodes_or_max_depth",
+        #     choices=["max_leaf_nodes", "max_depth"], default="max_depth")
+        # min_weight_fraction_leaf = UniformFloatHyperparameter(
+        #     "min_weight_fraction_leaf", 0.0, 0.1)
+        # max_leaf_nodes = UnParametrizedHyperparameter(name="max_leaf_nodes",
+        #     value="None")
+
+        bootstrap = cs.add_hyperparameter(CategoricalHyperparameter(
+            "bootstrap", ["True", "False"], default="False"))
+
+        # Conditions
+        # Not applicable because max_leaf_nodes is no legal value of the parent
+        #cond_max_leaf_nodes_or_max_depth = \
+        #    EqualsCondition(child=max_leaf_nodes,
+        #                    parent=max_leaf_nodes_or_max_depth,
+        #                    value="max_leaf_nodes")
+        #cond2_max_leaf_nodes_or_max_depth = \
+        #    EqualsCondition(child=use_max_depth,
+        #                    parent=max_leaf_nodes_or_max_depth,
+        #                    value="max_depth")
+
+        #cond_max_depth = EqualsCondition(child=max_depth, parent=use_max_depth,
+        #value="True")
+        #cs.add_condition(cond_max_leaf_nodes_or_max_depth)
+        #cs.add_condition(cond2_max_leaf_nodes_or_max_depth)
+        #cs.add_condition(cond_max_depth)
+
+        return cs
diff --git a/autosklearn/pipeline/components/regression/gaussian_process.py b/autosklearn/pipeline/components/regression/gaussian_process.py
new file mode 100644
index 0000000000..b74e1fdfcc
--- /dev/null
+++ b/autosklearn/pipeline/components/regression/gaussian_process.py
@@ -0,0 +1,85 @@
+import numpy as np
+
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter
+
+from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm
+from autosklearn.pipeline.constants import *
+
+
+class GaussianProcess(AutoSklearnRegressionAlgorithm):
+    def __init__(self, nugget, thetaL, thetaU, normalize=False, copy_X=False,
+                 random_state=None):
+        self.nugget = float(nugget)
+        self.thetaL = float(thetaL)
+        self.thetaU = float(thetaU)
+        self.normalize = normalize
+        self.copy_X = copy_X
+        # We ignore it
+        self.random_state = random_state
+        self.estimator = None
+        self.scaler = None
+
+    def fit(self, X, Y):
+        import sklearn.gaussian_process
+        import sklearn.preprocessing
+
+        # Instantiate a Gaussian Process model
+        self.estimator = sklearn.gaussian_process.GaussianProcess(
+            corr='squared_exponential',
+            theta0=np.ones(X.shape[1]) * 1e-1,
+            thetaL=np.ones(X.shape[1]) * self.thetaL,
+            thetaU=np.ones(X.shape[1]) * self.thetaU,
+            nugget=self.nugget,
+            optimizer='Welch',
+            random_state=self.random_state)
+        self.scaler = sklearn.preprocessing.StandardScaler(copy=True)
+        self.scaler.fit(Y)
+        Y_scaled = self.scaler.transform(Y)
+        self.estimator.fit(X, Y_scaled)
+        return self
+
+    def predict(self, X):
+        if self.estimator is None:
+            raise NotImplementedError
+        if self.scaler is None:
+            raise NotImplementedError
+        Y_pred = self.estimator.predict(X, batch_size=512)
+        return self.scaler.inverse_transform(Y_pred)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'GP',
+                'name': 'Gaussian Process',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': True,
+                # TODO find out if this is good because of sparsity...
+                'prefers_data_normalized': True,
+                'handles_regression': True,
+                'handles_classification': False,
+                'handles_multiclass': False,
+                'handles_multilabel': False,
+                'is_deterministic': True,
+                'handles_sparse': False,
+                'input': (DENSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                # TODO find out what is best used here!
+                # But rather fortran or C-contiguous?
+                'preferred_dtype': np.float32}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        nugget = UniformFloatHyperparameter(
+            name="nugget", lower=0.0001, upper=10, default=0.1, log=True)
+        thetaL = UniformFloatHyperparameter(
+            name="thetaL", lower=1e-6, upper=1e-3, default=1e-4, log=True)
+        thetaU = UniformFloatHyperparameter(
+            name="thetaU", lower=0.2, upper=10, default=1.0, log=True)
+
+        cs = ConfigurationSpace()
+        cs.add_hyperparameter(nugget)
+        cs.add_hyperparameter(thetaL)
+        cs.add_hyperparameter(thetaU)
+        return cs
diff --git a/autosklearn/pipeline/components/regression/gradient_boosting.py b/autosklearn/pipeline/components/regression/gradient_boosting.py
new file mode 100644
index 0000000000..370a535498
--- /dev/null
+++ b/autosklearn/pipeline/components/regression/gradient_boosting.py
@@ -0,0 +1,160 @@
+import numpy as np
+
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \
+    UniformIntegerHyperparameter, CategoricalHyperparameter, Constant, \
+    UnParametrizedHyperparameter
+from HPOlibConfigSpace.conditions import InCondition
+
+from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm
+from autosklearn.pipeline.constants import *
+
+
+class GradientBoosting(AutoSklearnRegressionAlgorithm):
+    def __init__(self, loss, learning_rate, n_estimators, subsample,
+                 min_samples_split, min_samples_leaf,
+                 min_weight_fraction_leaf, max_depth, max_features,
+                 max_leaf_nodes, alpha=None, init=None, random_state=None,
+                 verbose=0):
+        self.loss = loss
+        self.learning_rate = learning_rate
+        self.n_estimators = n_estimators
+        self.subsample = subsample
+        self.min_samples_split = min_samples_split
+        self.min_samples_leaf = min_samples_leaf
+        self.min_weight_fraction_leaf = min_weight_fraction_leaf
+        self.max_depth = max_depth
+        self.max_features = max_features
+        self.max_leaf_nodes = max_leaf_nodes
+        self.alpha = alpha
+        self.init = init
+        self.random_state = random_state
+        self.verbose = verbose
+        self.estimator = None
+
+    def fit(self, X, y, sample_weight=None, refit=False):
+        if self.estimator is None or refit:
+            self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight,
+                               refit=refit)
+
+        while not self.configuration_fully_fitted():
+            self.iterative_fit(X, y, n_iter=1, sample_weight=sample_weight)
+        return self
+
+    def iterative_fit(self, X, y, sample_weight=None, n_iter=1, refit=False):
+        import sklearn.ensemble
+
+        # Special fix for gradient boosting!
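+        # np.ascontiguousarray guarantees a C-contiguous array; presumably
+        # this spares sklearn's gradient boosting a conversion of
+        # Fortran-ordered input on every boosting stage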
+        if isinstance(X, np.ndarray):
+            X = np.ascontiguousarray(X, dtype=X.dtype)
+        if refit:
+            self.estimator = None
+
+        if self.estimator is None:
+            self.learning_rate = float(self.learning_rate)
+            self.n_estimators = int(self.n_estimators)
+            self.subsample = float(self.subsample)
+            self.min_samples_split = int(self.min_samples_split)
+            self.min_samples_leaf = int(self.min_samples_leaf)
+            self.min_weight_fraction_leaf = float(self.min_weight_fraction_leaf)
+            if self.max_depth == "None":
+                self.max_depth = None
+            else:
+                self.max_depth = int(self.max_depth)
+            num_features = X.shape[1]
+            max_features = int(
+                float(self.max_features) * (np.log(num_features) + 1))
+            # Use at most half of the features
+            max_features = max(1, min(int(X.shape[1] / 2), max_features))
+            if self.max_leaf_nodes == "None":
+                self.max_leaf_nodes = None
+            else:
+                self.max_leaf_nodes = int(self.max_leaf_nodes)
+            if self.alpha is not None:
+                self.alpha = float(self.alpha)
+            self.verbose = int(self.verbose)
+
+            self.estimator = sklearn.ensemble.GradientBoostingRegressor(
+                loss=self.loss,
+                learning_rate=self.learning_rate,
+                n_estimators=0,
+                subsample=self.subsample,
+                min_samples_split=self.min_samples_split,
+                min_samples_leaf=self.min_samples_leaf,
+                min_weight_fraction_leaf=self.min_weight_fraction_leaf,
+                max_depth=self.max_depth,
+                max_features=max_features,
+                max_leaf_nodes=self.max_leaf_nodes,
+                # alpha is only used by the huber and quantile losses; fall
+                # back to sklearn's default (0.9) when it is inactive
+                alpha=self.alpha if self.alpha is not None else 0.9,
+                init=self.init,
+                random_state=self.random_state,
+                verbose=self.verbose,
+                warm_start=True,
+            )
+
+        tmp = self.estimator  # TODO copy ?
+        tmp.n_estimators += n_iter
+        tmp.fit(X, y, sample_weight=sample_weight)
+        self.estimator = tmp
+
+        return self
+
+    def configuration_fully_fitted(self):
+        if self.estimator is None:
+            return False
+        return not len(self.estimator.estimators_) < self.n_estimators
+
+    def predict(self, X):
+        if self.estimator is None:
+            raise NotImplementedError
+        return self.estimator.predict(X)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'GB',
+                'name': 'Gradient Boosting Regressor',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': False,
+                # TODO find out if this is good because of sparsity...
+                'handles_regression': True,
+                'handles_classification': False,
+                'handles_multiclass': False,
+                'handles_multilabel': False,
+                'prefers_data_normalized': False,
+                'is_deterministic': True,
+                'handles_sparse': False,
+                'input': (DENSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                # TODO find out what is best used here!
+                # But rather fortran or C-contiguous?
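+                # handles_sparse is False above because sklearn's gradient
+                # boosting at this version does not accept sparse input (an
+                # assumption about the sklearn release in use)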
+                'preferred_dtype': np.float32}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        cs = ConfigurationSpace()
+        loss = cs.add_hyperparameter(CategoricalHyperparameter(
+            "loss", ["ls", "lad", "huber", "quantile"], default="ls"))
+        learning_rate = cs.add_hyperparameter(UniformFloatHyperparameter(
+            name="learning_rate", lower=0.0001, upper=1, default=0.1, log=True))
+        n_estimators = cs.add_hyperparameter(Constant("n_estimators", 100))
+        max_depth = cs.add_hyperparameter(UniformIntegerHyperparameter(
+            name="max_depth", lower=1, upper=10, default=3))
+        min_samples_split = cs.add_hyperparameter(UniformIntegerHyperparameter(
+            name="min_samples_split", lower=2, upper=20, default=2, log=False))
+        min_samples_leaf = cs.add_hyperparameter(UniformIntegerHyperparameter(
+            name="min_samples_leaf", lower=1, upper=20, default=1, log=False))
+        min_weight_fraction_leaf = cs.add_hyperparameter(
+            UnParametrizedHyperparameter("min_weight_fraction_leaf", 0.))
+        subsample = cs.add_hyperparameter(UniformFloatHyperparameter(
+            name="subsample", lower=0.01, upper=1.0, default=1.0, log=False))
+        max_features = cs.add_hyperparameter(UniformFloatHyperparameter(
+            "max_features", 0.5, 5, default=1))
+        max_leaf_nodes = cs.add_hyperparameter(UnParametrizedHyperparameter(
+            name="max_leaf_nodes", value="None"))
+        alpha = cs.add_hyperparameter(UniformFloatHyperparameter(
+            "alpha", lower=0.75, upper=0.99, default=0.9))
+
+        cs.add_condition(InCondition(alpha, loss, ['huber', 'quantile']))
+        return cs
\ No newline at end of file
diff --git a/autosklearn/pipeline/components/regression/k_nearest_neighbors.py b/autosklearn/pipeline/components/regression/k_nearest_neighbors.py
new file mode 100644
index 0000000000..d73819c4e5
--- /dev/null
+++ b/autosklearn/pipeline/components/regression/k_nearest_neighbors.py
@@ -0,0 +1,64 @@
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \
+    Constant, UniformIntegerHyperparameter
+
+from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm
+from autosklearn.pipeline.constants import *
+
+
+class KNearestNeighborsRegressor(AutoSklearnRegressionAlgorithm):
+    def __init__(self, n_neighbors, weights, p, random_state=None):
+        self.n_neighbors = n_neighbors
+        self.weights = weights
+        self.p = p
+        self.random_state = random_state
+        self.estimator = None
+
+    def fit(self, X, Y):
+        import sklearn.neighbors
+
+        # Use the regressor variant; a classifier would treat the
+        # continuous targets as class labels
+        self.estimator = \
+            sklearn.neighbors.KNeighborsRegressor(
+                n_neighbors=self.n_neighbors,
+                weights=self.weights,
+                p=self.p)
+        self.estimator.fit(X, Y)
+        return self
+
+    def predict(self, X):
+        if self.estimator is None:
+            raise NotImplementedError()
+        return self.estimator.predict(X)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'KNN',
+                'name': 'K-Nearest Neighbor Regression',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': True,
+                # Find out if this is good because of sparsity
+                'prefers_data_normalized': False,
+                'handles_regression': True,
+                'handles_classification': False,
+                'handles_multiclass': False,
+                'handles_multilabel': False,
+                'is_deterministic': True,
+                'handles_sparse': True,
+                'input': (DENSE, SPARSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                # TODO find out what is best used here!
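+                # p below is the Minkowski power parameter: p=1 is the
+                # Manhattan distance, p=2 the Euclidean distance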
+                'preferred_dtype': None}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        cs = ConfigurationSpace()
+
+        n_neighbors = cs.add_hyperparameter(UniformIntegerHyperparameter(
+            name="n_neighbors", lower=1, upper=100, log=True, default=1))
+        weights = cs.add_hyperparameter(CategoricalHyperparameter(
+            name="weights", choices=["uniform", "distance"], default="uniform"))
+        p = cs.add_hyperparameter(CategoricalHyperparameter(
+            name="p", choices=[1, 2], default=2))
+
+        return cs
diff --git a/autosklearn/pipeline/components/regression/liblinear_svr.py b/autosklearn/pipeline/components/regression/liblinear_svr.py
new file mode 100644
index 0000000000..cf9766bbb3
--- /dev/null
+++ b/autosklearn/pipeline/components/regression/liblinear_svr.py
@@ -0,0 +1,97 @@
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \
+    CategoricalHyperparameter, Constant
+from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, \
+    ForbiddenAndConjunction
+
+from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm
+from autosklearn.pipeline.constants import *
+
+
+class LibLinear_SVR(AutoSklearnRegressionAlgorithm):
+    # Liblinear is not deterministic as it uses a RNG inside
+    def __init__(self, loss, epsilon, dual, tol, C, fit_intercept,
+                 intercept_scaling, random_state=None):
+        self.epsilon = epsilon
+        self.loss = loss
+        self.dual = dual
+        self.tol = tol
+        self.C = C
+        self.fit_intercept = fit_intercept
+        self.intercept_scaling = intercept_scaling
+        self.random_state = random_state
+        self.estimator = None
+
+    def fit(self, X, Y):
+        import sklearn.svm
+
+        self.C = float(self.C)
+        self.tol = float(self.tol)
+        self.epsilon = float(self.epsilon)
+
+        self.dual = self.dual == 'True'
+        self.fit_intercept = self.fit_intercept == 'True'
+        self.intercept_scaling = float(self.intercept_scaling)
+
+        self.estimator = sklearn.svm.LinearSVR(epsilon=self.epsilon,
+                                               loss=self.loss,
+                                               dual=self.dual,
+                                               tol=self.tol,
+                                               C=self.C,
+                                               fit_intercept=self.fit_intercept,
+                                               intercept_scaling=self.intercept_scaling,
+                                               random_state=self.random_state)
+        self.estimator.fit(X, Y)
+        return self
+
+    def predict(self, X):
+        if self.estimator is None:
+            raise NotImplementedError()
+        return self.estimator.predict(X)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'Liblinear-SVR',
+                'name': 'Liblinear Support Vector Regression',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': True,
+                # Find out if this is good because of sparsity
+                'prefers_data_normalized': False,
+                'handles_regression': True,
+                'handles_classification': False,
+                'handles_multiclass': False,
+                'handles_multilabel': False,
+                'is_deterministic': False,
+                'handles_sparse': True,
+                'input': (SPARSE, DENSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                'preferred_dtype': None}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        cs = ConfigurationSpace()
+        C = cs.add_hyperparameter(UniformFloatHyperparameter(
+            "C", 0.03125, 32768, log=True, default=1.0))
+        loss = cs.add_hyperparameter(CategoricalHyperparameter(
+            "loss", ["epsilon_insensitive", "squared_epsilon_insensitive"],
+            default="squared_epsilon_insensitive"))
+        # Random Guess
+        epsilon = cs.add_hyperparameter(UniformFloatHyperparameter(
+            name="epsilon", lower=0.001, upper=1, default=0.1, log=True))
+        dual = cs.add_hyperparameter(Constant("dual", "False"))
+        # These are set ad-hoc
+        tol = cs.add_hyperparameter(UniformFloatHyperparameter(
+            "tol", 1e-5, 1e-1, default=1e-4, log=True))
+        fit_intercept = cs.add_hyperparameter(Constant("fit_intercept", "True"))
+        intercept_scaling = cs.add_hyperparameter(Constant(
+            "intercept_scaling", 1))
+
+        dual_and_loss = ForbiddenAndConjunction(
+            ForbiddenEqualsClause(dual, "False"),
+            ForbiddenEqualsClause(loss, "epsilon_insensitive")
+        )
+        cs.add_forbidden_clause(dual_and_loss)
+
+        return cs
diff --git a/autosklearn/pipeline/components/regression/libsvm_svr.py b/autosklearn/pipeline/components/regression/libsvm_svr.py
new file mode 100644
index 0000000000..977242d077
--- /dev/null
+++ b/autosklearn/pipeline/components/regression/libsvm_svr.py
@@ -0,0 +1,158 @@
+import resource
+
+import numpy as np
+
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.conditions import InCondition
+from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \
+    UniformIntegerHyperparameter, CategoricalHyperparameter, \
+    UnParametrizedHyperparameter
+
+from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm
+from autosklearn.pipeline.constants import *
+
+
+class LibSVM_SVR(AutoSklearnRegressionAlgorithm):
+    def __init__(self, kernel, C, epsilon, tol, shrinking, gamma=0.0,
+                 degree=3, coef0=0.0, verbose=False,
+                 max_iter=-1, random_state=None):
+        self.kernel = kernel
+        self.C = C
+        self.epsilon = epsilon
+        self.tol = tol
+        self.shrinking = shrinking
+        self.degree = degree
+        self.gamma = gamma
+        self.coef0 = coef0
+        self.verbose = verbose
+        self.max_iter = max_iter
+        self.random_state = random_state
+        self.estimator = None
+        self.scaler = None
+
+    def fit(self, X, Y):
+        import sklearn.preprocessing
+        import sklearn.svm
+
+        # Heuristic for the libsvm kernel cache: use what remains of the
+        # address-space rlimit, falling back to 200 MB
+        try:
+            soft, hard = resource.getrlimit(resource.RLIMIT_AS)
+            if soft > 0:
+                soft /= 1024 * 1024
+                maxrss = resource.getrusage(resource.RUSAGE_SELF)[2] / 1024
+                cache_size = (soft - maxrss) / 1.5
+            else:
+                cache_size = 200
+        except Exception:
+            cache_size = 200
+
+        self.C = float(self.C)
+        self.epsilon = float(self.epsilon)
+        self.tol = float(self.tol)
+        self.shrinking = self.shrinking == 'True'
+        self.degree = int(self.degree)
+        self.gamma = float(self.gamma)
+        if self.coef0 is None:
+            self.coef0 = 0.0
+        else:
+            self.coef0 = float(self.coef0)
+        self.verbose = int(self.verbose)
+        self.max_iter = int(self.max_iter)
+
+        self.estimator = sklearn.svm.SVR(
+            kernel=self.kernel,
+            C=self.C,
+            epsilon=self.epsilon,
+            tol=self.tol,
+            shrinking=self.shrinking,
+            degree=self.degree,
+            gamma=self.gamma,
+            coef0=self.coef0,
+            cache_size=cache_size,
+            verbose=self.verbose,
+            max_iter=self.max_iter
+        )
+        self.scaler = sklearn.preprocessing.StandardScaler(copy=True)
+
+        self.scaler.fit(Y)
+        Y_scaled = self.scaler.transform(Y)
+        self.estimator.fit(X, Y_scaled)
+        return self
+
+    def predict(self, X):
+        if self.estimator is None:
+            raise NotImplementedError
+        if self.scaler is None:
+            raise NotImplementedError
+        Y_pred = self.estimator.predict(X)
+        return self.scaler.inverse_transform(Y_pred)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'SVR',
+                'name': 'Support Vector Regression',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': True,
+                # TODO find out if this is good because of sparsity...
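+                # The SVR is fit on standardized targets; predict() maps the
+                # output back through scaler.inverse_transform (see above)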
+                'handles_regression': True,
+                'handles_classification': False,
+                'handles_multiclass': False,
+                'handles_multilabel': False,
+                'prefers_data_normalized': True,
+                'is_deterministic': True,
+                'handles_sparse': True,
+                'input': (SPARSE, DENSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                # TODO find out what is best used here!
+                # But rather fortran or C-contiguous?
+                'preferred_dtype': np.float32}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        # Copied from libsvm_c
+        C = UniformFloatHyperparameter(
+            name="C", lower=0.03125, upper=32768, log=True, default=1.0)
+
+        kernel = CategoricalHyperparameter(
+            name="kernel", choices=['linear', 'poly', 'rbf', 'sigmoid'],
+            default="rbf")
+        degree = UniformIntegerHyperparameter(
+            name="degree", lower=1, upper=5, default=3)
+
+        # The constructor signature defaults to gamma=0.0; the search-space
+        # default below stays at 0.1 as in the classification component
+        gamma = UniformFloatHyperparameter(
+            name="gamma", lower=3.0517578125e-05, upper=8, log=True, default=0.1)
+
+        # TODO this is totally ad-hoc
+        coef0 = UniformFloatHyperparameter(
+            name="coef0", lower=-1, upper=1, default=0)
+        # probability is no hyperparameter, but an argument to the SVM algo
+        shrinking = CategoricalHyperparameter(
+            name="shrinking", choices=["True", "False"], default="True")
+        tol = UniformFloatHyperparameter(
+            name="tol", lower=1e-5, upper=1e-1, default=1e-3, log=True)
+        max_iter = UnParametrizedHyperparameter("max_iter", -1)
+
+        # Random Guess
+        epsilon = UniformFloatHyperparameter(name="epsilon", lower=0.001,
+                                             upper=1, default=0.1, log=True)
+        cs = ConfigurationSpace()
+        cs.add_hyperparameter(C)
+        cs.add_hyperparameter(kernel)
+        cs.add_hyperparameter(degree)
+        cs.add_hyperparameter(gamma)
+        cs.add_hyperparameter(coef0)
+        cs.add_hyperparameter(shrinking)
+        cs.add_hyperparameter(tol)
+        cs.add_hyperparameter(max_iter)
+        cs.add_hyperparameter(epsilon)
+
+        degree_depends_on_kernel = InCondition(child=degree, parent=kernel,
+                                               values=('poly', 'rbf', 'sigmoid'))
+        gamma_depends_on_kernel = InCondition(child=gamma, parent=kernel,
+                                              values=('poly', 'rbf'))
+        coef0_depends_on_kernel = InCondition(child=coef0, parent=kernel,
+                                              values=('poly', 'sigmoid'))
+        cs.add_condition(degree_depends_on_kernel)
+        cs.add_condition(gamma_depends_on_kernel)
+        cs.add_condition(coef0_depends_on_kernel)
+        return cs
diff --git a/autosklearn/pipeline/components/regression/random_forest.py b/autosklearn/pipeline/components/regression/random_forest.py
new file mode 100644
index 0000000000..fb7ee082bc
--- /dev/null
+++ b/autosklearn/pipeline/components/regression/random_forest.py
@@ -0,0 +1,137 @@
+import numpy as np
+
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \
+    UniformIntegerHyperparameter, CategoricalHyperparameter, \
+    UnParametrizedHyperparameter, Constant
+
+from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm
+from autosklearn.pipeline.constants import *
+
+
+class RandomForest(AutoSklearnRegressionAlgorithm):
+    def __init__(self, n_estimators, criterion, max_features,
+                 max_depth, min_samples_split, min_samples_leaf,
+                 min_weight_fraction_leaf, bootstrap, max_leaf_nodes,
+                 random_state=None, n_jobs=1):
+        self.n_estimators = n_estimators
+        self.estimator_increment = 10
+        self.criterion = criterion
+        self.max_features = max_features
+        self.max_depth = max_depth
+        self.min_samples_split = min_samples_split
+        self.min_samples_leaf = min_samples_leaf
+        self.min_weight_fraction_leaf = min_weight_fraction_leaf
+        self.bootstrap = bootstrap
+        self.max_leaf_nodes = max_leaf_nodes
+        self.random_state = random_state
+        self.n_jobs = n_jobs
+        self.estimator = None
+
+    def fit(self, X, y, sample_weight=None, refit=False):
+        if self.estimator is None or refit:
+            self.iterative_fit(X, y, n_iter=1, refit=refit)
+
+        while not self.configuration_fully_fitted():
+            self.iterative_fit(X, y, n_iter=1)
+        return self
+
+    def iterative_fit(self, X, y, n_iter=1, refit=False):
+        from sklearn.ensemble import RandomForestRegressor
+
+        if refit:
+            self.estimator = None
+
+        if self.estimator is None:
+            self.n_estimators = int(self.n_estimators)
+            if self.max_depth == "None":
+                self.max_depth = None
+            else:
+                self.max_depth = int(self.max_depth)
+            self.min_samples_split = int(self.min_samples_split)
+            self.min_samples_leaf = int(self.min_samples_leaf)
+            if self.max_features not in ("sqrt", "log2", "auto"):
+                num_features = X.shape[1]
+                max_features = int(
+                    float(self.max_features) * (np.log(num_features) + 1))
+                # Use at most half of the features
+                max_features = max(1, min(int(X.shape[1] / 2), max_features))
+            else:
+                max_features = self.max_features
+            if self.bootstrap == "True":
+                self.bootstrap = True
+            else:
+                self.bootstrap = False
+            if self.max_leaf_nodes == "None":
+                self.max_leaf_nodes = None
+            else:
+                self.max_leaf_nodes = int(self.max_leaf_nodes)
+
+            self.estimator = RandomForestRegressor(
+                n_estimators=0,
+                criterion=self.criterion,
+                max_features=max_features,
+                max_depth=self.max_depth,
+                min_samples_split=self.min_samples_split,
+                min_samples_leaf=self.min_samples_leaf,
+                min_weight_fraction_leaf=self.min_weight_fraction_leaf,
+                bootstrap=self.bootstrap,
+                max_leaf_nodes=self.max_leaf_nodes,
+                random_state=self.random_state,
+                n_jobs=self.n_jobs,
+                warm_start=True)
+
+        tmp = self.estimator
+        tmp.n_estimators += n_iter
+        tmp.fit(X, y)
+        self.estimator = tmp
+        return self
+
+    def configuration_fully_fitted(self):
+        if self.estimator is None:
+            return False
+
+        return not len(self.estimator.estimators_) < self.n_estimators
+
+    def predict(self, X):
+        if self.estimator is None:
+            raise NotImplementedError
+        return self.estimator.predict(X)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'RF',
+                'name': 'Random Forest Regressor',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': False,
+                'handles_regression': True,
+                'handles_classification': False,
+                'handles_multiclass': False,
+                'handles_multilabel': False,
+                'prefers_data_normalized': False,
+                'is_deterministic': True,
+                'handles_sparse': True,
+                'input': (DENSE, SPARSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                # TODO find out what is best used here!
+                # But rather fortran or C-contiguous?
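+                # max_features is searched as a float in [0.5, 5] and rescaled
+                # in iterative_fit() to roughly max_features * (ln(n) + 1)
+                # features, capped at half of the inputs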
+                'preferred_dtype': np.float32}
+
+    @staticmethod
+    def get_hyperparameter_search_space(dataset_properties=None):
+        cs = ConfigurationSpace()
+        cs.add_hyperparameter(Constant("n_estimators", 100))
+        cs.add_hyperparameter(Constant("criterion", "mse"))
+        cs.add_hyperparameter(UniformFloatHyperparameter(
+            "max_features", 0.5, 5, default=1))
+        cs.add_hyperparameter(UnParametrizedHyperparameter("max_depth", "None"))
+        cs.add_hyperparameter(UniformIntegerHyperparameter(
+            "min_samples_split", 2, 20, default=2))
+        cs.add_hyperparameter(UniformIntegerHyperparameter(
+            "min_samples_leaf", 1, 20, default=1))
+        cs.add_hyperparameter(
+            UnParametrizedHyperparameter("min_weight_fraction_leaf", 0.))
+        cs.add_hyperparameter(UnParametrizedHyperparameter("max_leaf_nodes", "None"))
+        cs.add_hyperparameter(CategoricalHyperparameter(
+            "bootstrap", ["True", "False"], default="True"))
+        return cs
diff --git a/autosklearn/pipeline/components/regression/ridge_regression.py b/autosklearn/pipeline/components/regression/ridge_regression.py
new file mode 100644
index 0000000000..95b15918ed
--- /dev/null
+++ b/autosklearn/pipeline/components/regression/ridge_regression.py
@@ -0,0 +1,65 @@
+import numpy as np
+
+from HPOlibConfigSpace.configuration_space import ConfigurationSpace
+from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \
+    UnParametrizedHyperparameter
+
+from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm
+from autosklearn.pipeline.constants import *
+
+
+class RidgeRegression(AutoSklearnRegressionAlgorithm):
+    def __init__(self, alpha, fit_intercept, tol, random_state=None):
+        self.alpha = float(alpha)
+        self.fit_intercept = fit_intercept == 'True'
+        self.tol = float(tol)
+        self.random_state = random_state
+        self.estimator = None
+
+    def fit(self, X, Y):
+        import sklearn.linear_model
+        self.estimator = sklearn.linear_model.Ridge(alpha=self.alpha,
+                                                    fit_intercept=self.fit_intercept,
+                                                    tol=self.tol,
+                                                    copy_X=False,
+                                                    normalize=False)
+        self.estimator.fit(X, Y)
+        return self
+
+    def predict(self, X):
+        if self.estimator is None:
+            raise NotImplementedError
+        return self.estimator.predict(X)
+
+    @staticmethod
+    def get_properties(dataset_properties=None):
+        return {'shortname': 'Ridge',
+                'name': 'Ridge Regression',
+                'handles_missing_values': False,
+                'handles_nominal_values': False,
+                'handles_numerical_features': True,
+                'prefers_data_scaled': True,
+                # TODO find out if this is good because of sparsity...
+                'handles_regression': True,
+                'handles_classification': False,
+                'handles_multiclass': False,
+                'handles_multilabel': False,
+                'prefers_data_normalized': True,
+                'is_deterministic': True,
+                'handles_sparse': True,
+                'input': (SPARSE, DENSE, UNSIGNED_DATA),
+                'output': (PREDICTIONS,),
+                # TODO find out what is best used here!
+                # But rather fortran or C-contiguous?
+ 'preferred_dtype': np.float32} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + alpha = cs.add_hyperparameter(UniformFloatHyperparameter( + "alpha", 10 ** -5, 10., log=True, default=1.)) + fit_intercept = cs.add_hyperparameter(UnParametrizedHyperparameter( + "fit_intercept", "True")) + tol = cs.add_hyperparameter(UniformFloatHyperparameter( + "tol", 1e-5, 1e-1, default=1e-4, log=True)) + return cs diff --git a/autosklearn/pipeline/components/regression/sgd.py b/autosklearn/pipeline/components/regression/sgd.py new file mode 100644 index 0000000000..d034a055fc --- /dev/null +++ b/autosklearn/pipeline/components/regression/sgd.py @@ -0,0 +1,157 @@ +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import UniformFloatHyperparameter, \ + CategoricalHyperparameter, UnParametrizedHyperparameter, \ + UniformIntegerHyperparameter +from HPOlibConfigSpace.conditions import InCondition, EqualsCondition + +from autosklearn.pipeline.components.base import AutoSklearnRegressionAlgorithm +from autosklearn.pipeline.constants import * + + +class SGD(AutoSklearnRegressionAlgorithm): + def __init__(self, loss, penalty, alpha, fit_intercept, n_iter, + learning_rate, l1_ratio=0.15, epsilon=0.1, + eta0=0.01, power_t=0.5, average=False, random_state=None): + self.loss = loss + self.penalty = penalty + self.alpha = alpha + self.fit_intercept = fit_intercept + self.n_iter = n_iter + self.learning_rate = learning_rate + self.l1_ratio = l1_ratio + self.epsilon = epsilon + self.eta0 = eta0 + self.power_t = power_t + self.random_state = random_state + self.average = average + + self.estimator = None + self.scaler = None + + def fit(self, X, y): + while not self.configuration_fully_fitted(): + self.iterative_fit(X, y, n_iter=1) + + return self + + def iterative_fit(self, X, y, n_iter=1, refit=False): + from sklearn.linear_model.stochastic_gradient import SGDRegressor + import sklearn.preprocessing + + if refit: + self.estimator = None + self.scaler = None + + if self.estimator is None: + self.alpha = float(self.alpha) + self.fit_intercept = self.fit_intercept == 'True' + self.n_iter = int(self.n_iter) + self.l1_ratio = float( + self.l1_ratio) if self.l1_ratio is not None else 0.15 + self.epsilon = float( + self.epsilon) if self.epsilon is not None else 0.1 + self.eta0 = float(self.eta0) + self.power_t = float( + self.power_t) if self.power_t is not None else 0.25 + self.average = self.average == 'True' + self.estimator = SGDRegressor(loss=self.loss, + penalty=self.penalty, + alpha=self.alpha, + fit_intercept=self.fit_intercept, + n_iter=self.n_iter, + learning_rate=self.learning_rate, + l1_ratio=self.l1_ratio, + epsilon=self.epsilon, + eta0=self.eta0, + power_t=self.power_t, + shuffle=True, + average=self.average, + random_state=self.random_state) + + self.scaler = sklearn.preprocessing.StandardScaler(copy=True) + self.scaler.fit(y) + + Y_scaled = self.scaler.transform(y) + + self.estimator.n_iter += n_iter + self.estimator.fit(X, Y_scaled) + return self + + def configuration_fully_fitted(self): + if self.estimator is None: + return False + return not self.estimator.n_iter < self.n_iter + + def predict(self, X): + if self.estimator is None: + raise NotImplementedError() + Y_pred = self.estimator.predict(X) + return self.scaler.inverse_transform(Y_pred) + + @staticmethod + def get_properties(dataset_properties=None): + return {'shortname': 'SGD Regressor', + 'name': 'Stochastic Gradient Descent 
Regressor', + 'handles_missing_values': False, + 'handles_nominal_values': False, + 'handles_numerical_features': True, + 'prefers_data_scaled': True, + 'prefers_data_normalized': True, + 'handles_regression': True, + 'handles_classification': False, + 'handles_multiclass': False, + 'handles_multilabel': False, + 'is_deterministic': True, + 'handles_sparse': True, + 'input': (DENSE, SPARSE, UNSIGNED_DATA), + 'output': (PREDICTIONS,), + # TODO find out what is best used here! + 'preferred_dtype': None} + + @staticmethod + def get_hyperparameter_search_space(dataset_properties=None): + cs = ConfigurationSpace() + + loss = cs.add_hyperparameter(CategoricalHyperparameter("loss", + ["squared_loss", "huber", "epsilon_insensitive", "squared_epsilon_insensitive"], + default="squared_loss")) + penalty = cs.add_hyperparameter(CategoricalHyperparameter( + "penalty", ["l1", "l2", "elasticnet"], default="l2")) + alpha = cs.add_hyperparameter(UniformFloatHyperparameter( + "alpha", 10e-7, 1e-1, log=True, default=0.01)) + l1_ratio = cs.add_hyperparameter(UniformFloatHyperparameter( + "l1_ratio", 1e-9, 1., log=True, default=0.15)) + fit_intercept = cs.add_hyperparameter(UnParametrizedHyperparameter( + "fit_intercept", "True")) + n_iter = cs.add_hyperparameter(UniformIntegerHyperparameter( + "n_iter", 5, 1000, log=True, default=20)) + epsilon = cs.add_hyperparameter(UniformFloatHyperparameter( + "epsilon", 1e-5, 1e-1, default=1e-4, log=True)) + learning_rate = cs.add_hyperparameter(CategoricalHyperparameter( + "learning_rate", ["optimal", "invscaling", "constant"], + default="optimal")) + eta0 = cs.add_hyperparameter(UniformFloatHyperparameter( + "eta0", 10 ** -7, 0.1, default=0.01)) + power_t = cs.add_hyperparameter(UniformFloatHyperparameter( + "power_t", 1e-5, 1, default=0.5)) + average = cs.add_hyperparameter(CategoricalHyperparameter( + "average", ["False", "True"], default="False")) + + # TODO add passive/aggressive here, although not properly documented? + elasticnet = EqualsCondition(l1_ratio, penalty, "elasticnet") + epsilon_condition = InCondition(epsilon, loss, + ["huber", "epsilon_insensitive", "squared_epsilon_insensitive"]) + # eta0 seems to be always active according to the source code; when + # learning_rate is set to optimial, eta0 is the starting value: + # https://github.com/scikit-learn/scikit-learn/blob/0.15.X/sklearn/linear_model/sgd_fast.pyx + # eta0_and_inv = EqualsCondition(eta0, learning_rate, "invscaling") + #eta0_and_constant = EqualsCondition(eta0, learning_rate, "constant") + #eta0_condition = OrConjunction(eta0_and_inv, eta0_and_constant) + power_t_condition = EqualsCondition(power_t, learning_rate, + "invscaling") + + cs.add_condition(elasticnet) + cs.add_condition(epsilon_condition) + cs.add_condition(power_t_condition) + + return cs diff --git a/autosklearn/pipeline/constants.py b/autosklearn/pipeline/constants.py new file mode 100644 index 0000000000..2cd95287be --- /dev/null +++ b/autosklearn/pipeline/constants.py @@ -0,0 +1,40 @@ +"""Constants which are used as dataset properties. 
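Several hyperparameters in the SGD space above are only active for particular parent values (l1_ratio for the elastic-net penalty, epsilon for the robust losses, power_t for invscaling). A minimal sketch of that conditional pattern, using only the HPOlibConfigSpace calls that appear in this diff:

```python
from HPOlibConfigSpace.configuration_space import ConfigurationSpace
from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter, \
    UniformFloatHyperparameter
from HPOlibConfigSpace.conditions import EqualsCondition

cs = ConfigurationSpace()
penalty = cs.add_hyperparameter(CategoricalHyperparameter(
    "penalty", ["l1", "l2", "elasticnet"], default="l2"))
l1_ratio = cs.add_hyperparameter(UniformFloatHyperparameter(
    "l1_ratio", 1e-9, 1., log=True, default=0.15))

# l1_ratio only matters for the elastic-net penalty, so configurations
# with penalty != "elasticnet" will not contain it at all.
cs.add_condition(EqualsCondition(l1_ratio, penalty, "elasticnet"))
```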
+""" +BINARY_CLASSIFICATION = 1 +MULTICLASS_CLASSIFICATION = 2 +MULTILABEL_CLASSIFICATION = 3 +REGRESSION = 4 + +REGRESSION_TASKS = [REGRESSION] +CLASSIFICATION_TASKS = [BINARY_CLASSIFICATION, MULTICLASS_CLASSIFICATION, + MULTILABEL_CLASSIFICATION] + +TASK_TYPES = REGRESSION_TASKS + CLASSIFICATION_TASKS + +TASK_TYPES_TO_STRING = \ + {BINARY_CLASSIFICATION: "binary.classification", + MULTICLASS_CLASSIFICATION: "multiclass.classification", + MULTILABEL_CLASSIFICATION: "multilabel.classification", + REGRESSION: "regression"} + +STRING_TO_TASK_TYPES = \ + {"binary.classification": BINARY_CLASSIFICATION, + "multiclass.classification": MULTICLASS_CLASSIFICATION, + "multilabel.classification": MULTILABEL_CLASSIFICATION, + "regression": REGRESSION} + +DENSE = 5 +SPARSE = 6 +PREDICTIONS = 7 +INPUT = 8 + +SIGNED_DATA = 9 +UNSIGNED_DATA = 10 + +DATASET_PROPERTIES_TO_STRING = \ + {DENSE: 'dense', + SPARSE: 'sparse', + PREDICTIONS: 'predictions', + INPUT: 'input', + SIGNED_DATA: 'signed data', + UNSIGNED_DATA: 'unsigned data'} \ No newline at end of file diff --git a/autosklearn/pipeline/create_searchspace_util.py b/autosklearn/pipeline/create_searchspace_util.py new file mode 100644 index 0000000000..544b57e379 --- /dev/null +++ b/autosklearn/pipeline/create_searchspace_util.py @@ -0,0 +1,260 @@ +import itertools + +import numpy as np + +from HPOlibConfigSpace.forbidden import ForbiddenAndConjunction +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause + +from autosklearn.pipeline.constants import * + + +def get_match_array(pipeline, dataset_properties, + include=None, exclude=None): + sparse = dataset_properties.get('sparse') + signed = dataset_properties.get('signed') + + # Duck typing, not sure if it's good... + node_i_is_choice = [] + node_i_choices = [] + node_i_choices_names = [] + all_nodes = [] + for node_name, node in pipeline: + all_nodes.append(node) + is_choice = hasattr(node, "get_available_components") + node_i_is_choice.append(is_choice) + + node_include = include.get( + node_name) if include is not None else None + node_exclude = exclude.get( + node_name) if exclude is not None else None + + if is_choice: + node_i_choices_names.append(list(node.get_available_components( + dataset_properties, include=node_include, + exclude=node_exclude).keys())) + node_i_choices.append(list(node.get_available_components( + dataset_properties, include=node_include, + exclude=node_exclude).values())) + + else: + node_i_choices.append([node]) + + matches_dimensions = [len(choices) for choices in node_i_choices] + # Start by allowing every combination of nodes. Go through all + # combinations/pipelines and erase the illegal ones + matches = np.ones(matches_dimensions, dtype=int) + + pipeline_idxs = [range(dim) for dim in matches_dimensions] + for pipeline_instantiation_idxs in itertools.product(*pipeline_idxs): + pipeline_instantiation = [node_i_choices[i][idx] for i, idx in + enumerate(pipeline_instantiation_idxs)] + + data_is_sparse = sparse + dataset_is_signed = signed + for node in pipeline_instantiation: + node_input = node.get_properties()['input'] + node_output = node.get_properties()['output'] + + # First check if these two instantiations of this node can work + # together. 
Do this in multiple if statements to maintain + # readability + if (data_is_sparse and SPARSE not in node_input) or \ + not data_is_sparse and DENSE not in node_input: + matches[pipeline_instantiation_idxs] = 0 + break + # No need to check if the node can handle SIGNED_DATA; this is + # always assumed to be true + elif not dataset_is_signed and UNSIGNED_DATA not in node_input: + matches[pipeline_instantiation_idxs] = 0 + break + + if (INPUT in node_output and DENSE not in node_output and + SPARSE not in node_output) or \ + PREDICTIONS in node_output or\ + (not data_is_sparse and DENSE in node_input and + DENSE in node_output) or \ + (data_is_sparse and SPARSE in node_input and + SPARSE in node_output): + # Don't change the data_is_sparse flag + pass + elif data_is_sparse and DENSE in node_output: + data_is_sparse = False + elif not data_is_sparse and SPARSE in node_output: + data_is_sparse = True + else: + print(node) + print("Data is sparse", data_is_sparse) + print(node_input, node_output) + raise ValueError("This combination is not allowed!") + + if PREDICTIONS in node_output: + pass + elif (INPUT in node_output and SIGNED_DATA not in node_output and + UNSIGNED_DATA not in node_output): + pass + elif SIGNED_DATA in node_output: + dataset_is_signed = True + elif UNSIGNED_DATA in node_output: + dataset_is_signed = False + else: + print(node) + print("Data is signed", dataset_is_signed) + print(node_input, node_output) + raise ValueError("This combination is not allowed!") + + return matches + + +def find_active_choices(matches, node, node_idx, dataset_properties, \ + include=None, exclude=None): + if not hasattr(node, "get_available_components"): + raise ValueError() + available_components = node.get_available_components(dataset_properties, + include=include, + exclude=exclude) + assert matches.shape[node_idx] == len(available_components), \ + (matches.shape[node_idx], len(available_components)) + + choices = [] + for c_idx, component in enumerate(available_components): + slices = [slice(None) if idx != node_idx else slice(c_idx, c_idx+1) + for idx in range(len(matches.shape))] + + if np.sum(matches[slices]) > 0: + choices.append(component) + return choices + + +def add_forbidden(conf_space, pipeline, matches, dataset_properties, + include, exclude): + # Not sure if this works for 3D + node_i_is_choice = [] + node_i_choices_names = [] + node_i_choices = [] + all_nodes = [] + for node_name, node in pipeline: + all_nodes.append(node) + is_choice = hasattr(node, "get_available_components") + node_i_is_choice.append(is_choice) + + node_include = include.get( + node_name) if include is not None else None + node_exclude = exclude.get( + node_name) if exclude is not None else None + + if is_choice: + node_i_choices_names.append(node.get_available_components( + dataset_properties, include=node_include, + exclude=node_exclude).keys()) + node_i_choices.append(node.get_available_components( + dataset_properties, include=node_include, + exclude=node_exclude).values()) + + else: + node_i_choices_names.append([node_name]) + node_i_choices.append([node]) + + # Find out all chains of choices. 
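get_match_array walks every instantiation of the pipeline's choice nodes with itertools.product and zeroes out combinations whose dataset properties do not chain. A stripped-down toy version of the idea, with dict stand-ins instead of the real component classes:

```python
import itertools
import numpy as np

# Toy stand-ins for the real component classes: each candidate declares
# which data formats it accepts and emits.
preprocessors = [{"in": {"dense"}, "out": {"dense"}},
                 {"in": {"sparse"}, "out": {"dense"}}]
regressors = [{"in": {"dense"}}, {"in": {"sparse"}}]

data_is_sparse = True
matches = np.ones((len(preprocessors), len(regressors)), dtype=int)
for i, j in itertools.product(range(len(preprocessors)), range(len(regressors))):
    if ("sparse" if data_is_sparse else "dense") not in preprocessors[i]["in"]:
        matches[i, j] = 0
        continue
    fmt = "dense" if "dense" in preprocessors[i]["out"] else "sparse"
    if fmt not in regressors[j]["in"]:
        matches[i, j] = 0

print(matches)  # [[0 0]
                #  [1 0]] -- only sparse->dense preprocessor + dense regressor
```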
Only in such a chain its possible to + # have several forbidden constraints + choices_chains = [] + idx = 0 + while idx < len(pipeline): + if node_i_is_choice[idx]: + chain_start = idx + idx += 1 + while idx < len(pipeline) and node_i_is_choice[idx]: + idx += 1 + chain_stop = idx + choices_chains.append((chain_start, chain_stop)) + idx += 1 + + for choices_chain in choices_chains: + constraints = set() + + chain_start = choices_chain[0] + chain_stop = choices_chain[1] + chain_length = chain_stop - chain_start + + # Add one to have also have chain_length in the range + for sub_chain_length in range(2, chain_length + 1): + for start_idx in range(chain_start, chain_stop - sub_chain_length + 1): + indices = range(start_idx, start_idx + sub_chain_length) + node_names = [pipeline[idx][0] for idx in indices] + + num_node_choices = [] + node_choice_names = [] + skip_array_shape = [] + + for idx in indices: + node = all_nodes[idx] + available_components = node.get_available_components( + dataset_properties, + include=node_i_choices_names[idx]) + assert len(available_components) > 0, len(available_components) + skip_array_shape.append(len(available_components)) + num_node_choices.append(range(len(available_components))) + node_choice_names.append([name for name in available_components]) + + # Figure out which choices were already abandoned + skip_array = np.zeros(skip_array_shape) + for product in itertools.product(*num_node_choices): + for node_idx, choice_idx in enumerate(product): + node_idx += start_idx + slices_ = [ + slice(None) if idx != node_idx else + slice(choice_idx, choice_idx + 1) for idx in + range(len(matches.shape))] + + if np.sum(matches[slices_]) == 0: + skip_array[product] = 1 + + for product in itertools.product(*num_node_choices): + if skip_array[product]: + continue + + slices = [] + for idx in range(len(matches.shape)): + if idx not in indices: + slices.append(slice(None)) + else: + slices.append(slice(product[idx - start_idx], + product[idx - start_idx] + 1)) + + # This prints the affected nodes + # print [node_choice_names[i][product[i]] + # for i in range(len(product))], \ + # np.sum(matches[slices]) + + if np.sum(matches[slices]) == 0: + constraint = tuple([(node_names[i], + node_choice_names[i][product[i]]) + for i in range(len(product))]) + + # Check if a more general constraint/forbidden clause + # was already added + continue_ = False + for constraint_length in range(2, len(constraint)): + for constraint_start_idx in range(len(constraint) + - constraint_length + 1): + sub_constraint = constraint[ + constraint_start_idx:constraint_start_idx + constraint_length] + if sub_constraint in constraints: + continue_ = True + break + if continue_: + break + if continue_: + continue + + constraints.add(constraint) + + forbiddens = [] + for i in range(len(product)): + forbiddens.append( + ForbiddenEqualsClause(conf_space.get_hyperparameter( + node_names[i] + ":__choice__"), + node_choice_names[i][product[i]])) + forbidden = ForbiddenAndConjunction(*forbiddens) + conf_space.add_forbidden_clause(forbidden) + + return conf_space diff --git a/autosklearn/pipeline/implementations/Imputation.py b/autosklearn/pipeline/implementations/Imputation.py new file mode 100644 index 0000000000..8f37e02da9 --- /dev/null +++ b/autosklearn/pipeline/implementations/Imputation.py @@ -0,0 +1,381 @@ +# Authors: Nicolas Tresegnie +# License: BSD 3 clause + +import warnings + +import numpy as np +import numpy.ma as ma +from scipy import sparse +from scipy import stats + +from sklearn.base import 
BaseEstimator, TransformerMixin +from sklearn.utils import check_array +from sklearn.utils import as_float_array +from sklearn.utils.fixes import astype +from sklearn.utils.sparsefuncs import _get_median +from sklearn.utils.validation import check_is_fitted + +from sklearn.externals import six + +zip = six.moves.zip +map = six.moves.map + +__all__ = [ + 'Imputer', +] + + +def _get_mask(X, value_to_mask): + """Compute the boolean mask X == missing_values.""" + if value_to_mask == "NaN" or np.isnan(value_to_mask): + return np.isnan(X) + else: + return X == value_to_mask + + +def _most_frequent(array, extra_value, n_repeat): + """Compute the most frequent value in a 1d array extended with + [extra_value] * n_repeat, where extra_value is assumed to be not part + of the array.""" + # Compute the most frequent value in array only + if array.size > 0: + mode = stats.mode(array) + most_frequent_value = mode[0][0] + most_frequent_count = mode[1][0] + else: + most_frequent_value = 0 + most_frequent_count = 0 + + # Compare to array + [extra_value] * n_repeat + if most_frequent_count == 0 and n_repeat == 0: + return np.nan + elif most_frequent_count < n_repeat: + return extra_value + elif most_frequent_count > n_repeat: + return most_frequent_value + elif most_frequent_count == n_repeat: + # Ties the breaks. Copy the behaviour of scipy.stats.mode + if most_frequent_value < extra_value: + return most_frequent_value + else: + return extra_value + + +class Imputer(BaseEstimator, TransformerMixin): + """Imputation transformer for completing missing values. + + Parameters + ---------- + missing_values : integer or "NaN", optional (default="NaN") + The placeholder for the missing values. All occurrences of + `missing_values` will be imputed. For missing values encoded as np.nan, + use the string value "NaN". + + strategy : string, optional (default="mean") + The imputation strategy. + + - If "mean", then replace missing values using the mean along + the axis. + - If "median", then replace missing values using the median along + the axis. + - If "most_frequent", then replace missing using the most frequent + value along the axis. + + axis : integer, optional (default=0) + The axis along which to impute. + + - If `axis=0`, then impute along columns. + - If `axis=1`, then impute along rows. + + dtype : np.dtype (default=np.float64) + Determines the dtype of the transformed array if it is dense. Has no + effect otherwise. + + verbose : integer, optional (default=0) + Controls the verbosity of the imputer. + + copy : boolean, optional (default=True) + If True, a copy of X will be created. If False, imputation will + be done in-place whenever possible. Note that, in the following cases, + a new copy will always be made, even if `copy=False`: + + - If X is not an array of floating values; + - If X is sparse and `missing_values=0`; + - If `axis=0` and X is encoded as a CSR matrix; + - If `axis=1` and X is encoded as a CSC matrix. + + Attributes + ---------- + statistics_ : array of shape (n_features,) + The imputation fill value for each feature if axis == 0. + + Notes + ----- + - When ``axis=0``, columns which only contained missing values at `fit` + are discarded upon `transform`. + - When ``axis=1``, an exception is raised if there are rows for which it is + not possible to fill in the missing values (e.g., because they only + contain missing values). 
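_most_frequent compares the mode of the explicitly stored values against a block of implicit entries (extra_value repeated n_repeat times); that is how the sparse code path accounts for zeros it never materialises, with ties broken toward the smaller value as in scipy.stats.mode. For example, given the definitions above:

```python
import numpy as np

# Stored values [3, 3, 7] plus two implicit 7s (extra_value=7, n_repeat=2):
# both candidates occur twice, and the tie breaks toward the smaller value,
# mirroring scipy.stats.mode.
assert _most_frequent(np.array([3., 3., 7.]), 7., 2) == 3.

# _get_mask accepts the string "NaN" as a placeholder for np.nan
assert list(_get_mask(np.array([1., np.nan]), "NaN")) == [False, True]
```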
+ """ + + def __init__(self, missing_values="NaN", strategy="mean", + axis=0, dtype=np.float64, verbose=0, copy=True): + self.missing_values = missing_values + self.strategy = strategy + self.axis = axis + self.dtype=dtype + self.verbose = verbose + self.copy = copy + + def fit(self, X, y=None): + """Fit the imputer on X. + + Parameters + ---------- + X : {array-like, sparse matrix}, shape (n_samples, n_features) + Input data, where ``n_samples`` is the number of samples and + ``n_features`` is the number of features. + + Returns + ------- + self : object + Returns self. + """ + # Check parameters + allowed_strategies = ["mean", "median", "most_frequent"] + if self.strategy not in allowed_strategies: + raise ValueError("Can only use these strategies: {0} " + " got strategy={1}".format(allowed_strategies, + self.strategy)) + + if self.axis not in [0, 1]: + raise ValueError("Can only impute missing values on axis 0 and 1, " + " got axis={0}".format(self.axis)) + + # Since two different arrays can be provided in fit(X) and + # transform(X), the imputation data will be computed in transform() + # when the imputation is done per sample (i.e., when axis=1). + if self.axis == 0: + if sparse.issparse(X): + X = check_array(X, accept_sparse='csc', dtype=np.float64, + force_all_finite=False) + self.statistics_ = self._sparse_fit(X, + self.strategy, + self.missing_values, + self.axis) + else: + X = check_array(X, dtype=type(self.dtype), + force_all_finite=False) + self.statistics_ = self._dense_fit(X, + self.strategy, + self.missing_values, + self.axis) + return self + + def _sparse_fit(self, X, strategy, missing_values, axis): + """Fit the transformer on sparse data.""" + # Imputation is done "by column", so if we want to do it + # by row we only need to convert the matrix to csr format. + if axis == 1: + X = X.tocsr() + else: + X = X.tocsc() + + # Count the zeros + if missing_values == 0: + n_zeros_axis = np.zeros(X.shape[not axis], dtype=int) + else: + n_zeros_axis = X.shape[axis] - np.diff(X.indptr) + + # Mean + if strategy == "mean": + if missing_values != 0: + n_non_missing = n_zeros_axis + + # Mask the missing elements + mask_missing_values = _get_mask(X.data, missing_values) + mask_valids = np.logical_not(mask_missing_values) + + # Sum only the valid elements + new_data = X.data.copy() + new_data[mask_missing_values] = 0 + X = sparse.csc_matrix((new_data, X.indices, X.indptr), + copy=False) + sums = X.sum(axis=0) + + # Count the elements != 0 + mask_non_zeros = sparse.csc_matrix( + (mask_valids.astype(np.float64), + X.indices, + X.indptr), copy=False) + s = mask_non_zeros.sum(axis=0) + n_non_missing = np.add(n_non_missing, s) + + else: + sums = X.sum(axis=axis) + n_non_missing = np.diff(X.indptr) + + # Ignore the error, columns with a np.nan statistics_ + # are not an error at this point. 
These columns will
+            # be removed in transform
+            with np.errstate(all="ignore"):
+                return np.ravel(sums) / np.ravel(n_non_missing)
+
+        # Median + Most frequent
+        else:
+            # Remove the missing values, for each column
+            columns_all = np.hsplit(X.data, X.indptr[1:-1])
+            mask_missing_values = _get_mask(X.data, missing_values)
+            mask_valids = np.hsplit(np.logical_not(mask_missing_values),
+                                    X.indptr[1:-1])
+
+            # astype necessary for bug in numpy.hsplit before v1.9
+            columns = [col[astype(mask, bool, copy=False)]
+                       for col, mask in zip(columns_all, mask_valids)]
+
+            # Median
+            if strategy == "median":
+                median = np.empty(len(columns))
+                for i, column in enumerate(columns):
+                    median[i] = _get_median(column, n_zeros_axis[i])
+
+                return median
+
+            # Most frequent
+            elif strategy == "most_frequent":
+                most_frequent = np.empty(len(columns))
+
+                for i, column in enumerate(columns):
+                    most_frequent[i] = _most_frequent(column,
+                                                      0,
+                                                      n_zeros_axis[i])
+
+                return most_frequent
+
+    def _dense_fit(self, X, strategy, missing_values, axis):
+        """Fit the transformer on dense data."""
+        X = check_array(X, force_all_finite=False)
+        mask = _get_mask(X, missing_values)
+        masked_X = ma.masked_array(X, mask=mask)
+
+        # Mean
+        if strategy == "mean":
+            mean_masked = np.ma.mean(masked_X, axis=axis)
+            # Avoid the warning "Warning: converting a masked element to nan."
+            mean = np.ma.getdata(mean_masked)
+            mean[np.ma.getmask(mean_masked)] = np.nan
+
+            return mean
+
+        # Median
+        elif strategy == "median":
+            if tuple(int(v) for v in np.__version__.split('.')[:2]) < (1, 5):
+                # In old versions of numpy, calling a median on an array
+                # containing nans returns nan. This is different in
+                # recent versions of numpy, which we want to mimic
+                masked_X.mask = np.logical_or(masked_X.mask,
+                                              np.isnan(X))
+            median_masked = np.ma.median(masked_X, axis=axis)
+            # Avoid the warning "Warning: converting a masked element to nan."
+            median = np.ma.getdata(median_masked)
+            median[np.ma.getmaskarray(median_masked)] = np.nan
+
+            return median
+
+        # Most frequent
+        elif strategy == "most_frequent":
+            # scipy.stats.mstats.mode cannot be used because it will not work
+            # properly if the first element is masked and if its frequency
+            # is equal to the frequency of the most frequent valid element
+            # See https://github.com/scipy/scipy/issues/2636
+
+            # To be able to access the elements by columns
+            if axis == 0:
+                X = X.transpose()
+                mask = mask.transpose()
+
+            most_frequent = np.empty(X.shape[0])
+
+            for i, (row, row_mask) in enumerate(zip(X[:], mask[:])):
+                row_mask = np.logical_not(row_mask).astype(np.bool)
+                row = row[row_mask]
+                most_frequent[i] = _most_frequent(row, np.nan, 0)
+
+            return most_frequent
+
+    def transform(self, X):
+        """Impute all missing values in X.
+
+        Parameters
+        ----------
+        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
+            The input data to complete.
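The dense path relies on numpy masked arrays so that missing entries simply drop out of the statistics; the same trick in isolation:

```python
import numpy as np
import numpy.ma as ma

X = np.array([[1., np.nan],
              [3., 10.],
              [5., 20.]])

masked_X = ma.masked_array(X, mask=np.isnan(X))
print(np.ma.mean(masked_X, axis=0))     # [3.0 15.0]
print(np.ma.median(masked_X, axis=0))   # [3.0 15.0]
```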
+ """ + if self.axis == 0: + check_is_fitted(self, 'statistics_') + + # Copy just once + X = as_float_array(X, copy=self.copy, force_all_finite=False) + + # Since two different arrays can be provided in fit(X) and + # transform(X), the imputation data need to be recomputed + # when the imputation is done per sample + if self.axis == 1: + X = check_array(X, accept_sparse='csr', force_all_finite=False, + copy=False) + + if sparse.issparse(X): + statistics = self._sparse_fit(X, + self.strategy, + self.missing_values, + self.axis) + + else: + statistics = self._dense_fit(X, + self.strategy, + self.missing_values, + self.axis) + else: + X = check_array(X, accept_sparse='csc', force_all_finite=False, + copy=False) + statistics = self.statistics_ + + # Delete the invalid rows/columns + invalid_mask = np.isnan(statistics) + valid_mask = np.logical_not(invalid_mask) + valid_statistics = statistics[valid_mask] + valid_statistics_indexes = np.where(valid_mask)[0] + missing = np.arange(X.shape[not self.axis])[invalid_mask] + + if self.axis == 0 and invalid_mask.any(): + if self.verbose: + warnings.warn("Deleting features without " + "observed values: %s" % missing) + X = X[:, valid_statistics_indexes] + elif self.axis == 1 and invalid_mask.any(): + raise ValueError("Some rows only contain " + "missing values: %s" % missing) + + # Do actual imputation + if sparse.issparse(X) and self.missing_values != 0: + mask = _get_mask(X.data, self.missing_values) + indexes = np.repeat(np.arange(len(X.indptr) - 1, dtype=np.int), + np.diff(X.indptr))[mask] + + X.data[mask] = valid_statistics[indexes].astype(X.dtype) + else: + if sparse.issparse(X): + X = X.toarray() + + mask = _get_mask(X, self.missing_values) + n_missing = np.sum(mask, axis=self.axis) + values = np.repeat(valid_statistics, n_missing) + + if self.axis == 0: + coordinates = np.where(mask.transpose())[::-1] + else: + coordinates = mask + + X[coordinates] = values + + return X diff --git a/autosklearn/pipeline/implementations/MinMaxScaler.py b/autosklearn/pipeline/implementations/MinMaxScaler.py new file mode 100644 index 0000000000..ff6215fcaf --- /dev/null +++ b/autosklearn/pipeline/implementations/MinMaxScaler.py @@ -0,0 +1,141 @@ +import numpy as np +from scipy import sparse + +from sklearn.base import BaseEstimator, TransformerMixin +from sklearn.utils.validation import check_is_fitted, check_array, warn_if_not_float + + +class MinMaxScaler(BaseEstimator, TransformerMixin): + """Transforms features by scaling each feature to a given range. + + This estimator scales and translates each feature individually such + that it is in the given range on the training set, i.e. between + zero and one. + + The transformation is given by:: + + X_std = (X - X.min(axis=0)) / (X.max(axis=0) - X.min(axis=0)) + X_scaled = X_std * (max - min) + min + + where min, max = feature_range. + + This transformation is often used as an alternative to zero mean, + unit variance scaling. + + Read more in the :ref:`User Guide `. + + Parameters + ---------- + feature_range: tuple (min, max), default=(0, 1) + Desired range of transformed data. + + copy : boolean, optional, default True + Set to False to perform inplace row normalization and avoid a + copy (if the input is already a numpy array). + + Attributes + ---------- + min_ : ndarray, shape (n_features,) + Per feature adjustment for minimum. + + scale_ : ndarray, shape (n_features,) + Per feature relative scaling of the data. 
+ """ + + def __init__(self, feature_range=(0, 1), copy=True): + self.feature_range = feature_range + self.copy = copy + + def fit(self, X, y=None): + """Compute the minimum and maximum to be used for later scaling. + Parameters + ---------- + X : array-like, shape [n_samples, n_features] + The data used to compute the per-feature minimum and maximum + used for later scaling along the features axis. + """ + X = check_array(X, copy=self.copy, ensure_2d=True, + accept_sparse="csc", dtype=np.float32, + ensure_min_samples=2) + + if warn_if_not_float(X, estimator=self): + # Costly conversion, but otherwise the pipeline will break: + # https://github.com/scikit-learn/scikit-learn/issues/1709 + X = X.astype(np.float) + + feature_range = self.feature_range + if feature_range[0] >= feature_range[1]: + raise ValueError("Minimum of desired feature range must be smaller" + " than maximum. Got %s." % str(feature_range)) + if sparse.issparse(X): + data_min = [] + data_max = [] + data_range = [] + for i in range(X.shape[1]): + if X.indptr[i] == X.indptr[i+1]: + data_min.append(0) + data_max.append(0) + data_range.append(0) + else: + data_min.append(X.data[X.indptr[i]:X.indptr[i + 1]].min()) + data_max.append(X.data[X.indptr[i]:X.indptr[i + 1]].max()) + data_min = np.array(data_min, dtype=np.float32) + data_max = np.array(data_max, dtype=np.float32) + data_range = data_max - data_min + + else: + data_min = np.min(X, axis=0) + data_range = np.max(X, axis=0) - data_min + + # Do not scale constant features + if isinstance(data_range, np.ndarray): + # For a sparse matrix, constant features will be set to one! + if sparse.issparse(X): + for i in range(len(data_min)): + if data_range[i] == 0.0: + data_min[i] = data_min[i] - 1 + data_range[data_range == 0.0] = 1.0 + elif data_range == 0.: + data_range = 1. + + self.scale_ = (feature_range[1] - feature_range[0]) / data_range + self.min_ = feature_range[0] - data_min * self.scale_ + self.data_range = data_range + self.data_min = data_min + return self + + def transform(self, X): + """Scaling features of X according to feature_range. + + Parameters + ---------- + X : array-like with shape [n_samples, n_features] + Input data that will be transformed. + """ + check_is_fitted(self, 'scale_') + + X = check_array(X, accept_sparse="csc", copy=self.copy) + + if sparse.issparse(X): + for i in range(X.shape[1]): + X.data[X.indptr[i]:X.indptr[i + 1]] *= self.scale_[i] + X.data[X.indptr[i]:X.indptr[i + 1]] += self.min_[i] + else: + X *= self.scale_ + X += self.min_ + return X + + def inverse_transform(self, X): + """Undo the scaling of X according to feature_range. + + Parameters + ---------- + X : array-like with shape [n_samples, n_features] + Input data that will be transformed. 
+ """ + check_is_fitted(self, 'scale_') + + X = check_array(X, copy=self.copy, accept_sparse="csc", ensure_2d=False) + X -= self.min_ + X /= self.scale_ + return X \ No newline at end of file diff --git a/autosklearn/pipeline/implementations/MultilabelClassifier.py b/autosklearn/pipeline/implementations/MultilabelClassifier.py new file mode 100644 index 0000000000..de9b1c4030 --- /dev/null +++ b/autosklearn/pipeline/implementations/MultilabelClassifier.py @@ -0,0 +1,67 @@ +import warnings +import numpy as np + +from sklearn.base import clone +from sklearn.preprocessing import LabelBinarizer +from sklearn.multiclass import OneVsRestClassifier, _ConstantPredictor + + +def _fit_binary(estimator, X, y, classes=None, sample_weight=None): + """Fit a single binary estimator.""" + unique_y = np.unique(y) + if len(unique_y) == 1: + if classes is not None: + if y[0] == -1: + c = 0 + else: + c = y[0] + warnings.warn("Label %s is present in all training examples." % + str(classes[c])) + estimator = _ConstantPredictor().fit(X, unique_y) + else: + estimator = clone(estimator) + estimator.fit(X, y, sample_weight=None) + return estimator + + +class MultilabelClassifier(OneVsRestClassifier): + """Subclasses sklearn.multiclass.OneVsRestClassifier in order to add + sample weights. Works as original code, but forwards sample_weihts to + base estimator + + Taken from: + https://github.com/scikit-learn/scikit-learn/blob/a95203b/sklearn/multiclass.py#L203 + """ + + def fit(self, X, y, sample_weight=None): + """Fit underlying estimators. + Parameters + ---------- + X : (sparse) array-like, shape = [n_samples, n_features] + Data. + y : (sparse) array-like, shape = [n_samples] or [n_samples, n_classes] + Multi-class targets. An indicator matrix turns on multilabel + classification. + Returns + ------- + self + """ + # A sparse LabelBinarizer, with sparse_output=True, has been shown to + # outpreform or match a dense label binarizer in all cases and has also + # resulted in less or equal memory consumption in the fit_ovr function + # overall. + self.label_binarizer_ = LabelBinarizer(sparse_output=True) + Y = self.label_binarizer_.fit_transform(y) + Y = Y.tocsc() + columns = (col.toarray().ravel() for col in Y.T) + # In cases where individual estimators are very fast to train setting + # n_jobs > 1 in can results in slower performance due to the overhead + # of spawning threads. See joblib issue #112. + self.estimators_ = [_fit_binary(estimator=self.estimator, + X=X, y=column, + classes=["not %s" % self.label_binarizer_.classes_[i], self.label_binarizer_.classes_[i]], + sample_weight=sample_weight) + for i, column in enumerate(columns)] + + return self + diff --git a/autosklearn/pipeline/implementations/Normalizer.py b/autosklearn/pipeline/implementations/Normalizer.py new file mode 100644 index 0000000000..0d2be626f1 --- /dev/null +++ b/autosklearn/pipeline/implementations/Normalizer.py @@ -0,0 +1,138 @@ +from sklearn.base import BaseEstimator, TransformerMixin +from sklearn.utils import check_array +from sklearn.utils import warn_if_not_float +from sklearn.utils.extmath import row_norms +from sklearn.utils.sparsefuncs_fast import inplace_csr_row_normalize_l1 +from sklearn.utils.sparsefuncs_fast import inplace_csr_row_normalize_l2 + +import numpy as np +from scipy import sparse + +def normalize(X, norm='l2', axis=1, copy=True): + """Scale input vectors individually to unit norm (vector length). 
+ + Parameters + ---------- + X : array or scipy.sparse matrix with shape [n_samples, n_features] + The data to normalize, element by element. + scipy.sparse matrices should be in CSR format to avoid an + un-necessary copy. + + norm : 'l1' or 'l2', optional ('l2' by default) + The norm to use to normalize each non zero sample (or each non-zero + feature if axis is 0). + + axis : 0 or 1, optional (1 by default) + axis used to normalize the data along. If 1, independently normalize + each sample, otherwise (if 0) normalize each feature. + + copy : boolean, optional, default True + set to False to perform inplace row normalization and avoid a + copy (if the input is already a numpy array or a scipy.sparse + CSR matrix and if axis is 1). + + See also + -------- + :class:`sklearn.preprocessing.Normalizer` to perform normalization + using the ``Transformer`` API (e.g. as part of a preprocessing + :class:`sklearn.pipeline.Pipeline`) + """ + if norm not in ('l1', 'l2'): + raise ValueError("'%s' is not a supported norm" % norm) + + if axis == 0: + sparse_format = 'csc' + elif axis == 1: + sparse_format = 'csr' + else: + raise ValueError("'%d' is not a supported axis" % axis) + + X = check_array(X, sparse_format, copy=copy) + warn_if_not_float(X, 'The normalize function') + if axis == 0: + X = X.T + + if sparse.issparse(X): + X = check_array(X, accept_sparse=sparse_format, dtype=np.float64) + if norm == 'l1': + inplace_csr_row_normalize_l1(X) + elif norm == 'l2': + inplace_csr_row_normalize_l2(X) + else: + if norm == 'l1': + norms = np.abs(X).sum(axis=1) + norms[norms == 0.0] = 1.0 + elif norm == 'l2': + norms = row_norms(X) + norms[norms == 0.0] = 1.0 + X /= norms[:, np.newaxis] + + if axis == 0: + X = X.T + + return X + + +class Normalizer(BaseEstimator, TransformerMixin): + """Normalize samples individually to unit norm. + + Each sample (i.e. each row of the data matrix) with at least one + non zero component is rescaled independently of other samples so + that its norm (l1 or l2) equals one. + + This transformer is able to work both with dense numpy arrays and + scipy.sparse matrix (use CSR format if you want to avoid the burden of + a copy / conversion). + + Scaling inputs to unit norms is a common operation for text + classification or clustering for instance. For instance the dot + product of two l2-normalized TF-IDF vectors is the cosine similarity + of the vectors and is the base similarity metric for the Vector + Space Model commonly used by the Information Retrieval community. + + Parameters + ---------- + norm : 'l1' or 'l2', optional ('l2' by default) + The norm to use to normalize each non zero sample. + + copy : boolean, optional, default True + set to False to perform inplace row normalization and avoid a + copy (if the input is already a numpy array or a scipy.sparse + CSR matrix). + + Notes + ----- + This estimator is stateless (besides constructor parameters), the + fit method does nothing but is useful when used in a pipeline. + + See also + -------- + :func:`sklearn.preprocessing.normalize` equivalent function + without the object oriented API + """ + + def __init__(self, norm='l2', copy=True): + self.norm = norm + self.copy = copy + + def fit(self, X, y=None): + """Do nothing and return the estimator unchanged + + This method is just there to implement the usual API and hence + work in pipelines. 
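For dense input, the l2 branch of normalize() reduces to a row-wise division; a two-line numpy equivalent (all-zero rows left untouched, as above):

```python
import numpy as np

X = np.array([[3., 4.],
              [0., 0.]])                 # all-zero row must stay untouched

norms = np.sqrt((X ** 2).sum(axis=1))    # what row_norms(X) computes
norms[norms == 0.] = 1.
print(X / norms[:, np.newaxis])          # [[0.6 0.8] [0.  0. ]]
```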
+ """ + X = check_array(X, accept_sparse='csr') + return self + + def transform(self, X, y=None, copy=None): + """Scale each non zero row of X to unit norm + + Parameters + ---------- + X : array or scipy.sparse matrix with shape [n_samples, n_features] + The data to normalize, row by row. scipy.sparse matrices should be + in CSR format to avoid an un-necessary copy. + """ + copy = copy if copy is not None else self.copy + X = check_array(X, accept_sparse='csr') + return normalize(X, norm=self.norm, axis=1, copy=copy) diff --git a/autosklearn/pipeline/implementations/OneHotEncoder.py b/autosklearn/pipeline/implementations/OneHotEncoder.py new file mode 100644 index 0000000000..89f7bdec97 --- /dev/null +++ b/autosklearn/pipeline/implementations/OneHotEncoder.py @@ -0,0 +1,371 @@ +import numpy as np +from scipy import sparse + +from sklearn.base import BaseEstimator, TransformerMixin + +from sklearn.utils import check_array + + +def _transform_selected(X, transform, selected="all", copy=True): + """Apply a transform function to portion of selected features + + Parameters + ---------- + X : array-like or sparse matrix, shape=(n_samples, n_features) + Dense array or sparse matrix. + + transform : callable + A callable transform(X) -> X_transformed + + copy : boolean, optional + Copy X even if it could be avoided. + + selected: "all" or array of indices or mask + Specify which features to apply the transform to. + + Returns + ------- + X : array or sparse matrix, shape=(n_samples, n_features_new) + """ + if selected == "all": + return transform(X) + + if len(selected) == 0: + return X + + X = check_array(X, accept_sparse='csc', force_all_finite=False) + + n_features = X.shape[1] + ind = np.arange(n_features) + sel = np.zeros(n_features, dtype=bool) + sel[np.asarray(selected)] = True + not_sel = np.logical_not(sel) + n_selected = np.sum(sel) + + if n_selected == 0: + # No features selected. + return X + elif n_selected == n_features: + # All features selected. + return transform(X) + else: + X_sel = transform(X[:, ind[sel]]) + X_not_sel = X[:, ind[not_sel]] + + if sparse.issparse(X_sel) or sparse.issparse(X_not_sel): + return sparse.hstack((X_sel, X_not_sel), format='csr') + else: + return np.hstack((X_sel, X_not_sel)) + + +class OneHotEncoder(BaseEstimator, TransformerMixin): + """Encode categorical integer features using a one-hot aka one-of-K scheme. + + The input to this transformer should be a matrix of integers, denoting + the values taken on by categorical (discrete) features. The output will be + a sparse matrix were each column corresponds to one possible value of one + feature. It is assumed that input features take on values in the range + [0, n_values). + + This encoding is needed for feeding categorical data to many scikit-learn + estimators, notably linear models and SVMs with the standard kernels. + + Parameters + ---------- + + categorical_features: "all" or array of indices or mask + Specify what features are treated as categorical. + + - 'all' (default): All features are treated as categorical. + - array of indices: Array of categorical feature indices. + - mask: Array of length n_features and with dtype=bool. + + Non-categorical features are always stacked to the right of the matrix. + + dtype : number type, default=np.float + Desired dtype of output. + + sparse : boolean, default=True + Will return sparse matrix if set True else will return an array. 
+ + Attributes + ---------- + `active_features_` : array + Indices for active features, meaning values that actually occur + in the training set. Only available when n_values is ``'auto'``. + + `feature_indices_` : array of shape (n_features,) + Indices to feature ranges. + Feature ``i`` in the original data is mapped to features + from ``feature_indices_[i]`` to ``feature_indices_[i+1]`` + (and then potentially masked by `active_features_` afterwards) + + `n_values_` : array of shape (n_features,) + Maximum number of values per feature. + + Examples + -------- + Given a dataset with three features and two samples, we let the encoder + find the maximum value per feature and transform the data to a binary + one-hot encoding. + + >>> from sklearn.preprocessing import OneHotEncoder + >>> enc = OneHotEncoder() + >>> enc.fit([[0, 0, 3], [1, 1, 0], [0, 2, 1], \ +[1, 0, 2]]) # doctest: +ELLIPSIS + OneHotEncoder(categorical_features='all', dtype=<... 'float'>, + sparse=True, minimum_fraction=None) + >>> enc.n_values_ + array([2, 3, 4]) + >>> enc.feature_indices_ + array([0, 2, 5, 9]) + >>> enc.transform([[0, 1, 1]]).toarray() + array([[ 1., 0., 0., 1., 0., 0., 1., 0., 0.]]) + + See also + -------- + sklearn.feature_extraction.DictVectorizer : performs a one-hot encoding of + dictionary items (also handles string-valued features). + sklearn.feature_extraction.FeatureHasher : performs an approximate one-hot + encoding of dictionary items or strings. + """ + + def __init__(self, categorical_features="all", dtype=np.float, + sparse=True, minimum_fraction=None): + self.categorical_features = categorical_features + self.dtype = dtype + self.sparse = sparse + self.minimum_fraction = minimum_fraction + + def fit(self, X, y=None): + """Fit OneHotEncoder to X. + + Parameters + ---------- + X : array-like, shape=(n_samples, n_feature) + Input array of type int. 
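minimum_fraction is the main addition over scikit-learn's encoder: categories rarer than the threshold are collapsed into a shared 'other' indicator during fitting. A small sketch, assuming the import path from this diff:

```python
import numpy as np
from autosklearn.pipeline.implementations.OneHotEncoder import OneHotEncoder

# Category 2 appears in 1 of 8 rows (12.5%), below the 30% threshold, so it
# is remapped to the shared 'other' value before the indicator columns are built.
X = np.array([[0.], [0.], [0.], [0.], [1.], [1.], [1.], [2.]])
enc = OneHotEncoder(minimum_fraction=0.3, sparse=False)
print(enc.fit_transform(X))
```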
+ + Returns + ------- + self + """ + self.fit_transform(X) + return self + + def _fit_transform(self, X): + """Assumes X contains only categorical features.""" + + # First increment everything by three to account for the fact that + # np.NaN will get an index of two, and 'other' values will get index of + # one, index of zero is not assigned to also work with sparse data + if sparse.issparse(X): + X.data += 3 + X.data[~np.isfinite(X.data)] = 2 + else: + X += 3 + X[~np.isfinite(X)] = 2 + + X = check_array(X, accept_sparse='csc', force_all_finite=False, + dtype=int) + + if X.min() < 0: + raise ValueError("X needs to contain only non-negative integers.") + n_samples, n_features = X.shape + + # Remember which values should not be replaced by the value 'other' + if self.minimum_fraction is not None: + do_not_replace_by_other = list() + for column in range(X.shape[1]): + do_not_replace_by_other.append(list()) + + + if sparse.issparse(X): + indptr_start = X.indptr[column] + indptr_end = X.indptr[column + 1] + unique = np.unique(X.data[indptr_start:indptr_end]) + colsize = indptr_end - indptr_start + else: + unique = np.unique(X[:, column]) + colsize = X.shape[0] + + for unique_value in unique: + if np.isfinite(unique_value): + if sparse.issparse(X): + indptr_start = X.indptr[column] + indptr_end = X.indptr[column + 1] + count = np.nansum(unique_value == + X.data[indptr_start:indptr_end]) + else: + count = np.nansum(unique_value == X[:, column]) + else: + if sparse.issparse(X): + indptr_start = X.indptr[column] + indptr_end = X.indptr[column + 1] + count = np.nansum(~np.isfinite( + X.data[indptr_start:indptr_end])) + else: + count = np.nansum(~np.isfinite(X[:, column])) + + fraction = float(count) / colsize + if fraction >= self.minimum_fraction: + do_not_replace_by_other[-1].append(unique_value) + + for unique_value in unique: + if unique_value not in do_not_replace_by_other[-1]: + if sparse.issparse(X): + indptr_start = X.indptr[column] + indptr_end = X.indptr[column + 1] + X.data[indptr_start:indptr_end][ + X.data[indptr_start:indptr_end] == + unique_value] = 1 + else: + X[:, column][X[:, column] == unique_value] = 1 + + self.do_not_replace_by_other_ = do_not_replace_by_other + + if sparse.issparse(X): + n_values = X.max(axis=0).toarray().flatten() + 2 + else: + n_values = np.max(X, axis=0) + 2 + + self.n_values_ = n_values + n_values = np.hstack([[0], n_values]) + indices = np.cumsum(n_values) + self.feature_indices_ = indices + + if sparse.issparse(X): + row_indices = X.indices + column_indices = [] + for i in range(len(X.indptr) - 1): + nbr = X.indptr[i+1] - X.indptr[i] + column_indices_ = [indices[i]] * nbr + column_indices_ += X.data[X.indptr[i]:X.indptr[i+1]] + column_indices.extend(column_indices_) + data = np.ones(X.data.size) + else: + column_indices = (X + indices[:-1]).ravel() + row_indices = np.repeat(np.arange(n_samples, dtype=np.int32), + n_features) + data = np.ones(n_samples * n_features) + + out = sparse.coo_matrix((data, (row_indices, column_indices)), + shape=(n_samples, indices[-1]), + dtype=self.dtype).tocsc() + + mask = np.array(out.sum(axis=0)).ravel() != 0 + active_features = np.where(mask)[0] + out = out[:, active_features] + self.active_features_ = active_features + return out.tocsr() if self.sparse else out.toarray() + + def fit_transform(self, X, y=None): + """Fit OneHotEncoder to X, then transform X. + + Equivalent to self.fit(X).transform(X), but more convenient and more + efficient. See fit for the parameters, transform for the return value. 
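The "+3" shift at the top of _fit_transform (and _transform) reserves the low indices for special values before any counting happens; traced on one column:

```python
import numpy as np

col = np.array([0., 1., np.nan, 4.])
col += 3                          # raw categories now start at index 3
col[~np.isfinite(col)] = 2        # np.nan -> reserved index 2
# index 1 is reserved for infrequent ('other') values, and index 0 for
# out-of-bounds values first seen at transform time
print(col)                        # [3. 4. 2. 7.]
```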
+ """ + return _transform_selected(X, self._fit_transform, + self.categorical_features, copy=True) + + def _transform(self, X): + """Asssumes X contains only categorical features.""" + + # First increment everything by three to account for the fact that + # np.NaN will get an index of two, and 'other' values will get index of + # one, index of zero is not assigned to also work with sparse data + if sparse.issparse(X): + X.data += 3 + X.data[~np.isfinite(X.data)] = 2 + else: + X += 3 + X[~np.isfinite(X)] = 2 + + X = check_array(X, accept_sparse='csc', force_all_finite=False, + dtype=int) + if X.min() < 0: + raise ValueError("X needs to contain only non-negative integers.") + n_samples, n_features = X.shape + + indices = self.feature_indices_ + if n_features != indices.shape[0] - 1: + raise ValueError("X has different shape than during fitting." + " Expected %d, got %d." + % (indices.shape[0] - 1, n_features)) + + # Replace all indicators which were below `minimum_fraction` in the + # training set by 'other' + if self.minimum_fraction is not None: + for column in range(X.shape[1]): + if sparse.issparse(X): + indptr_start = X.indptr[column] + indptr_end = X.indptr[column + 1] + unique = np.unique(X.data[indptr_start:indptr_end]) + else: + unique = np.unique(X[:, column]) + + for unique_value in unique: + if unique_value not in self.do_not_replace_by_other_[ + column]: + if sparse.issparse(X): + indptr_start = X.indptr[column] + indptr_end = X.indptr[column + 1] + X.data[indptr_start:indptr_end][ + X.data[indptr_start:indptr_end] == + unique_value] = 1 + else: + X[:, column][X[:, column] == unique_value] = 1 + + if sparse.issparse(X): + n_values_check = X.max(axis=0).toarray().flatten() + 1 + else: + n_values_check = np.max(X, axis=0) + 1 + + # Replace all indicators which are out of bounds by 'other' (index 0) + if (n_values_check > self.n_values_).any(): + # raise ValueError("Feature out of bounds. Try setting n_values.") + for i, n_value_check in enumerate(n_values_check): + if (n_value_check - 1) >= self.n_values_[i]: + if sparse.issparse(X): + indptr_start = X.indptr[i] + indptr_end = X.indptr[i+1] + X.data[indptr_start:indptr_end][X.data + [indptr_start:indptr_end] >= self.n_values_[i]] = 0 + else: + X[:, i][X[:, i] >= self.n_values_[i]] = 0 + + if sparse.issparse(X): + row_indices = X.indices + column_indices = [] + for i in range(len(X.indptr) - 1): + nbr = X.indptr[i + 1] - X.indptr[i] + column_indices_ = [indices[i]] * nbr + column_indices_ += X.data[X.indptr[i]:X.indptr[i + 1]] + column_indices.extend(column_indices_) + data = np.ones(X.data.size) + else: + column_indices = (X + indices[:-1]).ravel() + row_indices = np.repeat(np.arange(n_samples, dtype=np.int32), + n_features) + data = np.ones(n_samples * n_features) + out = sparse.coo_matrix((data, (row_indices, column_indices)), + shape=(n_samples, indices[-1]), + dtype=self.dtype).tocsc() + + out = out[:, self.active_features_] + return out.tocsr() if self.sparse else out.toarray() + + def transform(self, X): + """Transform X using one-hot encoding. + + Parameters + ---------- + X : array-like, shape=(n_samples, n_features) + Input array of type int. + + Returns + ------- + X_out : sparse matrix if sparse=True else a 2-d array, dtype=int + Transformed input. 
+ """ + return _transform_selected(X, self._transform, + self.categorical_features, copy=True) diff --git a/autosklearn/pipeline/implementations/ProjLogit.py b/autosklearn/pipeline/implementations/ProjLogit.py new file mode 100644 index 0000000000..cf12df75d9 --- /dev/null +++ b/autosklearn/pipeline/implementations/ProjLogit.py @@ -0,0 +1,90 @@ +import numpy as np +import numpy.random as npr + +# from http://arxiv.org/pdf/1309.1541v1.pdf +def proj_simplex(Y): + N,D = np.shape(Y) + # sort in descending order + X = -np.sort(-Y) + Xsum = np.cumsum(X, axis = 1) - 1 + Xsum = Xsum * (1./np.arange(1,D+1)) + biggest = np.sum(X > Xsum, axis = 1) + # TODO last step could be made faster + # via ravel / linear indexing + subtract = np.zeros((N, 1)) + for i in range(N): + subtract[i] = Xsum[i, biggest[i]-1] + return np.maximum(Y - subtract, 0) + + +class ProjLogit(object): + + def __init__(self, max_epochs = 10, verbose = False): + self.w0 = None + self.ws_all = [] + self.w_all = [] + self.max_epochs = max_epochs + self.verbose = verbose + + def fit(self, X, Y): + # get one hot encoding and add a bias + n = X.shape[0] + trainx = np.hstack([np.ones((n, 1)), X]) + k = np.max(Y) + 1 + if self.verbose: + print("Using {} samples of {} classes".format(n,k)) + yt = np.zeros((n, k)) + for i in range(n): + yt[i, Y[i]] = 1 + # initialize with linear regression + precond = np.eye(trainx.shape[1]) * np.sqrt(n) + C = np.linalg.cholesky(0.5 * np.dot(trainx.T,trainx) + precond) + wp = np.linalg.solve(C, np.dot(trainx.T, yt)) + w = np.linalg.solve(C.T, wp) + self.w0 = np.copy(w) + pred_train = np.dot(trainx, w) + for i in range(self.max_epochs): + # expand prediction + res = np.hstack([pred_train, np.power(pred_train, 2) / 2., np.power(pred_train, 3) / 6., np.power(pred_train, 4) / 24.]) + # solve with linear regression + precond = np.eye(res.shape[1]) * np.sqrt(n) + Cp = np.linalg.cholesky(np.dot(res.T,res) + precond) + ws = np.linalg.solve(Cp.T, np.linalg.solve(Cp, np.dot(res.T, yt))) + self.ws_all.append(np.copy(ws)) + # project to probability simplex + p_res = proj_simplex(np.dot(res, ws)) + # and solve again with updated residual + wp = np.linalg.solve(C, np.dot(trainx.T, (yt - p_res))) + w = np.linalg.solve(C.T, wp) + self.w_all.append(np.copy(w)) + pred_train = p_res + np.dot(trainx, w) + obj = np.linalg.norm(yt - pred_train) + + # compute train error + errort = np.sum(np.argmax(pred_train, axis = 1) != Y) + # print training error + if self.verbose: + print("Epoch {} obj: {} train error: {}".format(i,obj,1.*errort/n)) + return self + + + def predict(self, X): + res = self.predict_proba(X) + return np.argmax(res, axis = 1) + + def predict_proba(self, X): + if self.w0 == None: + raise NotImplementedError + testx = np.hstack([np.ones((X.shape[0], 1)), X]) + pred = np.dot(testx, self.w0) + for ws, w in zip(self.ws_all, self.w_all): + res = np.hstack([pred, np.power(pred, 2) / 2., np.power(pred, 3) / 6., np.power(pred, 4) / 24.]) + p_res = proj_simplex(np.dot(res, ws)) + pred = p_res + np.dot(testx, w) + return proj_simplex(pred) + + def predict_log_proba(self, X): + if self.w == None: + return np.zeros(X.shape[0]) + res = np.log(self.predict_proba(X)) + return res diff --git a/autosklearn/pipeline/implementations/StandardScaler.py b/autosklearn/pipeline/implementations/StandardScaler.py new file mode 100644 index 0000000000..511f1f983c --- /dev/null +++ b/autosklearn/pipeline/implementations/StandardScaler.py @@ -0,0 +1,224 @@ +import numpy as np +from scipy import sparse + +from sklearn.base import BaseEstimator, 
+from sklearn.utils.validation import check_array, warn_if_not_float, check_is_fitted
+from sklearn.utils.sparsefuncs import inplace_column_scale, \
+    mean_variance_axis
+
+
+def _mean_and_std(X, axis=0, with_mean=True, with_std=True):
+    """Compute mean and std deviation for centering, scaling.
+    Zero valued std components are reset to 1.0 to avoid NaNs when scaling.
+    """
+    X = np.asarray(X)
+    Xr = np.rollaxis(X, axis)
+
+    if with_mean:
+        mean_ = Xr.mean(axis=0)
+    else:
+        mean_ = None
+
+    if with_std:
+        std_ = Xr.std(axis=0)
+        if isinstance(std_, np.ndarray):
+            std_[std_ == 0.] = 1.0
+        elif std_ == 0.:
+            std_ = 1.
+    else:
+        std_ = None
+
+    return mean_, std_
+
+
+class StandardScaler(BaseEstimator, TransformerMixin):
+    """Standardize features by removing the mean and scaling to unit variance
+    Centering and scaling happen independently on each feature by computing
+    the relevant statistics on the samples in the training set. Mean and
+    standard deviation are then stored to be used on later data using the
+    `transform` method.
+    Standardization of a dataset is a common requirement for many
+    machine learning estimators: they might behave badly if the
+    individual features do not more or less look like standard normally
+    distributed data (e.g. Gaussian with 0 mean and unit variance).
+    For instance many elements used in the objective function of
+    a learning algorithm (such as the RBF kernel of Support Vector
+    Machines or the L1 and L2 regularizers of linear models) assume that
+    all features are centered around 0 and have variance in the same
+    order. If a feature has a variance that is orders of magnitude larger
+    than others, it might dominate the objective function and make the
+    estimator unable to learn from other features correctly as expected.
+    Parameters
+    ----------
+    with_mean : boolean, True by default
+        If True, center the data before scaling.
+        This does not work (and will raise an exception) when attempted on
+        sparse matrices, because centering them entails building a dense
+        matrix which in common use cases is likely to be too large to fit in
+        memory.
+
+    with_std : boolean, True by default
+        If True, scale the data to unit variance (or equivalently,
+        unit standard deviation).
+
+    copy : boolean, optional, default True
+        If False, try to avoid a copy and do inplace scaling instead.
+        This is not guaranteed to always work inplace; e.g. if the data is
+        not a NumPy array or scipy.sparse CSR matrix, a copy may still be
+        returned.
+
+    Attributes
+    ----------
+    mean_ : array of floats with shape [n_features]
+        The mean value for each feature in the training set.
+
+    std_ : array of floats with shape [n_features]
+        The standard deviation for each feature in the training set.
+
+    See also
+    --------
+    :func:`sklearn.preprocessing.scale` to perform centering and
+    scaling without using the ``Transformer`` object oriented API
+
+    :class:`sklearn.decomposition.RandomizedPCA` with `whiten=True`
+    to further remove the linear correlation across features.
+    """
+
+
+    def __init__(self, copy=True, with_mean=True, with_std=True,
+                 center_sparse=True):
+        self.with_mean = with_mean
+        self.with_std = with_std
+        self.copy = copy
+        self.center_sparse = center_sparse
+
+    def fit(self, X, y=None):
+        """Don't trust the documentation of this module!
+
+        Compute the mean and std to be used for later scaling.
+
+        Parameters
+        ----------
+        X : array-like or CSC matrix with shape [n_samples, n_features]
+            The data used to compute the mean and standard deviation
+            used for later scaling along the features axis.
+        """
+        X = check_array(X, copy=self.copy, accept_sparse="csc",
+                        ensure_2d=False)
+        if warn_if_not_float(X, estimator=self):
+            # Costly conversion, but otherwise the pipeline will break:
+            # https://github.com/scikit-learn/scikit-learn/issues/1709
+            X = X.astype(np.float32)
+        if sparse.issparse(X):
+            if self.center_sparse:
+                means = []
+                vars = []
+
+                # This only works for csc matrices...
+                for i in range(X.shape[1]):
+                    if X.indptr[i] == X.indptr[i + 1]:
+                        means.append(0)
+                        vars.append(1)
+                    else:
+                        vars.append(
+                            X.data[X.indptr[i]:X.indptr[i + 1]].var())
+                        # If the variance is 0, set all occurrences of this
+                        # feature to 1
+                        means.append(
+                            X.data[X.indptr[i]:X.indptr[i + 1]].mean())
+                        if 0.0000001 >= vars[-1] >= -0.0000001:
+                            means[-1] -= 1
+
+                self.std_ = np.sqrt(np.array(vars))
+                self.std_[np.array(vars) == 0.0] = 1.0
+                self.mean_ = np.array(means)
+
+                return self
+            elif self.with_mean:
+                raise ValueError(
+                    "Cannot center sparse matrices: pass `with_mean=False` "
+                    "instead. See docstring for motivation and alternatives.")
+            else:
+                self.mean_ = None
+
+                if self.with_std:
+                    var = mean_variance_axis(X, axis=0)[1]
+                    self.std_ = np.sqrt(var)
+                    self.std_[var == 0.0] = 1.0
+                else:
+                    self.std_ = None
+                return self
+        else:
+            self.mean_, self.std_ = _mean_and_std(
+                X, axis=0, with_mean=self.with_mean, with_std=self.with_std)
+            return self
+
+    def transform(self, X, y=None, copy=None):
+        """Perform standardization by centering and scaling
+
+        Parameters
+        ----------
+        X : array-like with shape [n_samples, n_features]
+            The data used to scale along the features axis.
+        """
+        check_is_fitted(self, 'std_')
+
+        copy = copy if copy is not None else self.copy
+        X = check_array(X, copy=copy, accept_sparse="csc", ensure_2d=False)
+        if warn_if_not_float(X, estimator=self):
+            X = X.astype(np.float)
+        if sparse.issparse(X):
+            if self.center_sparse:
+                for i in range(X.shape[1]):
+                    X.data[X.indptr[i]:X.indptr[i + 1]] -= self.mean_[i]
+
+            elif self.with_mean:
+                raise ValueError(
+                    "Cannot center sparse matrices: pass `with_mean=False` "
+                    "instead. See docstring for motivation and alternatives.")
+
+            else:
+                pass
+
+            if self.std_ is not None:
+                inplace_column_scale(X, 1 / self.std_)
+        else:
+            if self.with_mean:
+                X -= self.mean_
+            if self.with_std:
+                X /= self.std_
+        return X
+
+
+    def inverse_transform(self, X, copy=None):
+        """Scale back the data to the original representation
+
+        Parameters
+        ----------
+        X : array-like with shape [n_samples, n_features]
+            The data used to scale along the features axis.
+ """ + check_is_fitted(self, 'std_') + + copy = copy if copy is not None else self.copy + if sparse.issparse(X): + if self.with_mean: + raise ValueError( + "Cannot uncenter sparse matrices: pass `with_mean=False` " + "instead See docstring for motivation and alternatives.") + if not sparse.isspmatrix_csr(X): + X = X.tocsr() + copy = False + if copy: + X = X.copy() + if self.std_ is not None: + inplace_column_scale(X, self.std_) + else: + X = np.asarray(X) + if copy: + X = X.copy() + if self.with_std: + X *= self.std_ + if self.with_mean: + X += self.mean_ + return X diff --git a/autosklearn/pipeline/implementations/__init__.py b/autosklearn/pipeline/implementations/__init__.py new file mode 100644 index 0000000000..8f0ce6cb7c --- /dev/null +++ b/autosklearn/pipeline/implementations/__init__.py @@ -0,0 +1 @@ +__author__ = 'feurerm' diff --git a/autosklearn/pipeline/implementations/gem.py b/autosklearn/pipeline/implementations/gem.py new file mode 100644 index 0000000000..96d1b3e488 --- /dev/null +++ b/autosklearn/pipeline/implementations/gem.py @@ -0,0 +1,47 @@ +import numpy as np +from scipy.sparse.linalg import eigs + + +class GEM(object): + + + def __init__(self, N, precond): + self.N = N + self.precond = precond + self.W = None + self.verbose = False + + + def fit(self, X, Y): + print(X.shape, Y.shape) + self.N = min(self.N, X.shape[1]-2) + y_max = int(np.max(Y) + 1) + self.W = np.zeros((X.shape[1], self.N*y_max*(y_max-1)), dtype=X.dtype) + off = 0 + for i in range(y_max): + Xi = X[Y == i] + covi = np.dot(Xi.T, Xi) + covi /= np.float32(Xi.shape[0]) + for j in range(y_max): + if j == i: + continue + if self.verbose: + print("Finding eigenvectors for pair ({}/{})".format(i,j)) + Xj = X[Y == j] + covj = np.dot(Xj.T, Xj) / np.float32(Xj.shape[0]) + E = np.linalg.pinv(np.linalg.cholesky(covj + np.eye(covj.shape[0]) * self.precond).T) + C = np.dot(np.dot(E.T, covi), E) + C2 = 0.5 * (C + C.T) + S,U = eigs(C2, self.N) + gev = np.dot(E, U[:, :self.N]) + self.W[:, off:off+self.N] = gev + off += self.N + print("DONE") + return self + + + def transform(self, X, Y=None): + features = np.maximum(np.dot(X, self.W), 0) + return features + + diff --git a/autosklearn/pipeline/implementations/util.py b/autosklearn/pipeline/implementations/util.py new file mode 100644 index 0000000000..555fe3d323 --- /dev/null +++ b/autosklearn/pipeline/implementations/util.py @@ -0,0 +1,17 @@ +import numpy as np + + +def softmax(df): + if len(df.shape) == 1: + df[df > 20] = 20 + df[df < -20] = -20 + ppositive = 1 / (1 + np.exp(-df)) + ppositive[ppositive > 0.999999] = 1 + ppositive[ppositive < 0.0000001] = 0 + return np.transpose(np.array((1 - ppositive, ppositive))) + else: + # Compute the Softmax like it is described here: + # http://www.iro.umontreal.ca/~bengioy/dlbook/numerical.html + tmp = df - np.max(df, axis=1).reshape((-1, 1)) + tmp = np.exp(tmp) + return tmp / np.sum(tmp, axis=1).reshape((-1, 1)) \ No newline at end of file diff --git a/autosklearn/pipeline/regression.py b/autosklearn/pipeline/regression.py new file mode 100644 index 0000000000..492a706629 --- /dev/null +++ b/autosklearn/pipeline/regression.py @@ -0,0 +1,240 @@ +from collections import OrderedDict +import copy +from itertools import product + +from sklearn.base import RegressorMixin + +from HPOlibConfigSpace.forbidden import ForbiddenEqualsClause, ForbiddenAndConjunction +from HPOlibConfigSpace.configuration_space import ConfigurationSpace + +from autosklearn.pipeline import components as components +from autosklearn.pipeline.base import 
+from autosklearn.pipeline.constants import SPARSE
+
+
+class SimpleRegressionPipeline(RegressorMixin, BasePipeline):
+    """This class implements the regression task.
+
+    It implements a pipeline, which includes one preprocessing step and one
+    regression algorithm. It can render a search space including all known
+    regression and preprocessing algorithms.
+
+    Contrary to the sklearn API it is not possible to enumerate the
+    possible parameters in the __init__ function because we only know the
+    available regressors at runtime. For this reason the user must
+    specify the parameters by passing an instance of
+    HPOlibConfigSpace.configuration_space.Configuration.
+
+    Parameters
+    ----------
+    configuration : HPOlibConfigSpace.configuration_space.Configuration
+        The configuration to evaluate.
+
+    random_state : int, RandomState instance or None, optional (default=None)
+        If int, random_state is the seed used by the random number generator;
+        If RandomState instance, random_state is the random number generator;
+        If None, the random number generator is the RandomState instance
+        used by `np.random`.
+
+    Attributes
+    ----------
+    _estimator : The underlying scikit-learn regression model. This
+        variable is assigned after a call to the
+        :meth:`autosklearn.pipeline.regression.SimpleRegressionPipeline.fit`
+        method.
+
+    _preprocessor : The underlying scikit-learn preprocessing algorithm. This
+        variable is only assigned if a preprocessor is specified and
+        after a call to the
+        :meth:`autosklearn.pipeline.regression.SimpleRegressionPipeline.fit`
+        method.
+
+    See also
+    --------
+
+    References
+    ----------
+
+    Examples
+    --------
+
+    """
+
+    def pre_transform(self, X, Y, fit_params=None, init_params=None):
+        X, fit_params = super(SimpleRegressionPipeline, self).pre_transform(
+            X, Y, fit_params=fit_params, init_params=init_params)
+        self.num_targets = 1 if len(Y.shape) == 1 else Y.shape[1]
+        return X, fit_params
+
+    @classmethod
+    def get_available_components(cls, available_comp, data_prop, inc, exc):
+        components_dict = OrderedDict()
+        for name in available_comp:
+            if inc is not None and name not in inc:
+                continue
+            elif exc is not None and name in exc:
+                continue
+            entry = available_comp[name]
+
+            if not entry.get_properties()['handles_regression']:
+                continue
+            components_dict[name] = entry
+        return components_dict
+
+    @classmethod
+    def get_hyperparameter_search_space(cls, include=None, exclude=None,
+                                        dataset_properties=None):
+        """Return the configuration space for the CASH problem.
+
+        Parameters
+        ----------
+        include_estimators : list of str
+            If include_estimators is given, only the regressors specified
+            are used. Specify them by their module name; e.g., to include
+            only the SVM use :python:`include_regressors=['svr']`.
+            Cannot be used together with :python:`exclude_regressors`.
+
+        exclude_estimators : list of str
+            If exclude_estimators is given, the regressors specified are
+            not used. Specify them by their module name; e.g., to include
+            all regressors except the SVM use
+            :python:`exclude_regressors=['svr']`.
+            Cannot be used together with :python:`include_regressors`.
+
+        include_preprocessors : list of str
+            If include_preprocessors is given, only the preprocessors specified
+            are used. Specify them by their module name; e.g., to include
+            only the PCA use :python:`include_preprocessors=['pca']`.
+            Cannot be used together with :python:`exclude_preprocessors`.
+
+        exclude_preprocessors : list of str
+            If exclude_preprocessors is given, the preprocessors specified
+            are not used. Specify them by their module name; e.g., to include
+            all preprocessors except the PCA use
+            :python:`exclude_preprocessors=['pca']`.
+            Cannot be used together with :python:`include_preprocessors`.
+
+        Returns
+        -------
+        cs : HPOlibConfigSpace.configuration_space.ConfigurationSpace
+            The configuration space describing the SimpleRegressionPipeline.
+        """
+        cs = ConfigurationSpace()
+
+        if dataset_properties is None or not isinstance(dataset_properties, dict):
+            dataset_properties = dict()
+        if 'target_type' not in dataset_properties:
+            dataset_properties['target_type'] = 'regression'
+        if dataset_properties['target_type'] != 'regression':
+            dataset_properties['target_type'] = 'regression'
+
+        if 'sparse' not in dataset_properties:
+            # This dataset is probably dense
+            dataset_properties['sparse'] = False
+
+        pipeline = cls._get_pipeline()
+        cs = cls._get_hyperparameter_search_space(cs, dataset_properties,
+                                                  exclude, include, pipeline)
+
+        regressors = cs.get_hyperparameter('regressor:__choice__').choices
+        preprocessors = cs.get_hyperparameter('preprocessor:__choice__').choices
+        available_regressors = pipeline[-1][1].get_available_components(
+            dataset_properties)
+        available_preprocessors = pipeline[-2][1].get_available_components(
+            dataset_properties)
+
+        possible_default_regressor = copy.copy(list(
+            available_regressors.keys()))
+        default = cs.get_hyperparameter('regressor:__choice__').default
+        del possible_default_regressor[
+            possible_default_regressor.index(default)]
+
+        # For a regressor which can handle sparse data, forbid the densifier
+        for key in regressors:
+            if SPARSE in available_regressors[key].get_properties(dataset_properties=None)['input']:
+                if 'densifier' in preprocessors:
+                    while True:
+                        try:
+                            cs.add_forbidden_clause(
+                                ForbiddenAndConjunction(
+                                    ForbiddenEqualsClause(
+                                        cs.get_hyperparameter(
+                                            'regressor:__choice__'), key),
+                                    ForbiddenEqualsClause(
+                                        cs.get_hyperparameter(
+                                            'preprocessor:__choice__'), 'densifier')
+                                ))
+                            break
+                        except ValueError:
+                            # Change the default and try again
+                            try:
+                                default = possible_default_regressor.pop()
+                            except IndexError:
+                                raise ValueError(
+                                    "Cannot find a legal default configuration.")
+                            cs.get_hyperparameter(
+                                'regressor:__choice__').default = default
+
+        # Forbid combinations of tree-based models with feature learning,
+        # which would take too long:
+        regressors_ = ["adaboost", "decision_tree", "extra_trees",
+                       "gaussian_process", "gradient_boosting",
+                       "k_nearest_neighbors", "random_forest"]
+        feature_learning_ = ["kitchen_sinks", "kernel_pca", "nystroem_sampler"]
+
+        for r, f in product(regressors_, feature_learning_):
+            if r not in regressors:
+                continue
+            if f not in preprocessors:
+                continue
+            while True:
+                try:
+                    cs.add_forbidden_clause(ForbiddenAndConjunction(
+                        ForbiddenEqualsClause(cs.get_hyperparameter(
+                            "regressor:__choice__"), r),
+                        ForbiddenEqualsClause(cs.get_hyperparameter(
+                            "preprocessor:__choice__"), f)))
+                    break
+                except KeyError:
+                    break
+                except ValueError:
+                    # Change the default and try again
+                    try:
+                        default = possible_default_regressor.pop()
+                    except IndexError:
+                        raise ValueError(
+                            "Cannot find a legal default configuration.")
+                    cs.get_hyperparameter(
+                        'regressor:__choice__').default = default
+
+        return cs
+
+    @staticmethod
+    def _get_estimator_components():
+        return components.regression_components._regressors
+
+    @classmethod
+    def _get_pipeline(cls):
+        steps = []
+
+        # Add the always active preprocessing components
+        steps.extend(
+            [["one_hot_encoding",
+              components.data_preprocessing._preprocessors['one_hot_encoding']],
+             ["imputation",
+              components.data_preprocessing._preprocessors['imputation']],
+             ["rescaling",
+              components.data_preprocessing._preprocessors['rescaling']]])
+
+        # Add the preprocessing component
+        steps.append(['preprocessor',
+                      components.feature_preprocessing._preprocessors[
+                          'preprocessor']])
+
+        # Add the regression component
+        steps.append(['regressor',
+                      components.regression_components._regressors['regressor']])
+        return steps
+
+    def _get_estimator_hyperparameter_name(self):
+        return "regressor"
diff --git a/autosklearn/pipeline/util.py b/autosklearn/pipeline/util.py
new file mode 100644
index 0000000000..fcc3e01ce9
--- /dev/null
+++ b/autosklearn/pipeline/util.py
@@ -0,0 +1,239 @@
+import importlib
+import inspect
+import os
+import pkgutil
+import unittest
+
+import numpy as np
+import scipy.sparse
+import sklearn
+import sklearn.base
+import sklearn.datasets
+
+
+def find_sklearn_classes(class_):
+    classifiers = set()
+    all_subdirectories = []
+    sklearn_path = sklearn.__path__[0]
+    for root, dirs, files in os.walk(sklearn_path):
+        all_subdirectories.append(root)
+
+    for module_loader, module_name, ispkg in \
+            pkgutil.iter_modules(all_subdirectories):
+
+        # Work around some issues...
+        if module_name in ["hmm", "mixture"]:
+            print("Skipping %s" % module_name)
+            continue
+
+        module_file = module_loader.__dict__["path"]
+        sklearn_module = module_file.replace(sklearn_path, "").replace("/", ".")
+        full_module_name = "sklearn" + sklearn_module + "." + module_name
+
+        pkg = importlib.import_module(full_module_name)
+
+        for member_name, obj in inspect.getmembers(pkg):
+            if inspect.isclass(obj) and \
+                    issubclass(obj, class_):
+                classifier = obj
+                # print member_name, obj
+                classifiers.add(classifier)
+
+    print()
+    for classifier in sorted([str(cls) for cls in classifiers]):
+        print(classifier)
+
+
+def get_dataset(dataset='iris', make_sparse=False, add_NaNs=False,
+                train_size_maximum=150):
+    iris = getattr(sklearn.datasets, "load_%s" % dataset)()
+    X = iris.data.astype(np.float32)
+    Y = iris.target
+    rs = np.random.RandomState(42)
+    indices = np.arange(X.shape[0])
+    train_size = min(int(len(indices) / 3. * 2.), train_size_maximum)
+    rs.shuffle(indices)
+    X = X[indices]
+    Y = Y[indices]
+    X_train = X[:train_size]
+    Y_train = Y[:train_size]
+    X_test = X[train_size:]
+    Y_test = Y[train_size:]
+
+    if add_NaNs:
+        mask = rs.choice([True, False], size=(X_train.shape))
+        X_train[mask] = np.NaN
+
+    if make_sparse:
+        X_train[:, 0] = 0
+        X_train[rs.random_sample(X_train.shape) > 0.5] = 0
+        X_train = scipy.sparse.csc_matrix(X_train)
+        X_train.eliminate_zeros()
+        X_test[:, 0] = 0
+        X_test[rs.random_sample(X_test.shape) > 0.5] = 0
+        X_test = scipy.sparse.csc_matrix(X_test)
+        X_test.eliminate_zeros()
+
+    return X_train, Y_train, X_test, Y_test
+
+
+def _test_classifier(classifier, dataset='iris', sparse=False,
+                     train_size_maximum=150):
+    X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset,
+                                                   make_sparse=sparse,
+                                                   train_size_maximum=train_size_maximum)
+    configuration_space = classifier.get_hyperparameter_search_space(
+        dataset_properties={'sparse': sparse})
+    default = configuration_space.get_default_configuration()
+    classifier = classifier(random_state=1,
+                            **{hp_name: default[hp_name] for hp_name in
+                               default if default[hp_name] is not None})
+    predictor = classifier.fit(X_train, Y_train)
+    predictions = predictor.predict(X_test)
+    return predictions, Y_test
+
+
+def _test_classifier_iterative_fit(classifier, dataset='iris', sparse=False):
+    X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset,
+                                                   make_sparse=sparse)
+    configuration_space = classifier.get_hyperparameter_search_space(
+        dataset_properties={'sparse': sparse})
+    default = configuration_space.get_default_configuration()
+    classifier = classifier(random_state=1,
+                            **{hp_name: default[hp_name] for hp_name in
+                               default if default[hp_name] is not None})
+    while not classifier.configuration_fully_fitted():
+        predictor = classifier.iterative_fit(X_train, Y_train)
+    predictions = predictor.predict(X_test)
+    return predictions, Y_test
+
+
+def _test_classifier_predict_proba(classifier, dataset='iris', sparse=False,
+                                   train_size_maximum=150):
+    X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset,
+                                                   make_sparse=sparse,
+                                                   train_size_maximum=train_size_maximum)
+    configuration_space = classifier.get_hyperparameter_search_space()
+    default = configuration_space.get_default_configuration()
+    classifier = classifier(random_state=1,
+                            **{hp_name: default[hp_name] for hp_name in
+                               default})
+    predictor = classifier.fit(X_train, Y_train)
+    predictions = predictor.predict_proba(X_test)
+    return predictions, Y_test
+
+
+def _test_preprocessing(Preprocessor, dataset='iris', make_sparse=False):
+    X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset,
+                                                   make_sparse=make_sparse)
+    original_X_train = X_train.copy()
+    configuration_space = Preprocessor.get_hyperparameter_search_space()
+    default = configuration_space.get_default_configuration()
+
+    preprocessor = Preprocessor(random_state=1,
+                                **{hp_name: default[hp_name] for hp_name in
+                                   default if default[hp_name] is not None})
+
+    transformer = preprocessor.fit(X_train, Y_train)
+    return transformer.transform(X_train), original_X_train
+
+
+class PreprocessingTestCase(unittest.TestCase):
+    def _test_preprocessing_dtype(self, Preprocessor, add_NaNs=False,
+                                  test_sparse=True, dataset='iris'):
+        # Dense
+        # np.float32
+        X_train, Y_train, X_test, Y_test = get_dataset(dataset, add_NaNs=add_NaNs)
+        self.assertEqual(X_train.dtype, np.float32)
+
+        configuration_space = Preprocessor.get_hyperparameter_search_space()
+        default = configuration_space.get_default_configuration()
+        preprocessor = Preprocessor(random_state=1,
+                                    **{hp_name: default[hp_name] for hp_name in
+                                       default})
+        preprocessor.fit(X_train, Y_train)
+        Xt = preprocessor.transform(X_train)
+        self.assertEqual(Xt.dtype, np.float32)
+
+        # np.float64
+        X_train, Y_train, X_test, Y_test = get_dataset(dataset, add_NaNs=add_NaNs)
+        X_train = X_train.astype(np.float64)
+        configuration_space = Preprocessor.get_hyperparameter_search_space()
+        default = configuration_space.get_default_configuration()
+        preprocessor = Preprocessor(random_state=1,
+                                    **{hp_name: default[hp_name] for hp_name in
+                                       default})
+        preprocessor.fit(X_train, Y_train)
+        Xt = preprocessor.transform(X_train)
+        self.assertEqual(Xt.dtype, np.float64)
+
+        if test_sparse is True:
+            # Sparse
+            # np.float32
+            X_train, Y_train, X_test, Y_test = get_dataset(dataset, make_sparse=True,
+                                                           add_NaNs=add_NaNs)
+            self.assertEqual(X_train.dtype, np.float32)
+            configuration_space = Preprocessor.get_hyperparameter_search_space()
+            default = configuration_space.get_default_configuration()
+            preprocessor = Preprocessor(random_state=1,
+                                        **{hp_name: default[hp_name] for hp_name
+                                           in default})
+            preprocessor.fit(X_train, Y_train)
+            Xt = preprocessor.transform(X_train)
+            self.assertEqual(Xt.dtype, np.float32)
+
+            # np.float64
+            X_train, Y_train, X_test, Y_test = get_dataset(dataset,
+                                                           make_sparse=True,
+                                                           add_NaNs=add_NaNs)
+            X_train = X_train.astype(np.float64)
+            configuration_space = Preprocessor.get_hyperparameter_search_space()
+            default = configuration_space.get_default_configuration()
+            preprocessor = Preprocessor(random_state=1,
+                                        **{hp_name: default[hp_name] for hp_name
+                                           in default})
+            preprocessor.fit(X_train, Y_train)
+            Xt = preprocessor.transform(X_train)
+            self.assertEqual(Xt.dtype, np.float64)
+
+
+def _test_regressor(Regressor, dataset='diabetes', sparse=False):
+    X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset,
+                                                   make_sparse=sparse)
+    configuration_space = Regressor.get_hyperparameter_search_space()
+    default = configuration_space.get_default_configuration()
+    regressor = Regressor(random_state=1,
+                          **{hp_name: default[hp_name] for hp_name in
+                             default})
+    # Crude, incomplete check that fitting does not modify the input data
+    X_train_hash = hash(str(X_train))
+    X_test_hash = hash(str(X_test))
+    Y_train_hash = hash(str(Y_train))
+    predictor = regressor.fit(X_train, Y_train)
+    predictions = predictor.predict(X_test)
+    if X_train_hash != hash(str(X_train)) or \
+            X_test_hash != hash(str(X_test)) or \
+            Y_train_hash != hash(str(Y_train)):
+        raise ValueError("Model modified data")
+    return predictions, Y_test
+
+
+def _test_regressor_iterative_fit(Regressor, dataset='diabetes', sparse=False):
+    X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset,
+                                                   make_sparse=sparse)
+    configuration_space = Regressor.get_hyperparameter_search_space(
+        dataset_properties={'sparse': sparse})
+    default = configuration_space.get_default_configuration()
+    regressor = Regressor(random_state=1,
+                          **{hp_name: default[hp_name] for hp_name in
+                             default})
+    while not regressor.configuration_fully_fitted():
+        regressor = regressor.iterative_fit(X_train, Y_train)
+    predictions = regressor.predict(X_test)
+    return predictions, Y_test
+
+
+if __name__ == "__main__":
+    find_sklearn_classes(sklearn.base.ClassifierMixin)
+    find_sklearn_classes(sklearn.base.RegressorMixin)
+    find_sklearn_classes(sklearn.base.TransformerMixin)
diff --git a/autosklearn/util/backend.py b/autosklearn/util/backend.py
index 4bd9456682..585eb4385f 100644
--- a/autosklearn/util/backend.py
+++ b/autosklearn/util/backend.py
@@ -161,7 +161,7 @@ def load_all_models(self, seed):
             basename = os.path.basename(model_file)
             automl_seed = int(basename.split('.')[0])
             idx = int(basename.split('.')[1])
-            with open(os.path.join(model_directory, model_file), 'rb') as fh:
+            with open(os.path.join(model_directory, basename), 'rb') as fh:
                 models[(automl_seed, idx)] = (pickle.load(fh))
 
         return models
diff --git a/autosklearn/util/paramsklearn.py b/autosklearn/util/pipeline.py
similarity index 80%
rename from autosklearn/util/paramsklearn.py
rename to autosklearn/util/pipeline.py
index 46227750e8..9f3f3be67c 100755
--- a/autosklearn/util/paramsklearn.py
+++ b/autosklearn/util/pipeline.py
@@ -1,7 +1,7 @@
 # -*- encoding: utf-8 -*-
 from autosklearn.constants import *
-from ParamSklearn.classification import ParamSklearnClassifier
-from ParamSklearn.regression import ParamSklearnRegressor
+from autosklearn.pipeline.classification import SimpleClassificationPipeline
+from autosklearn.pipeline.regression import SimpleRegressionPipeline
 
 
 __all__ = [
@@ -31,7 +31,7 @@ def _get_regression_configuration_space(info, include):
     sparse = False
     if info['is_sparse'] == 1:
         sparse = True
-    configuration_space = ParamSklearnRegressor. \
+    configuration_space = SimpleRegressionPipeline. \
         get_hyperparameter_search_space(include=include,
                                         dataset_properties={'sparse': sparse})
     return configuration_space
@@ -62,20 +62,20 @@ def _get_classification_configuration_space(info, include):
         'sparse': sparse
     }
 
-    return ParamSklearnClassifier.get_hyperparameter_search_space(
+    return SimpleClassificationPipeline.get_hyperparameter_search_space(
         dataset_properties=dataset_properties,
         include=include)
 
 
 def get_model(configuration, seed):
     if 'classifier' in configuration:
-        return ParamSklearnClassifier(configuration, seed)
+        return SimpleClassificationPipeline(configuration, seed)
     elif 'regressor' in configuration:
-        return ParamSklearnRegressor(configuration, seed)
+        return SimpleRegressionPipeline(configuration, seed)
 
 
 def get_class(info):
     if info['task'] in REGRESSION_TASKS:
-        return ParamSklearnRegressor
+        return SimpleRegressionPipeline
     else:
-        return ParamSklearnClassifier
+        return SimpleClassificationPipeline
diff --git a/autosklearn/util/submit_process.py b/autosklearn/util/submit_process.py
index f4a3dbe74b..dbffd7b1b8 100644
--- a/autosklearn/util/submit_process.py
+++ b/autosklearn/util/submit_process.py
@@ -4,9 +4,8 @@
 import shlex
 import subprocess
 
-import lockfile
+import psutil
 
-import autosklearn
 from autosklearn.constants import *
 from autosklearn.util import logging_ as logging
 
@@ -35,7 +34,9 @@ def submit_call(call, seed, logger, log_dir=None):
         logger.critical('Problem starting subprocess, see error message '
                        'above. PATH is %s' % os.environ['PATH'])
 
-    return proc
+    pid = proc.pid
+    process = psutil.Process(pid)
+    return process
 
 
 def run_ensemble_builder(tmp_dir, dataset_name, task_type, metric, limit,
diff --git a/make.bat b/make.bat
new file mode 100644
index 0000000000..10e0b3175a
--- /dev/null
+++ b/make.bat
@@ -0,0 +1,242 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+    set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source
+set I18NSPHINXOPTS=%SPHINXOPTS% source
+if NOT "%PAPER%" == "" (
+    set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
+    set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
+    :help
+    echo.Please use `make ^<target^>` where ^<target^> is one of
+    echo.  html       to make standalone HTML files
+    echo.  dirhtml    to make HTML files named index.html in directories
+    echo.  singlehtml to make a single large HTML file
+    echo.  pickle     to make pickle files
+    echo.  json       to make JSON files
+    echo.  htmlhelp   to make HTML files and a HTML help project
+    echo.  qthelp     to make HTML files and a qthelp project
+    echo.  devhelp    to make HTML files and a Devhelp project
+    echo.  epub       to make an epub
+    echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
+    echo.  text       to make text files
+    echo.  man        to make manual pages
+    echo.  texinfo    to make Texinfo files
+    echo.  gettext    to make PO message catalogs
+    echo.  changes    to make an overview over all changed/added/deprecated items
+    echo.  xml        to make Docutils-native XML files
+    echo.  pseudoxml  to make pseudoxml-XML files for display purposes
+    echo.  linkcheck  to check all external links for integrity
+    echo.  doctest    to run all doctests embedded in the documentation if enabled
+    goto end
+)
+
+if "%1" == "clean" (
+    for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
+    del /q /s %BUILDDIR%\*
+    goto end
+)
+
+
+%SPHINXBUILD% 2> nul
+if errorlevel 9009 (
+    echo.
+    echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+    echo.installed, then set the SPHINXBUILD environment variable to point
+    echo.to the full path of the 'sphinx-build' executable. Alternatively you
+    echo.may add the Sphinx directory to PATH.
+    echo.
+    echo.If you don't have Sphinx installed, grab it from
+    echo.http://sphinx-doc.org/
+    exit /b 1
+)
+
+if "%1" == "html" (
+    %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished. The HTML pages are in %BUILDDIR%/html.
+    goto end
+)
+
+if "%1" == "dirhtml" (
+    %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
+    goto end
+)
+
+if "%1" == "singlehtml" (
+    %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
+    goto end
+)
+
+if "%1" == "pickle" (
+    %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished; now you can process the pickle files.
+    goto end
+)
+
+if "%1" == "json" (
+    %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished; now you can process the JSON files.
+    goto end
+)
+
+if "%1" == "htmlhelp" (
+    %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
+    goto end
+)
+
+if "%1" == "qthelp" (
+    %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
+    echo.^> qcollectiongenerator %BUILDDIR%\qthelp\ParamSklearn.qhcp
+    echo.To view the help file:
+    echo.^> assistant -collectionFile %BUILDDIR%\qthelp\ParamSklearn.qhc
+    goto end
+)
+
+if "%1" == "devhelp" (
+    %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished.
+    goto end
+)
+
+if "%1" == "epub" (
+    %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished. The epub file is in %BUILDDIR%/epub.
+    goto end
+)
+
+if "%1" == "latex" (
+    %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
+    goto end
+)
+
+if "%1" == "latexpdf" (
+    %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+    cd %BUILDDIR%/latex
+    make all-pdf
+    cd %BUILDDIR%/..
+    echo.
+    echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+    goto end
+)
+
+if "%1" == "latexpdfja" (
+    %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
+    cd %BUILDDIR%/latex
+    make all-pdf-ja
+    cd %BUILDDIR%/..
+    echo.
+    echo.Build finished; the PDF files are in %BUILDDIR%/latex.
+    goto end
+)
+
+if "%1" == "text" (
+    %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished. The text files are in %BUILDDIR%/text.
+    goto end
+)
+
+if "%1" == "man" (
+    %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished. The manual pages are in %BUILDDIR%/man.
+    goto end
+)
+
+if "%1" == "texinfo" (
+    %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
+    goto end
+)
+
+if "%1" == "gettext" (
+    %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
+    goto end
+)
+
+if "%1" == "changes" (
+    %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.The overview file is in %BUILDDIR%/changes.
+    goto end
+)
+
+if "%1" == "linkcheck" (
+    %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
+    goto end
+)
+
+if "%1" == "doctest" (
+    %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
+    goto end
+)
+
+if "%1" == "xml" (
+    %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished. The XML files are in %BUILDDIR%/xml.
+    goto end
+)
+
+if "%1" == "pseudoxml" (
+    %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
+    if errorlevel 1 exit /b 1
+    echo.
+    echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
+    goto end
+)
+
+:end
diff --git a/misc/classifiers.csv b/misc/classifiers.csv
new file mode 100644
index 0000000000..aad2610b00
--- /dev/null
+++ b/misc/classifiers.csv
@@ -0,0 +1,45 @@
+class,added,comment
+,False,Mixin class which adds no functionality except the score function
+,,
+,False,Outlier detection
+,False,Please read the module name
+,False,This can blow up the configuration space; because we need to define a configured base object. Maybe consider later.
+,True,
+,False,Scikit-learn source code says: This class should not be used directly
+,True,
+,True,
+,True,Added with decision stumps(up to depth ten)
+,True,
+,False,Mixin but no full model
+,False,Is implemented using LibLinear
+,,
+,True,I don't know how similar to SGD this one is
+,False,In fact, Perceptron() is equivalent to SGDClassifier(...)
+,True,
+,False,Contains CV
+,False,This class has abstract methods
+,True,
+,False,This classifier is in a test module
+,False,This classifier is in a test module
+,False,Is a meta-estimator
+,False,Is a meta-estimator
+,False,Is a meta-estimator
+,False,Abstract base class for naive Bayes estimators
+,False,Abstract base class for naive Bayes on discrete/categorical data
+,True,
+,True,
+,True,
+,True,
+,False,Has no predict_proba method, method cannot be easily added
+,False,Has no predict_proba method, method cannot be easily added
+,True,
+,False,semi-supervised learning
+,False,semi-supervised learning
+,False,semi-supervised learning
+,False,ABC for LibSVM-based classifiers
+,True,
+,False,Equivalent to SVC
+,True,
+,False,This classifier is in a test module
+,True,
+,FALSE,Extra-trees should only be used within ensemble methods.
diff --git a/misc/create_hyperparameter_table.py b/misc/create_hyperparameter_table.py
new file mode 100644
index 0000000000..6e95289804
--- /dev/null
+++ b/misc/create_hyperparameter_table.py
@@ -0,0 +1,223 @@
+from argparse import ArgumentParser
+from collections import OrderedDict
+import os
+import shlex
+import subprocess
+
+import HPOlibConfigSpace.hyperparameters
+import autosklearn.pipeline.classification
+import autosklearn.pipeline.regression
+
+# Some macros
+COND = "conditional"
+CAT = "categorical"
+CONT = "continuous"
+CONST = "constant"
+UN = "unparameterized"
+
+template_string = \
+"""
+\documentclass{article} %% For LaTeX2
+\\usepackage[a4paper, left=5mm, right=5mm, top=5mm, bottom=5mm]{geometry}
+
+%%\\usepackage[landscape]{geometry}
+\\usepackage{multirow} %% import command \multicolumn
+\\usepackage{tabularx} %% Convenient table formatting
+\\usepackage{booktabs} %% provides \\toprule, \midrule and \\bottomrule
+
+\\begin{document}
+
+%s
+
+\\end{document}
+"""
+
+caption_str = "Number of Hyperparameters for each possible %s " \
+              "for a dataset with these properties: %s"
+
+table_str = \
+"""
+\\begin{table}[t!]
+\\centering
+\\scriptsize
+\\caption{ %s }
+\\begin{tabularx}{\\textwidth}{ X X X X X X }
+\\toprule
+name & \#$\lambda$ & cat (cond) & cont (cond) & const & un \\\\
+\\toprule
+\\\\
+%s
+\\\\
+\\toprule
+\\bottomrule
+\\end{tabularx}
+\\end{table}
+"""
+
+
+def get_dict(task_type="classifier", **kwargs):
+    assert task_type in ("classifier", "regressor")
+
+    if task_type == "classifier":
+        cs = autosklearn.pipeline.classification.SimpleClassificationPipeline\
+            .get_hyperparameter_search_space(dataset_properties=kwargs)
+    elif task_type == "regressor":
+        cs = autosklearn.pipeline.regression.SimpleRegressionPipeline\
+            .get_hyperparameter_search_space(dataset_properties=kwargs)
+    else:
+        raise ValueError("'task_type' is not in ('classifier', 'regressor')")
+
+    preprocessor = None
+    estimator = None
+
+    for h in cs.get_hyperparameters():
+        if h.name == "preprocessor:__choice__":
+            preprocessor = h
+        elif h.name == (task_type + ':__choice__'):
+            estimator = h
+
+    if estimator is None:
+        raise ValueError("No classifier found")
+    elif preprocessor is None:
+        raise ValueError("No preprocessor found")
+
+    estimator_dict = OrderedDict()
+    for i in sorted(estimator.choices):
+        estimator_dict[i] = OrderedDict()
+        estimator_dict[i][COND] = OrderedDict()
+        for t in (CAT, CONT, CONST):
+            estimator_dict[i][t] = 0
+            estimator_dict[i][COND][t] = 0
+        estimator_dict[i][UN] = 0
+
+    preprocessor_dict = OrderedDict()
+    for i in sorted(preprocessor.choices):
+        preprocessor_dict[i] = OrderedDict()
+        preprocessor_dict[i][COND] = OrderedDict()
+        for t in (CAT, CONT, CONST):
+            preprocessor_dict[i][t] = 0
+            preprocessor_dict[i][COND][t] = 0
+        preprocessor_dict[i][UN] = 0
+
+    for h in cs.get_hyperparameters():
+        if h.name == "preprocessor:__choice__" or \
+                h.name == (task_type + ':__choice__'):
+            continue
+        # walk over both dicts
+        for d in (estimator_dict, preprocessor_dict):
+            est = h.name.split(":")[1]
+            if est not in d:
+                continue
+            if isinstance(h, HPOlibConfigSpace.hyperparameters.UniformIntegerHyperparameter):
+                d[est][CONT] += 1
+            elif isinstance(h, HPOlibConfigSpace.hyperparameters.UniformFloatHyperparameter):
+                d[est][CONT] += 1
+            elif isinstance(h, HPOlibConfigSpace.hyperparameters.CategoricalHyperparameter):
+                d[est][CAT] += 1
+            elif isinstance(h, HPOlibConfigSpace.hyperparameters.Constant):
+                d[est][CONST] += 1
+            elif isinstance(h, HPOlibConfigSpace.hyperparameters.UnParametrizedHyperparameter):
+                d[est][UN] += 1
+            else:
+                raise ValueError("Don't know that type: %s" % type(h))
+
+    for h in cs.get_conditions():
+        if h.parent.name == (task_type + ':__choice__') or h.parent.name == \
+                "preprocessor:__choice__":
+            # ignore this condition
+            # print "IGNORE", h
+            continue
+
+        # walk over both dicts and collect hyperparams
+        for d in (estimator_dict, preprocessor_dict):
+            est = h.child.name.split(":")[1]
+            if est not in d:
+                #print "Could not find %s" % est
+                continue
+
+            #print "####"
+            #print vars(h)
+            #print h.parent
+            #print type(h)
+            if isinstance(h.child, HPOlibConfigSpace.hyperparameters.UniformIntegerHyperparameter):
+                d[est][COND][CONT] += 1
+            elif isinstance(h.child, HPOlibConfigSpace.hyperparameters.UniformFloatHyperparameter):
+                d[est][COND][CONT] += 1
+            elif isinstance(h.child, HPOlibConfigSpace.hyperparameters.CategoricalHyperparameter):
+                d[est][COND][CAT] += 1
+            elif isinstance(h.child, HPOlibConfigSpace.hyperparameters.Constant):
+                d[est][COND][CONST] += 1
+            elif isinstance(h.child, HPOlibConfigSpace.hyperparameters.UnParametrizedHyperparameter):
+                d[est][COND][UN] += 1
+            else:
+                raise ValueError("Don't know that type: %s" % type(h))
ValueError("Don't know that type: %s" % type(h)) + print(preprocessor_dict) + return (estimator_dict, preprocessor_dict) + + +def build_table(d): + lines = list() + for est in d.keys(): + sum_ = 0 + t_list = list([est.replace("_", " "), ]) + for t in (CAT, CONT): + sum_ += d[est][t] + t_list.append("%d (%d)" % (d[est][t], d[est][COND][t])) + t_list.append("%d" % d[est][CONST]) + t_list.append("%d" % d[est][UN]) + sum_ += d[est][CONST] + d[est][UN] + t_list.insert(1, "%d" % sum_) + lines.append(" & ".join(t_list)) + return "\\\\ \n".join(lines) + + +def main(): + parser = ArgumentParser() + + # General Options + parser.add_argument("-s", "--save", dest="save", default=None, + help="Where to save plot instead of showing it?") + parser.add_argument("-t", "--type", dest="task_type", default="classifier", + choices=("classifier", ), help="Type of dataset") + parser.add_argument("--sparse", dest="sparse", default=False, + action="store_true", help="dataset property") + prop = parser.add_mutually_exclusive_group(required=True) + prop.add_argument("--multilabel", dest="multilabel", default=False, + action="store_true", help="dataset property") + prop.add_argument("--multiclass", dest="multiclass", default=False, + action="store_true", help="dataset property") + prop.add_argument("--binary", dest="binary", default=False, + action="store_true", help="dataset property") + + args, unknown = parser.parse_known_args() + + props = {"sparse": args.sparse, + "multilabel": args.multilabel, + "multiclass": args.multiclass} + est_dict, preproc_dict = get_dict(task_type=args.task_type, **props) + + est_table = build_table(est_dict) + preproc_table = build_table(preproc_dict) + + est_table = table_str % (caption_str % (args.task_type, str(props)), est_table) + preproc_table = table_str % (caption_str % ("preprocessor", str(props)), preproc_table) + + tex_doc = template_string % "\n".join([est_table, preproc_table]) + if args.save is None: + print(tex_doc) + else: + fh = open(args.save, "w") + fh.write(tex_doc) + fh.close() + proc = subprocess.Popen(shlex.split('pdflatex %s' % args.save)) + proc.communicate() + try: + os.remove(args.save.replace(".tex", ".aux")) + os.remove(args.save.replace(".tex", ".log")) + except OSError: + # This is fine + pass + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/misc/regressors.csv b/misc/regressors.csv new file mode 100644 index 0000000000..83a162e65c --- /dev/null +++ b/misc/regressors.csv @@ -0,0 +1,50 @@ +class,added,comment +,False,BaseClass +,, +,False,Is a preprocessing method +,False,Is a preprocessing method +,False,Is a preprocessing method +,False,Is a preprocessing method +,False,See module name +,False,This can blow up the configuration space; because we need to define a configured base object. Maybe consider later. 
+,True,
+,False,Is a base class
+,True,
+,True,
+,True,
+,True,Crashes when getting two similar inputs
+,False,Calibration instead of prediction method
+,False,Add
+,False,No
+,False,Wait for Tobias' feedback
+,False,Wait for Tobias' feedback
+,False,Wait for Tobias' feedback
+,False,
+,False,Wait for Tobias' feedback
+,False,We want to perform CV ourselves
+,False,MultiTask
+,False,We want to perform CV ourselves
+,False,MultiTask
+,False,MultiTask
+,,No
+,False,We want to perform CV ourselves
+,,No
+,False,We want to perform CV ourselves
+,False,We want to perform CV ourselves
+,,
+,False,We want to perform CV ourselves
+,,
+,,no
+,True,
+,False,We want to perform CV ourselves
+,False,
+,True,
+,FALSE,This regressor is inside a test module
+,,
+,True,
+,False,Can crash when there is no neighbour within the radius
+,True,
+,False,Mathematically identical to SVR
+,True,
+,TRUE,
+,FALSE,Not to be used as a base regressor
diff --git a/misc/support_for_imbalanced_classes.txt b/misc/support_for_imbalanced_classes.txt
new file mode 100644
index 0000000000..e69229c5a0
--- /dev/null
+++ b/misc/support_for_imbalanced_classes.txt
@@ -0,0 +1,22 @@
+AdaBoost: Sample weights. If None, the sample weights are initialized to 1 / n_samples.
+Bernoulli_NB: Weights applied to individual samples (1. for unweighted).
+DecisionTree: Sample weights. If None, then samples are equally weighted. Splits that would create child nodes with net zero or negative weight are ignored while searching for a split in each node. In the case of classification, splits are also ignored if they would result in any single class carrying a negative weight in either child node.
+ExtraTrees: Sample weights. If None, then samples are equally weighted. Splits that would create child nodes with net zero or negative weight are ignored while searching for a split in each node. In the case of classification, splits are also ignored if they would result in any single class carrying a negative weight in either child node.
+GaussianNB: -
+GB: -
+kNN: -
+LDA: priors : array, optional, shape = [n_classes] ?
+LibLinear: class_weight : {dict, ‘auto’}, optional
+SVC: class_weight : {dict, ‘auto’}, optional; Per-sample weights. Rescale C per sample. Higher weights force the classifier to put more emphasis on these points.
+MultinomialNB: -
+PA: sample_weight : array-like, shape = [n_samples], optional
+QDA: -
+RF: sample_weight : array-like, shape = [n_samples] or None
+RidgeClassifier: class_weight : dict, optional
+SGD: class_weight : dict, {class_label
+
+
+
+
+Preprocessors:
+
diff --git a/misc/transformers.csv b/misc/transformers.csv
new file mode 100644
index 0000000000..422bd67484
--- /dev/null
+++ b/misc/transformers.csv
@@ -0,0 +1,99 @@
+class,added,comment
+,FALSE,BaseClass
+,FALSE,Mixin class for feature agglomeration.
+,,
+,TRUE,try out
+,FALSE,deprecated
+,FALSE,We expect this to not work better than PCA
+,False,BaseClass
+,FALSE,works only if there are known correlated data points
+,FALSE,Base class; https://www.stat.washington.edu/research/reports/2000/tr371.pdf
+,FALSE,Regression only
+,FALSE,Regression only
+,FALSE,Regression only
+,,
+,TRUE,try out
+,FALSE,
+,FALSE,What is the difference to Tobias' implementation?
+,FALSE,Mixin class for sparse coding
+,FALSE,
+,TRUE,try out
+,,
+,TRUE,try out
+,FALSE,Special case of sparse coding
+,FALSE,see above
+,TRUE,
+,FALSE,same output as above
+,FALSE,?
+,FALSE,?
+,FALSE,should not use according to the scikit-learn docs
+,True,
+,FALSE,Base class
+,TRUE,try out
+,FALSE,Only classification so far
+,FALSE,Base class
+,FALSE,Base class
+,FALSE,use ExtraTreesClassifier
+,FALSE,Only classification so far
+,True,
+,FALSE,Base class
+,FALSE,use ExtraTreesClassifier
+,FALSE,use ExtraTreesClassifier
+,FALSE,Similar to 1HotEncoding
+,FALSE,Useful when working with strings
+,FALSE,no text classification atm
+,FALSE,subclass of TransformerMixin
+,FALSE,subclass of TransformerMixin
+,FALSE,"Metaclass, can blow up the configuration space"
+,FALSE,"Metaclass, with cross validation"
+,FALSE,lives in the test module
+,FALSE,Base class
+,FALSE,Base class
+,TRUE,try out
+,TRUE,try out
+,TRUE,try out
+,FALSE,same as below but different formulation
+,True,
+,FALSE,Same as above but different formulation
+,FALSE,output transformation
+,FALSE,very special case
+,TRUE,try out
+,True,
+,FALSE,very special case
+,FALSE,Special case of GEM
+,FALSE,same as LibLinear
+,,
+,FALSE,same as SGD
+,FALSE,Base class
+,FALSE,not right now
+,FALSE,not right now
+,FALSE,not right now
+,FALSE,not right now
+,FALSE,test class
+,FALSE,test class
+,FALSE,only look at if clustering helps
+,FALSE,only look at if clustering helps
+,,
+,FALSE,handles only binary input
+,FALSE,"Right now, we do not have different feature sources."
+,FALSE,"Right now, we have no need to binarize data"
+,FALSE,"Right now, we have no need to center a kernel"
+,TRUE,
+,TRUE,
+,TRUE,
+,TRUE,
+,TRUE,
+,TRUE,
+,FALSE,"Right now, we have 1HotEncoding"
+,FALSE,This should be done before passing data to scikit-learn and thus not configured.
+,FALSE,…
+,FALSE,Base class
+,FALSE,use kitchen sinks instead
+,FALSE,use kitchen sinks
+,TRUE,
+,FALSE,Is in a test package
+,FALSE,Base class
+,FALSE,Use forests
+,FALSE,Use forests
+,FALSE,Use forests
+,FALSE,Use forests
diff --git a/requ.txt b/requ.txt
index 8f3d335341..c8a3ddae5f 100644
--- a/requ.txt
+++ b/requ.txt
@@ -2,7 +2,7 @@
 setuptools
 mock
 nose
-numpy>=0.16.0
+numpy>=1.9.0
 scipy>=0.14.1
 
 scikit-learn==0.16.1
@@ -18,4 +18,3 @@
 pandas
 Cython
 git+https://github.com/automl/HPOlibConfigSpace@master
-git+https://github.com/automl/paramsklearn@development
diff --git a/scripts/update_metadata/03_autosklearn_retrieve_metadata.py b/scripts/update_metadata/03_autosklearn_retrieve_metadata.py
index 90cc65fce5..530f734b98 100644
--- a/scripts/update_metadata/03_autosklearn_retrieve_metadata.py
+++ b/scripts/update_metadata/03_autosklearn_retrieve_metadata.py
@@ -11,7 +11,7 @@
     FloatHyperparameter, CategoricalHyperparameter, Constant
 
 from autosklearn.constants import *
-from autosklearn.util import paramsklearn
+from autosklearn.util import pipeline
 
 
 def retrieve_matadata(validation_directory, metric, configuration_space,
@@ -165,9 +165,9 @@ def retrieve_matadata(validation_directory, metric, configuration_space,
                     configuration = Configuration(
                         configuration_space, configuration)
                 except Exception as e:
-                    print "Configuration %s not applicable " \
+                    print("Configuration %s not applicable " \
                           "because of %s!" \
-                          % (row[1], e)
+                          % (row[1], e))
                     break
 
                 if str(configuration) in \
@@ -295,7 +295,7 @@ def main():
         output_dir_ = os.path.join(output_dir, '%s_%s_%s' % (
             metric, TASK_TYPES_TO_STRING[task],
             'sparse' if sparse else 'dense'))
-        configuration_space = paramsklearn.get_configuration_space(
+        configuration_space = pipeline.get_configuration_space(
             {'is_sparse': sparse, 'task': task}
         )
diff --git a/scripts/update_metadata/04_autosklearn_calculate_metafeatures.py b/scripts/update_metadata/04_autosklearn_calculate_metafeatures.py
index 95235f8aa5..419bb96877 100644
--- a/scripts/update_metadata/04_autosklearn_calculate_metafeatures.py
+++ b/scripts/update_metadata/04_autosklearn_calculate_metafeatures.py
@@ -23,12 +23,12 @@ def calculate_metafeatures(dataset, output_dir, cache_only):
             mf = DatasetMetafeatures.load(fh)
 
     elif cache_only is False:
-        print dataset_name
+        print(dataset_name)
 
         call = "python -m autosklearn.metalearning.calculate_metafeatures " \
               "--data-format automl-competition-format --dataset %s " \
              "--output-dir %s" % (dataset, output_dir)
-        print call
+        print(call)
 
         retval = subprocess.call(call, shell=True)
         if retval != 0:
diff --git a/source/api.rst b/source/api.rst
new file mode 100644
index 0000000000..23b72523e2
--- /dev/null
+++ b/source/api.rst
@@ -0,0 +1,21 @@
+:orphan:
+
+.. _api:
+
+APIs
+****
+
+Main modules
+============
+
+.. autoclass:: ParamSklearn.classification.ParamSklearnClassifier
+
+
+Extension Interfaces
+====================
+
+.. autoclass:: ParamSklearn.components.classification_base.ParamSklearnClassificationAlgorithm
+
+.. autoclass:: ParamSklearn.components.regression_base.ParamSklearnRegressionAlgorithm
+
+.. autoclass:: ParamSklearn.components.preprocessor_base.ParamSklearnPreprocessingAlgorithm
diff --git a/source/components.rst b/source/components.rst
new file mode 100644
index 0000000000..52b14bc0a0
--- /dev/null
+++ b/source/components.rst
@@ -0,0 +1,97 @@
+:orphan:
+
+.. _components:
+
+Available Components
+********************
+
+Classification
+==============
+
+A list of all classification algorithms considered in the ParamSklearn search space.
+
+.. autoclass:: ParamSklearn.components.classification.adaboost.AdaboostClassifier
+    :members:
+
+.. autoclass:: ParamSklearn.components.classification.bernoulli_nb.BernoulliNB
+    :members:
+
+.. autoclass:: ParamSklearn.components.classification.extra_trees.ExtraTreesClassifier
+    :members:
+
+.. autoclass:: ParamSklearn.components.classification.gaussian_nb.GaussianNB
+    :members:
+
+.. autoclass:: ParamSklearn.components.classification.gradient_boosting.GradientBoostingClassifier
+    :members:
+
+.. autoclass:: ParamSklearn.components.classification.k_nearest_neighbors.KNearestNeighborsClassifier
+    :members:
+
+.. autoclass:: ParamSklearn.components.classification.liblinear.LibLinear_SVC
+    :members:
+
+.. autoclass:: ParamSklearn.components.classification.libsvm_svc.LibSVM_SVC
+    :members:
+
+.. autoclass:: ParamSklearn.components.classification.multinomial_nb.MultinomialNB
+    :members:
+
+.. autoclass:: ParamSklearn.components.classification.random_forest.RandomForest
+    :members:
+
+.. autoclass:: ParamSklearn.components.classification.sgd.SGD
+    :members:
+
+Regression
+==========
+
+A list of all regression algorithms considered in the ParamSklearn search space.
+
+.. autoclass:: ParamSklearn.components.regression.gaussian_process.GaussianProcess
+    :members:
+
+.. autoclass:: ParamSklearn.components.regression.gradient_boosting.GradientBoosting
+    :members:
+
+.. autoclass:: ParamSklearn.components.regression.random_forest.RandomForest
+    :members:
+
+.. autoclass:: ParamSklearn.components.regression.ridge_regression.RidgeRegression
+    :members:
+
+
+Preprocessing
+=============
+
+.. autoclass:: ParamSklearn.components.preprocessing.densifier.Densifier
+    :members:
+
+.. autoclass:: ParamSklearn.components.preprocessing.imputation.Imputation
+    :members:
+
+.. autoclass:: ParamSklearn.components.preprocessing.kitchen_sinks.RandomKitchenSinks
+    :members:
+
+.. autoclass:: ParamSklearn.components.preprocessing.no_preprocessing.NoPreprocessing
+    :members:
+
+.. autoclass:: ParamSklearn.components.preprocessing.pca.PCA
+    :members:
+
+.. autoclass:: ParamSklearn.components.preprocessing.random_trees_embedding.RandomTreesEmbedding
+    :members:
+
+.. autoclass:: ParamSklearn.components.preprocessing.rescaling.Rescaling
+    :members:
+
+.. autoclass:: ParamSklearn.components.preprocessing.select_percentile_classification.SelectPercentileClassification
+    :members:
+
+.. autoclass:: ParamSklearn.components.preprocessing.select_percentile_regression.SelectPercentileRegression
+    :members:
+
+.. autoclass:: ParamSklearn.components.preprocessing.sparse_filtering.SparseFiltering
+    :members:
+
+.. autoclass:: ParamSklearn.components.preprocessing.truncatedSVD.TruncatedSVD
diff --git a/source/conf.py b/source/conf.py
index a274af2fad..9381aebdef 100644
--- a/source/conf.py
+++ b/source/conf.py
@@ -15,7 +15,6 @@
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-# sys.path.insert(0, os.path.abspath('.'))
 
 # -- General configuration ------------------------------------------------
 
@@ -59,13 +58,13 @@ class BaseEstimator(object):
                 'sklearn.utils',
                 'psutil','pyyaml','pandas',
                 'matplotlib',
-                'ParamSklearn',
-                'ParamSklearn.implementations',
-                'ParamSklearn.implementations.OneHotEncoder',
-                'ParamSklearn.implementations.Imputation',
-                'ParamSklearn.implementations.StandardScaler',
-                'ParamSklearn.classification',
-                'ParamSklearn.regression',
+                'autosklearn.pipeline',
+                'autosklearn.pipeline.implementations',
+                'autosklearn.pipeline.implementations.OneHotEncoder',
+                'autosklearn.pipeline.implementations.Imputation',
+                'autosklearn.pipeline.implementations.StandardScaler',
+                'autosklearn.pipeline.classification',
+                'autosklearn.pipeline.regression',
                 'HPOlibConfigSpace',
                 'HPOlibConfigSpace.converters',
                 'HPOlibConfigSpace.configuration_space']
diff --git a/source/extending_ParamSklearn.rst b/source/extending_ParamSklearn.rst
new file mode 100644
index 0000000000..4b1123bf49
--- /dev/null
+++ b/source/extending_ParamSklearn.rst
@@ -0,0 +1,4 @@
+Extending ParamSklearn
+**********************
+
+.. automodule:: ParamSklearn.components
diff --git a/source/index.rst b/source/index.rst
index 4defb25999..5dec85da48 100644
--- a/source/index.rst
+++ b/source/index.rst
@@ -55,11 +55,9 @@
 with Ubuntu. It should run on other Linux distributions, but won't work on
 a MAC or on a windows PC. It requires scikit-learn 0.16.1, which in turn
 requires numpy and scipy.
-*auto-sklearn* has several dependencies, which are not yet automatically
-resolved:
+*auto-sklearn* has a dependency, which is not yet automatically resolved:
 
 * `HPOlibConfigSpace <https://github.com/automl/HPOlibConfigSpace>`_
-* `ParamSklearn <https://github.com/automl/paramsklearn>`_
 
 Please install these manually with:
diff --git a/source/installation.rst b/source/installation.rst
new file mode 100644
index 0000000000..9c8eaa0d42
--- /dev/null
+++ b/source/installation.rst
@@ -0,0 +1,4 @@
+Install ParamSklearn
+********************
+
+Please see the file `README.md`.
\ No newline at end of file
diff --git a/source/introduction.rst b/source/introduction.rst
new file mode 100644
index 0000000000..43a62256ad
--- /dev/null
+++ b/source/introduction.rst
@@ -0,0 +1,40 @@
+Introduction to ParamSklearn
+****************************
+
+What is ParamSklearn?
+=====================
+
+.. automodule:: ParamSklearn
+
+Get involved
+============
+
+License
+=======
+We chose to license ParamSklearn the same way as scikit-learn. It is available
+under the open source and commercially usable 3-clause BSD license.
+
+Copyright (c) 2014, Matthias Feurer
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+* Redistributions in binary form must reproduce the above copyright
+  notice, this list of conditions and the following disclaimer in the
+  documentation and/or other materials provided with the distribution.
+* Neither the name of the University of Freiburg, nor the
+  names of its contributors may be used to endorse or promote products
+  derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL BE LIABLE FOR ANY
+DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
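A note on the source/conf.py hunk further above: it only renames entries in the documentation build's mock-import list from ParamSklearn.* to autosklearn.pipeline.*. Sphinx's autodoc must import every documented module, and docs are often built on machines where the heavy scientific dependencies cannot be installed, so conf.py registers stand-ins in sys.modules before autodoc runs. A minimal sketch of that pattern, assuming unittest.mock as the stand-in factory (the actual conf.py defines its own mock objects, as the `class BaseEstimator(object)` hunk context hints; the module list here is an illustrative subset):

    import sys
    from unittest import mock

    # Register a stand-in for every module autodoc would otherwise try to
    # import; the names mirror a subset of the list edited in the hunk above.
    MOCK_MODULES = [
        'autosklearn.pipeline',
        'autosklearn.pipeline.implementations',
        'autosklearn.pipeline.implementations.OneHotEncoder',
        'autosklearn.pipeline.classification',
        'autosklearn.pipeline.regression',
    ]
    for mod_name in MOCK_MODULES:
        sys.modules[mod_name] = mock.MagicMock()

With such stand-ins in place, directives like the `.. autoclass::` entries in source/api.rst and source/components.rst can be processed without the real packages being importable.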
diff --git a/test/automl/base.py b/test/automl/base.py
index 924ac5d8b1..26579b9956 100644
--- a/test/automl/base.py
+++ b/test/automl/base.py
@@ -7,6 +7,9 @@


 class Base(unittest.TestCase):
+    """All tests that subclass this class must define their own output
+    directory and call self._setUp."""
+
     def setUp(self):
         self.test_dir = os.path.dirname(__file__)
diff --git a/test/automl/test_estimators.py b/test/automl/test_estimators.py
index fdf5f1fe5f..fdd4ec07ff 100644
--- a/test/automl/test_estimators.py
+++ b/test/automl/test_estimators.py
@@ -5,7 +5,7 @@
 import unittest

 import numpy as np
-import ParamSklearn.util as putil
+import autosklearn.pipeline.util as putil

 from autosklearn.classification import AutoSklearnClassifier
 from autosklearn.util.backend import Backend
diff --git a/test/automl/test_start_automl.py b/test/automl/test_start_automl.py
index ada9a21faf..2cd4765be8 100644
--- a/test/automl/test_start_automl.py
+++ b/test/automl/test_start_automl.py
@@ -6,13 +6,12 @@
 import sys
 import time

-import mock
 import numpy as np
 import six
+import sklearn.datasets

 import autosklearn.automl
-from autosklearn.util import Backend
-import ParamSklearn.util as putil
+import autosklearn.pipeline.util as putil
 from autosklearn.constants import *
 from autosklearn.cli.base_interface import store_and_or_load_data
@@ -40,11 +39,40 @@ def test_fit(self):
         del automl
         self._tearDown(output)

+    def test_binary_score(self):
+        """
+        Test the fix for binary classification: only index 1 of the second
+        dimension of the prediction matrix (the positive class) is scored.
+        """
+        if self.travis:
+            self.skipTest('This test does not currently run on travis-ci. '
+                          'Make sure it runs locally on your machine!')
+
+        output = os.path.join(self.test_dir, '..', '.tmp_test_binary_score')
+        self._setUp(output)
+
+        data = sklearn.datasets.make_classification(
+            n_samples=1000, n_features=20, n_redundant=5, n_informative=5,
+            n_repeated=2, n_clusters_per_class=2, random_state=1)
+        X_train = data[0][:700]
+        Y_train = data[1][:700]
+        X_test = data[0][700:]
+        Y_test = data[1][700:]
+
+        automl = autosklearn.automl.AutoML(output, output, 15, 15)
+        automl.fit(X_train, Y_train, task=BINARY_CLASSIFICATION)
+        self.assertEqual(automl._task, BINARY_CLASSIFICATION)
+
+        score = automl.score(X_test, Y_test)
+        self.assertGreaterEqual(score, 0.5)
+
+        del automl
+        self._tearDown(output)
+
     def test_automl_outputs(self):
         output = os.path.join(self.test_dir, '..', '.tmp_test_automl_outputs')
         self._setUp(output)
-
         name = '31_bac'
         dataset = os.path.join(self.test_dir, '..', '.data', name)
         data_manager_file = os.path.join(output, '.auto-sklearn',
diff --git a/test/evaluation/test_cv_evaluator.py b/test/evaluation/test_cv_evaluator.py
index ff73189014..460bba593b 100644
--- a/test/evaluation/test_cv_evaluator.py
+++ b/test/evaluation/test_cv_evaluator.py
@@ -11,8 +11,8 @@
 from autosklearn.constants import *
 from autosklearn.data.competition_data_manager import CompetitionDataManager
 from autosklearn.evaluation.cv_evaluator import CVEvaluator
-from autosklearn.util.paramsklearn import get_configuration_space
-from ParamSklearn.util import get_dataset
+from autosklearn.util.pipeline import get_configuration_space
+from autosklearn.pipeline.util import get_dataset

 N_TEST_RUNS = 10
diff --git a/test/evaluation/test_holdout_evaluator.py b/test/evaluation/test_holdout_evaluator.py
index 87d2a11b8e..9c184fe766 100644
--- a/test/evaluation/test_holdout_evaluator.py
+++ b/test/evaluation/test_holdout_evaluator.py
@@ -11,13 +11,13 @@
 from numpy.linalg import LinAlgError
 import
sklearn.datasets -from ParamSklearn.util import get_dataset +from autosklearn.pipeline.util import get_dataset from autosklearn.constants import * from autosklearn.data.competition_data_manager import CompetitionDataManager from autosklearn.evaluation.holdout_evaluator import HoldoutEvaluator from autosklearn.util.data import convert_to_bin -from autosklearn.util.paramsklearn import get_configuration_space +from autosklearn.util.pipeline import get_configuration_space N_TEST_RUNS = 10 diff --git a/test/evaluation/test_nested_cv_evaluator.py b/test/evaluation/test_nested_cv_evaluator.py index 3c9ca5f49c..c06fa8bd3f 100644 --- a/test/evaluation/test_nested_cv_evaluator.py +++ b/test/evaluation/test_nested_cv_evaluator.py @@ -11,8 +11,8 @@ from autosklearn.constants import * from autosklearn.data.competition_data_manager import CompetitionDataManager from autosklearn.evaluation.nested_cv_evaluator import NestedCVEvaluator -from autosklearn.util.paramsklearn import get_configuration_space -from ParamSklearn.util import get_dataset +from autosklearn.util.pipeline import get_configuration_space +from autosklearn.pipeline.util import get_dataset N_TEST_RUNS = 10 diff --git a/test/metalearning/pyMetaLearn/test_meta_base.py b/test/metalearning/pyMetaLearn/test_meta_base.py index 34741b9a52..22868a60cc 100644 --- a/test/metalearning/pyMetaLearn/test_meta_base.py +++ b/test/metalearning/pyMetaLearn/test_meta_base.py @@ -5,7 +5,7 @@ import numpy as np import pandas as pd -import ParamSklearn.classification +import autosklearn.pipeline.classification from autosklearn.metalearning.metalearning.meta_base import MetaBase, Run @@ -18,7 +18,8 @@ def setUp(self): data_dir = os.path.join(data_dir, 'test_meta_base_data') os.chdir(data_dir) - cs = ParamSklearn.classification.ParamSklearnClassifier.get_hyperparameter_search_space() + cs = autosklearn.pipeline.classification.SimpleClassificationPipeline\ + .get_hyperparameter_search_space() self.base = MetaBase(cs, data_dir) diff --git a/test/metalearning/pyMetaLearn/test_meta_features.py b/test/metalearning/pyMetaLearn/test_meta_features.py index d5869dee50..8b391b9dbe 100644 --- a/test/metalearning/pyMetaLearn/test_meta_features.py +++ b/test/metalearning/pyMetaLearn/test_meta_features.py @@ -9,8 +9,8 @@ from sklearn.preprocessing.imputation import Imputer from sklearn.datasets import make_multilabel_classification -from ParamSklearn.implementations.OneHotEncoder import OneHotEncoder -from ParamSklearn.implementations.StandardScaler import StandardScaler +from autosklearn.pipeline.implementations.OneHotEncoder import OneHotEncoder +from autosklearn.pipeline.implementations.StandardScaler import StandardScaler from autosklearn.metalearning.metafeatures.metafeature import MetaFeatureValue import autosklearn.metalearning.metafeatures.metafeatures as meta_features diff --git a/test/metalearning/pyMetaLearn/test_meta_features_sparse.py b/test/metalearning/pyMetaLearn/test_meta_features_sparse.py index 9e99777d51..6ad3bb61d7 100644 --- a/test/metalearning/pyMetaLearn/test_meta_features_sparse.py +++ b/test/metalearning/pyMetaLearn/test_meta_features_sparse.py @@ -11,8 +11,8 @@ from scipy import sparse from sklearn.preprocessing.imputation import Imputer -from ParamSklearn.implementations.OneHotEncoder import OneHotEncoder -from ParamSklearn.implementations.StandardScaler import StandardScaler +from autosklearn.pipeline.implementations.OneHotEncoder import OneHotEncoder +from autosklearn.pipeline.implementations.StandardScaler import StandardScaler import 
autosklearn.metalearning.metafeatures.metafeatures as meta_features import test_meta_features diff --git a/test/metalearning/pyMetaLearn/test_metalearner.py b/test/metalearning/pyMetaLearn/test_metalearner.py index e21abc078c..80bdfd3a65 100644 --- a/test/metalearning/pyMetaLearn/test_metalearner.py +++ b/test/metalearning/pyMetaLearn/test_metalearner.py @@ -9,7 +9,7 @@ import pandas as pd from HPOlibConfigSpace.configuration_space import Configuration -import ParamSklearn.classification +import autosklearn.pipeline.classification import autosklearn.metalearning.optimizers.metalearn_optimizer.metalearner as metalearner @@ -25,7 +25,7 @@ def setUp(self): data_dir = os.path.join(data_dir, 'test_meta_base_data') os.chdir(data_dir) - self.cs = ParamSklearn.classification.ParamSklearnClassifier\ + self.cs = autosklearn.pipeline.classification.SimpleClassificationPipeline\ .get_hyperparameter_search_space() self.meta_optimizer = metalearner.MetaLearningOptimizer( diff --git a/test/metalearning/test_metalearning.py b/test/metalearning/test_metalearning.py index ea537037f7..73a8dd175e 100644 --- a/test/metalearning/test_metalearning.py +++ b/test/metalearning/test_metalearning.py @@ -3,13 +3,13 @@ import unittest -from ParamSklearn.util import get_dataset +from autosklearn.pipeline.util import get_dataset from autosklearn.constants import * from autosklearn.metalearning.mismbo import calc_meta_features, \ calc_meta_features_encoded, \ create_metalearning_string_for_smac_call -from autosklearn.util.paramsklearn import get_configuration_space +from autosklearn.util.pipeline import get_configuration_space class MetafeatureValueDummy(object): diff --git a/test/test_pipeline/__init__.py b/test/test_pipeline/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/test_pipeline/components/__init__.py b/test/test_pipeline/components/__init__.py new file mode 100644 index 0000000000..8f0ce6cb7c --- /dev/null +++ b/test/test_pipeline/components/__init__.py @@ -0,0 +1 @@ +__author__ = 'feurerm' diff --git a/test/test_pipeline/components/classification/__init__.py b/test/test_pipeline/components/classification/__init__.py new file mode 100644 index 0000000000..8f0ce6cb7c --- /dev/null +++ b/test/test_pipeline/components/classification/__init__.py @@ -0,0 +1 @@ +__author__ = 'feurerm' diff --git a/test/test_pipeline/components/classification/test_adaboost.py b/test/test_pipeline/components/classification/test_adaboost.py new file mode 100644 index 0000000000..4905313498 --- /dev/null +++ b/test/test_pipeline/components/classification/test_adaboost.py @@ -0,0 +1,32 @@ +import unittest + +from autosklearn.pipeline.components.classification.adaboost import \ + AdaboostClassifier +from autosklearn.pipeline.util import _test_classifier + +import sklearn.metrics + + +class AdaBoostComponentTest(unittest.TestCase): + def test_default_configuration_iris(self): + for i in range(10): + predictions, targets = \ + _test_classifier(AdaboostClassifier) + self.assertAlmostEqual(0.93999999999999995, + sklearn.metrics.accuracy_score(predictions, targets)) + + def test_default_configuration_iris_sparse(self): + for i in range(10): + predictions, targets = \ + _test_classifier(AdaboostClassifier, sparse=True) + self.assertAlmostEqual(0.88, + sklearn.metrics.accuracy_score(predictions, + targets)) + + def test_default_configuration_digits(self): + for i in range(10): + predictions, targets = \ + _test_classifier(classifier=AdaboostClassifier, + dataset='digits') + self.assertAlmostEqual(0.6915604128718883, 
+                                sklearn.metrics.accuracy_score(predictions, targets))
diff --git a/test/test_pipeline/components/classification/test_bernoulli_nb.py b/test/test_pipeline/components/classification/test_bernoulli_nb.py
new file mode 100644
index 0000000000..498a40d832
--- /dev/null
+++ b/test/test_pipeline/components/classification/test_bernoulli_nb.py
@@ -0,0 +1,25 @@
+import unittest
+
+from autosklearn.pipeline.components.classification.bernoulli_nb import \
+    BernoulliNB
+from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit
+
+import sklearn.metrics
+
+
+class BernoulliNBComponentTest(unittest.TestCase):
+    def test_default_configuration(self):
+        for i in range(10):
+            predictions, targets = \
+                _test_classifier(BernoulliNB)
+            self.assertAlmostEqual(0.26000000000000001,
+                                   sklearn.metrics.accuracy_score(predictions,
+                                                                  targets))
+
+    def test_default_configuration_iterative_fit(self):
+        for i in range(10):
+            predictions, targets = \
+                _test_classifier_iterative_fit(BernoulliNB)
+            self.assertAlmostEqual(0.26000000000000001,
+                                   sklearn.metrics.accuracy_score(predictions,
+                                                                  targets))
\ No newline at end of file
diff --git a/test/test_pipeline/components/classification/test_decision_tree.py b/test/test_pipeline/components/classification/test_decision_tree.py
new file mode 100644
index 0000000000..f8083cb17f
--- /dev/null
+++ b/test/test_pipeline/components/classification/test_decision_tree.py
@@ -0,0 +1,30 @@
+import unittest
+
+from autosklearn.pipeline.components.classification.decision_tree import DecisionTree
+from autosklearn.pipeline.util import _test_classifier, _test_classifier_predict_proba
+
+import sklearn.metrics
+
+
+class DecisionTreeComponentTest(unittest.TestCase):
+    def test_default_configuration(self):
+        for i in range(10):
+            predictions, targets = _test_classifier(DecisionTree,
+                                                    dataset='iris')
+            self.assertAlmostEqual(0.92,
+                                   sklearn.metrics.accuracy_score(predictions,
+                                                                  targets))
+
+    def test_default_configuration_sparse(self):
+        for i in range(10):
+            predictions, targets = _test_classifier(DecisionTree, sparse=True)
+            self.assertAlmostEqual(0.69999999999999996,
+                                   sklearn.metrics.accuracy_score(predictions,
+                                                                  targets))
+
+    def test_default_configuration_predict_proba(self):
+        for i in range(10):
+            predictions, targets = _test_classifier_predict_proba(
+                DecisionTree, dataset='iris')
+            self.assertAlmostEqual(0.28069887755912964,
+                                   sklearn.metrics.log_loss(targets, predictions))
\ No newline at end of file
diff --git a/test/test_pipeline/components/classification/test_extra_trees.py b/test/test_pipeline/components/classification/test_extra_trees.py
new file mode 100644
index 0000000000..fe926f1926
--- /dev/null
+++ b/test/test_pipeline/components/classification/test_extra_trees.py
@@ -0,0 +1,32 @@
+import unittest
+
+from autosklearn.pipeline.components.classification.extra_trees import \
+    ExtraTreesClassifier
+from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit
+
+import sklearn.metrics
+
+
+class ExtraTreesComponentTest(unittest.TestCase):
+    def test_default_configuration(self):
+        for i in range(10):
+            predictions, targets = \
+                _test_classifier(ExtraTreesClassifier)
+            self.assertAlmostEqual(0.95999999999999996,
+                                   sklearn.metrics.accuracy_score(predictions, targets))
+
+    def test_default_configuration_sparse(self):
+        for i in range(10):
+            predictions, targets = \
+                _test_classifier(ExtraTreesClassifier, sparse=True)
+            self.assertAlmostEqual(0.71999999999999997,
+                                   sklearn.metrics.accuracy_score(predictions,
+                                                                  targets))
+
+ def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = \ + _test_classifier_iterative_fit(ExtraTreesClassifier) + self.assertAlmostEqual(0.95999999999999996, + sklearn.metrics.accuracy_score(predictions, + targets)) \ No newline at end of file diff --git a/test/test_pipeline/components/classification/test_gaussian_nb.py b/test/test_pipeline/components/classification/test_gaussian_nb.py new file mode 100644 index 0000000000..79d1007724 --- /dev/null +++ b/test/test_pipeline/components/classification/test_gaussian_nb.py @@ -0,0 +1,25 @@ +import unittest + +from autosklearn.pipeline.components.classification.gaussian_nb import \ + GaussianNB +from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit + +import sklearn.metrics + + +class GaussianNBComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = \ + _test_classifier(GaussianNB) + self.assertAlmostEqual(0.95999999999999996, + sklearn.metrics.accuracy_score(predictions, + targets)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = \ + _test_classifier_iterative_fit(GaussianNB) + self.assertAlmostEqual(0.95999999999999996, + sklearn.metrics.accuracy_score(predictions, + targets)) \ No newline at end of file diff --git a/test/test_pipeline/components/classification/test_gradient_boosting.py b/test/test_pipeline/components/classification/test_gradient_boosting.py new file mode 100644 index 0000000000..18137a6fa5 --- /dev/null +++ b/test/test_pipeline/components/classification/test_gradient_boosting.py @@ -0,0 +1,24 @@ +import unittest + +from autosklearn.pipeline.components.classification.gradient_boosting import \ + GradientBoostingClassifier +from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit + +import sklearn.metrics + + +class GradientBoostingComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = \ + _test_classifier(GradientBoostingClassifier) + self.assertAlmostEqual(0.95999999999999996, + sklearn.metrics.accuracy_score(predictions, targets)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = \ + _test_classifier_iterative_fit(GradientBoostingClassifier) + self.assertAlmostEqual(0.95999999999999996, + sklearn.metrics.accuracy_score(predictions, + targets)) \ No newline at end of file diff --git a/test/test_pipeline/components/classification/test_k_nearest_neighbor.py b/test/test_pipeline/components/classification/test_k_nearest_neighbor.py new file mode 100644 index 0000000000..dcc3d57e14 --- /dev/null +++ b/test/test_pipeline/components/classification/test_k_nearest_neighbor.py @@ -0,0 +1,31 @@ +import unittest + +from autosklearn.pipeline.components.classification.k_nearest_neighbors import \ + KNearestNeighborsClassifier +from autosklearn.pipeline.util import _test_classifier, _test_classifier_predict_proba + +import sklearn.metrics + + +class KNearestNeighborsComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = \ + _test_classifier(KNearestNeighborsClassifier) + self.assertAlmostEqual(0.959999999999999, + sklearn.metrics.accuracy_score(predictions, targets)) + + def test_default_configuration_sparse_data(self): + for i in range(10): + predictions, targets = \ + _test_classifier(KNearestNeighborsClassifier, sparse=True) + 
self.assertAlmostEqual(0.82,
+                                   sklearn.metrics.accuracy_score(predictions,
+                                                                  targets))
+
+    def test_default_configuration_predict_proba(self):
+        for i in range(10):
+            predictions, targets = \
+                _test_classifier_predict_proba(KNearestNeighborsClassifier)
+            self.assertAlmostEqual(1.381551055796429,
+                                   sklearn.metrics.log_loss(targets, predictions))
\ No newline at end of file
diff --git a/test/test_pipeline/components/classification/test_lda.py b/test/test_pipeline/components/classification/test_lda.py
new file mode 100644
index 0000000000..28915f0e35
--- /dev/null
+++ b/test/test_pipeline/components/classification/test_lda.py
@@ -0,0 +1,24 @@
+import unittest
+
+from autosklearn.pipeline.components.classification.lda import LDA
+from autosklearn.pipeline.util import _test_classifier
+
+import sklearn.metrics
+
+
+class LDAComponentTest(unittest.TestCase):
+    def test_default_configuration_iris(self):
+        for i in range(10):
+            predictions, targets = \
+                _test_classifier(LDA)
+            self.assertAlmostEqual(1.0,
+                                   sklearn.metrics.accuracy_score(predictions,
+                                                                  targets))
+
+    def test_default_configuration_digits(self):
+        for i in range(10):
+            predictions, targets = \
+                _test_classifier(classifier=LDA, dataset='digits')
+            self.assertAlmostEqual(0.88585306618093507,
+                                   sklearn.metrics.accuracy_score(predictions,
+                                                                  targets))
diff --git a/test/test_pipeline/components/classification/test_liblinear.py b/test/test_pipeline/components/classification/test_liblinear.py
new file mode 100644
index 0000000000..de30c1405d
--- /dev/null
+++ b/test/test_pipeline/components/classification/test_liblinear.py
@@ -0,0 +1,12 @@
+import unittest
+
+from autosklearn.pipeline.components.classification.liblinear_svc import LibLinear_SVC
+from autosklearn.pipeline.util import _test_classifier
+
+
+class LibLinearComponentTest(unittest.TestCase):
+    def test_default_configuration(self):
+        for i in range(10):
+            predictions, targets = _test_classifier(LibLinear_SVC,
+                                                    dataset='iris')
+            self.assertTrue(all(targets == predictions))
\ No newline at end of file
diff --git a/test/test_pipeline/components/classification/test_libsvm_svc.py b/test/test_pipeline/components/classification/test_libsvm_svc.py
new file mode 100644
index 0000000000..a62b464644
--- /dev/null
+++ b/test/test_pipeline/components/classification/test_libsvm_svc.py
@@ -0,0 +1,56 @@
+import unittest
+
+from autosklearn.pipeline.components.classification.libsvm_svc import LibSVM_SVC
+from autosklearn.pipeline.util import _test_classifier, \
+    _test_classifier_predict_proba, get_dataset
+
+import numpy as np
+import sklearn.metrics
+import sklearn.preprocessing
+
+
+class LibSVM_SVCComponentTest(unittest.TestCase):
+    def test_default_configuration(self):
+        for i in range(10):
+            predictions, targets = _test_classifier(LibSVM_SVC, dataset='iris')
+            self.assertAlmostEqual(0.96,
+                                   sklearn.metrics.accuracy_score(predictions, targets))
+
+    def test_default_configuration_predict_proba(self):
+        for i in range(10):
+            predictions, targets = _test_classifier_predict_proba(
+                LibSVM_SVC, sparse=True, dataset='digits',
+                train_size_maximum=500)
+            self.assertAlmostEqual(4.6680593525563063,
+                                   sklearn.metrics.log_loss(targets,
+                                                            predictions))
+
+        for i in range(10):
+            predictions, targets = _test_classifier_predict_proba(
+                LibSVM_SVC, sparse=True, dataset='iris')
+            self.assertAlmostEqual(0.8649665185853217,
+                                   sklearn.metrics.log_loss(targets,
+                                                            predictions))
+
+        # 2 class
+        for i in range(10):
+            X_train, Y_train, X_test, Y_test = get_dataset(dataset='iris')
+            remove_training_data = Y_train == 2
+            remove_test_data =
Y_test == 2
+            X_train = X_train[~remove_training_data]
+            Y_train = Y_train[~remove_training_data]
+            X_test = X_test[~remove_test_data]
+            Y_test = Y_test[~remove_test_data]
+            ss = sklearn.preprocessing.StandardScaler()
+            X_train = ss.fit_transform(X_train)
+            configuration_space = LibSVM_SVC.get_hyperparameter_search_space()
+            default = configuration_space.get_default_configuration()
+
+            cls = LibSVM_SVC(random_state=1, **{hp_name: default[hp_name]
+                                                for hp_name in default
+                                                if default[hp_name] is not None})
+
+            cls = cls.fit(X_train, Y_train)
+            prediction = cls.predict_proba(X_test)
+            self.assertAlmostEqual(sklearn.metrics.log_loss(Y_test, prediction),
+                                   0.69323680119641773)
diff --git a/test/test_pipeline/components/classification/test_multinomial_nb.py b/test/test_pipeline/components/classification/test_multinomial_nb.py
new file mode 100644
index 0000000000..8f8bc42379
--- /dev/null
+++ b/test/test_pipeline/components/classification/test_multinomial_nb.py
@@ -0,0 +1,46 @@
+import unittest
+
+from autosklearn.pipeline.components.classification.multinomial_nb import \
+    MultinomialNB
+from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit, \
+    get_dataset
+
+import numpy as np
+import sklearn.metrics
+import sklearn.preprocessing
+
+
+class MultinomialNBComponentTest(unittest.TestCase):
+    def test_default_configuration(self):
+        for i in range(10):
+            predictions, targets = \
+                _test_classifier(MultinomialNB)
+            self.assertAlmostEqual(0.97999999999999998,
+                                   sklearn.metrics.accuracy_score(predictions,
+                                                                  targets))
+
+    def test_default_configuration_iterative_fit(self):
+        for i in range(10):
+            predictions, targets = \
+                _test_classifier_iterative_fit(MultinomialNB)
+            self.assertAlmostEqual(0.97999999999999998,
+                                   sklearn.metrics.accuracy_score(predictions,
+                                                                  targets))
+
+    def test_default_configuration_negative_values(self):
+        # Custom preprocessing test to check if clipping to zero works
+        X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits')
+        original_X_train = X_train.copy()
+        ss = sklearn.preprocessing.StandardScaler()
+        X_train = ss.fit_transform(X_train)
+        configuration_space = MultinomialNB.get_hyperparameter_search_space()
+        default = configuration_space.get_default_configuration()
+
+        cls = MultinomialNB(random_state=1, **{hp_name: default[hp_name]
+                                               for hp_name in default
+                                               if default[hp_name] is not None})
+
+        cls = cls.fit(X_train, Y_train)
+        prediction = cls.predict(X_test)
+        self.assertAlmostEqual(np.nanmean(prediction == Y_test),
+                               0.88888888888888884)
\ No newline at end of file
diff --git a/test/test_pipeline/components/classification/test_passive_aggressive.py b/test/test_pipeline/components/classification/test_passive_aggressive.py
new file mode 100644
index 0000000000..56ec91b54a
--- /dev/null
+++ b/test/test_pipeline/components/classification/test_passive_aggressive.py
@@ -0,0 +1,40 @@
+import unittest
+
+from autosklearn.pipeline.components.classification.passive_aggressive import \
+    PassiveAggressive
+from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit
+
+import sklearn.metrics
+
+
+class PassiveAggressiveComponentTest(unittest.TestCase):
+    def test_default_configuration(self):
+        for i in range(10):
+            predictions, targets = _test_classifier(PassiveAggressive)
+            self.assertAlmostEqual(0.97999999999999998,
+                                   sklearn.metrics.accuracy_score(predictions,
+                                                                  targets))
+
+    def test_default_configuration_iterative_fit(self):
+        for i in range(10):
+            predictions, targets = _test_classifier_iterative_fit(
+                PassiveAggressive)
+
self.assertAlmostEqual(0.97999999999999998, + sklearn.metrics.accuracy_score( + predictions, targets)) + + def test_default_configuration_digits(self): + for i in range(10): + predictions, targets = \ + _test_classifier(classifier=PassiveAggressive, dataset='digits') + self.assertAlmostEqual(0.91924711596842745, + sklearn.metrics.accuracy_score(predictions, + targets)) + + def test_default_configuration_digits_iterative_fit(self): + for i in range(10): + predictions, targets = _test_classifier_iterative_fit(classifier=PassiveAggressive, + dataset='digits') + self.assertAlmostEqual(0.91924711596842745, + sklearn.metrics.accuracy_score( + predictions, targets)) \ No newline at end of file diff --git a/test/test_pipeline/components/classification/test_proj_logit.py b/test/test_pipeline/components/classification/test_proj_logit.py new file mode 100644 index 0000000000..d9972ea916 --- /dev/null +++ b/test/test_pipeline/components/classification/test_proj_logit.py @@ -0,0 +1,21 @@ +import unittest + +from autosklearn.pipeline.components.classification.proj_logit import ProjLogitCLassifier +from autosklearn.pipeline.util import _test_classifier + +import sklearn.metrics + + +class ProjLogitComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_classifier(ProjLogitCLassifier, dataset='iris') + self.assertAlmostEqual(0.98, + sklearn.metrics.accuracy_score(predictions, targets)) + + def test_default_configuration_digits(self): + for i in range(10): + predictions, targets = _test_classifier(ProjLogitCLassifier, + dataset='digits') + self.assertAlmostEqual(0.8986035215543412, + sklearn.metrics.accuracy_score(predictions, targets)) \ No newline at end of file diff --git a/test/test_pipeline/components/classification/test_qda.py b/test/test_pipeline/components/classification/test_qda.py new file mode 100644 index 0000000000..c8c2c0e2cf --- /dev/null +++ b/test/test_pipeline/components/classification/test_qda.py @@ -0,0 +1,25 @@ +import unittest + +from autosklearn.pipeline.components.classification.qda import QDA +from autosklearn.pipeline.util import _test_classifier + +import sklearn.metrics + + +class QDAComponentTest(unittest.TestCase): + def test_default_configuration_iris(self): + for i in range(10): + predictions, targets = \ + _test_classifier(QDA) + self.assertAlmostEqual(1.0, + sklearn.metrics.accuracy_score(predictions, + targets)) + + #@unittest.skip("QDA fails on this one") + def test_default_configuration_digits(self): + for i in range(10): + predictions, targets = \ + _test_classifier(classifier=QDA, dataset='digits') + self.assertAlmostEqual(0.18882817243472982, + sklearn.metrics.accuracy_score(predictions, + targets)) diff --git a/test/test_pipeline/components/classification/test_random_forest.py b/test/test_pipeline/components/classification/test_random_forest.py new file mode 100644 index 0000000000..81bd0a4606 --- /dev/null +++ b/test/test_pipeline/components/classification/test_random_forest.py @@ -0,0 +1,29 @@ +import unittest + +from autosklearn.pipeline.components.classification.random_forest import RandomForest +from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit + +import sklearn.metrics + + +class RandomForestComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_classifier(RandomForest) + self.assertAlmostEqual(0.95999999999999996, + sklearn.metrics.accuracy_score(predictions, targets)) + + def 
test_default_configuration_sparse(self): + for i in range(10): + predictions, targets = _test_classifier(RandomForest, sparse=True) + self.assertAlmostEqual(0.85999999999999999, + sklearn.metrics.accuracy_score(predictions, + targets)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = \ + _test_classifier_iterative_fit(RandomForest) + self.assertAlmostEqual(0.95999999999999996, + sklearn.metrics.accuracy_score( + predictions, targets)) \ No newline at end of file diff --git a/test/test_pipeline/components/classification/test_sgd.py b/test/test_pipeline/components/classification/test_sgd.py new file mode 100644 index 0000000000..883cbf7a59 --- /dev/null +++ b/test/test_pipeline/components/classification/test_sgd.py @@ -0,0 +1,40 @@ +import unittest + +from autosklearn.pipeline.components.classification.sgd import SGD +from autosklearn.pipeline.util import _test_classifier, _test_classifier_iterative_fit + +import sklearn.metrics + + +class SGDComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_classifier(SGD) + self.assertAlmostEqual(0.95999999999999996, + sklearn.metrics.accuracy_score(predictions, + targets)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = _test_classifier_iterative_fit( + SGD) + self.assertAlmostEqual(0.95999999999999996, + sklearn.metrics.accuracy_score( + predictions, targets)) + + def test_default_configuration_digits(self): + for i in range(10): + predictions, targets = \ + _test_classifier(SGD, dataset='digits') + self.assertAlmostEqual(0.91438979963570133, + sklearn.metrics.accuracy_score(predictions, + targets)) + + def test_default_configuration_digits_iterative_fit(self): + for i in range(10): + predictions, targets = _test_classifier_iterative_fit( + SGD, + dataset='digits') + self.assertAlmostEqual(0.91438979963570133, + sklearn.metrics.accuracy_score( + predictions, targets)) \ No newline at end of file diff --git a/test/test_pipeline/components/data_preprocessing/__init__.py b/test/test_pipeline/components/data_preprocessing/__init__.py new file mode 100644 index 0000000000..8f0ce6cb7c --- /dev/null +++ b/test/test_pipeline/components/data_preprocessing/__init__.py @@ -0,0 +1 @@ +__author__ = 'feurerm' diff --git a/test/test_pipeline/components/data_preprocessing/dataset.pkl b/test/test_pipeline/components/data_preprocessing/dataset.pkl new file mode 100644 index 0000000000..a976726d5c --- /dev/null +++ b/test/test_pipeline/components/data_preprocessing/dataset.pkl @@ -0,0 +1,898 @@ +nan 0.000000000000000000e+00 1.000000000000000000e+00 8.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 
[remaining rows of test/test_pipeline/components/data_preprocessing/dataset.pkl: 898 lines of whitespace-separated numeric feature values, many of them NaN]
0.000000000000000000e+00 3.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 1.300000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 1.130000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.090000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.000000000000000000e+00 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 
nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 
1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.090000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 1.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320099975585937500e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 
0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 
0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 
0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.100000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.090000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 5.200000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e-01 
2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 4.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 2.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 1.130000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 4.510000050067901611e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 
1.275000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.525000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.100000023841857910e+00 
9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.525000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.600000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 
nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 5.800000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 8.300000000000000000e+02 8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.000000000000000000e+00 1.525000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.200000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 
3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.100999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 1.000000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 5.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 
1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 
0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 1.000000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 9.660999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 
[... data hunk elided: a long run of added (+) rows of whitespace-separated floating-point values and nan entries, continuing the numeric data file introduced by this diff ...]
nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 4.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 
1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.250000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320099975585937500e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 5.000000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 5.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 
0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.900000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 5.950000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.200000000000000000e+03 6.110000000000000000e+02 nan 
0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 3.550000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.500000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.150000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 
0.000000000000000000e+00 3.200999975204467773e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 
1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.500000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.000999755859375000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan 
nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.220000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan 
nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.500000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 
nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 9.990000128746032715e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan 
nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 7.000000000000000000e+02 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.999999761581420898e-01 1.050000000000000000e+03 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.299999952316284180e+00 1.090000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.099000244140625000e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 6.100000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 5.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 
0.000000000000000000e+00 7.990000247955322266e-01 2.498999938964843750e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.989999771118164062e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 
7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.009999990463256836e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 3.350000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.090000033378601074e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.009999990463256836e-01 2.551000061035156250e+02 2.690000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 
0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.000000000000000000e+02 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 8.319000244140625000e+02 8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.210000097751617432e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.275000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.950000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 
3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 3.350000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.399999976158142090e+00 1.310000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 2.799999952316284180e+00 6.100000000000000000e+02 
[... numeric data rows elided: this portion of the diff adds a large data matrix, one '+' line per row, each row containing roughly 38 space-separated floating-point/NaN values in scientific notation (e.g. "+nan 0.000000000000000000e+00 1.000000000000000000e+00 ... nan 2.000000000000000000e+00"); the raw values are omitted here ...]
7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 
0.000000000000000000e+00 3.200999975204467773e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.250000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 
3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 2.800000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 7.610000000000000000e+02 nan 
0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.000000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 
6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 1.274900024414062500e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 7.590000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 8.300000000000000000e+02 
8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.009999990463256836e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.220000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 1.500000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 
nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.299000024795532227e+00 1.050000000000000000e+03 1.220000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.001000061035156250e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 1.275000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.320000000000000000e+03 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 2.550000000000000000e+02 2.700000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 8.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 5.200000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 
3.000000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.999999761581420898e-01 6.100000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 
1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 3.748999938964843750e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 1.250000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 5.199000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 
1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 1.250000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 6.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 7.600000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 
0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 6.400000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 7.610000000000000000e+02 nan 
0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.525000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 5.000000000000000000e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 
0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.009999990463256836e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.510000050067901611e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 3.350000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.610000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.320000000000000000e+03 1.500000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 
6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 7.990000247955322266e-01 6.090000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.210000097751617432e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.274900024414062500e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 
6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.069000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.000000000000000000e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 6.099000244140625000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.500000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 
nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.220000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.200999975204467773e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.525000000000000000e+03 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.500000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.350000000000000000e+02 1.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 1.300000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan 
nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.090000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.598999977111816406e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 5.950000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 7.500000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 
0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.500000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.801000118255615234e+00 1.000000000000000000e+03 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 5.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 
0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.010000109672546387e-01 6.099000244140625000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 
0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 1.300000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 6.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.100000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 2.000000000000000000e+00 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 
2.000000000000000000e+00 +nan 0.000000000000000000e+00 6.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.100000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.500000000000000000e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 3.350000000000000000e+02 6.110000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.000000238418579102e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan 
nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 3.748999938964843750e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 7.620000000000000000e+02 1.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 3.750000000000000000e+02 6.120000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.000000119209289551e-01 2.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 3.200999975204467773e+00 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 4.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 2.299999952316284180e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.500000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 2.500000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 3.000000000000000000e+00 5.500000000000000000e+01 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 9.000000000000000000e+02 0.000000000000000000e+00 nan 2.000000000000000000e+00 nan 
2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.999999761581420898e-01 9.660000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 1.001000046730041504e+00 5.000000000000000000e+01 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.300000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 6.100000000000000000e+02 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.598999977111816406e+00 1.500000000000000000e+03 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 
0.000000000000000000e+00 6.990000009536743164e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 8.500000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 5.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000059604644775e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 5.000000000000000000e+02 nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 3.001000061035156250e+02 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 8.000000119209289551e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 6.100000000000000000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +8.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 4.000000000000000000e+00 
0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.250000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.100000000000000000e+02 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 5.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 8.000000119209289551e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 2.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600999951362609863e+00 8.319000244140625000e+02 8.810000000000000000e+02 nan 0.000000000000000000e+00 nan 1.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.999999880790710449e-01 6.099000244140625000e+02 4.170000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan 1.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 4.000000000000000000e+00 3.851000061035156250e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 5.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.010000109672546387e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 
0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.009999990463256836e-01 1.320000000000000000e+03 7.620000000000000000e+02 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.200000047683715820e+00 1.500000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 7.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 6.099000244140625000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 3.350000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 5.000000000000000000e-01 3.350000000000000000e+02 3.000000000000000000e+03 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 4.880000000000000000e+03 0.000000000000000000e+00 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 2.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 
1.000000000000000000e+00 6.990000009536743164e-01 1.320000000000000000e+03 3.010000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 1.000000000000000000e+00 nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 3.200000047683715820e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 4.500000000000000000e+01 nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +3.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan 1.000000000000000000e+00 1.600000023841857910e+00 1.220000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 4.000000000000000000e+00 +nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 6.000000000000000000e+01 0.000000000000000000e+00 nan nan 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.000000238418579102e-01 1.320000000000000000e+03 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 7.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan 0.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 0.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 7.620000000000000000e+02 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 6.990000009536743164e-01 6.100000000000000000e+02 0.000000000000000000e+00 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 nan 0.000000000000000000e+00 0.000000000000000000e+00 nan 1.000000000000000000e+00 1.000000000000000000e+00 0.000000000000000000e+00 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 1.000000000000000000e+00 3.200000047683715820e+00 1.320000000000000000e+03 4.880000000000000000e+03 nan 0.000000000000000000e+00 nan 2.000000000000000000e+00 +nan 0.000000000000000000e+00 4.000000000000000000e+00 0.000000000000000000e+00 0.000000000000000000e+00 nan nan nan 3.500000000000000000e+02 nan nan 3.000000000000000000e+00 nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan nan 0.000000000000000000e+00 1.600000023841857910e+00 
[… remaining rows of the added numeric data fixture truncated for readability: space-separated floating-point feature values, predominantly 0.0 and nan …]
diff --git a/test/test_pipeline/components/data_preprocessing/test_balancing.py b/test/test_pipeline/components/data_preprocessing/test_balancing.py new file mode 100644 index 0000000000..8da740bd53 --- /dev/null +++ b/test/test_pipeline/components/data_preprocessing/test_balancing.py @@ -0,0 +1,163 @@ +__author__ = 'feurerm' + +import copy +import unittest + +import numpy as np +import sklearn.datasets +import sklearn.metrics + +from autosklearn.pipeline.components.data_preprocessing.balancing import Balancing +from autosklearn.pipeline.classification import SimpleClassificationPipeline +from autosklearn.pipeline.components.classification.adaboost import AdaboostClassifier +from autosklearn.pipeline.components.classification.decision_tree import DecisionTree +from autosklearn.pipeline.components.classification.extra_trees import ExtraTreesClassifier +from autosklearn.pipeline.components.classification.gradient_boosting import GradientBoostingClassifier +from autosklearn.pipeline.components.classification.random_forest import RandomForest +from autosklearn.pipeline.components.classification.liblinear_svc import LibLinear_SVC +from autosklearn.pipeline.components.classification.libsvm_svc import LibSVM_SVC +from autosklearn.pipeline.components.classification.sgd import SGD +from autosklearn.pipeline.components.feature_preprocessing\ + .extra_trees_preproc_for_classification import ExtraTreesPreprocessor +from autosklearn.pipeline.components.feature_preprocessing.liblinear_svc_preprocessor import LibLinear_Preprocessor + + +class BalancingComponentTest(unittest.TestCase): + def test_balancing_get_weights_treed_single_label(self): + Y = np.array([0] * 80 + [1] * 20) + balancing = Balancing(strategy='weighting') + init_params, fit_params = balancing.get_weights( + Y, 'adaboost', None, None, None) + self.assertTrue(np.allclose(fit_params['classifier:sample_weight'], + np.array([0.4] * 80 + [1.6] * 20))) + #init_params, fit_params = balancing.get_weights( + # Y, None, 'extra_trees_preproc_for_classification', None, None) + #self.assertTrue(np.allclose(fit_params['preprocessor:sample_weight'], + # np.array([0.4] * 80 + [1.6] * 20)))
+ + def test_balancing_get_weights_treed_multilabel(self): + Y = np.array([[0, 0, 0]] * 100 + [[1, 0, 0]] * 100 + [[0, 1, 0]] * 100 + + [[1, 1, 0]] * 100 + [[0, 0, 1]] * 100 + [[1, 0, 1]] * 10) + balancing = Balancing(strategy='weighting') + init_params, fit_params = balancing.get_weights( + Y, 'adaboost', None, None, None) + self.assertTrue(np.allclose(fit_params['classifier:sample_weight'], + np.array([0.4] * 500 + [4.0] * 10))) + #init_params, fit_params = balancing.get_weights( + # Y, None, 'extra_trees_preproc_for_classification', None, None) + #self.assertTrue(np.allclose(fit_params['preprocessor:sample_weight'], + # np.array([0.4] * 500 + [4.0] * 10))) + + def test_balancing_get_weights_svm_sgd(self): + Y = np.array([0] * 80 + [1] * 20) + balancing = Balancing(strategy='weighting') + init_params, fit_params = balancing.get_weights( + Y, 'libsvm_svc', None, None, None) + self.assertEqual(("classifier:class_weight", "auto"), + list(init_params.items())[0]) + init_params, fit_params = balancing.get_weights( + Y, None, 'liblinear_svc_preprocessor', None, None) + self.assertEqual(("preprocessor:class_weight", "auto"), + list(init_params.items())[0]) + + def test_weighting_effect(self): + data = sklearn.datasets.make_classification( + n_samples=1000, n_features=20, n_redundant=5, n_informative=5, + n_repeated=2, n_clusters_per_class=2, weights=[0.8, 0.2], + random_state=1) + + for name, clf, acc_no_weighting, acc_weighting in \ + [('adaboost', AdaboostClassifier, 0.709, 0.662), + ('decision_tree', DecisionTree, 0.683, 0.726), + ('extra_trees', ExtraTreesClassifier, 0.812, 0.812), + ('gradient_boosting', GradientBoostingClassifier, + 0.800, 0.760), + ('random_forest', RandomForest, 0.849, 0.780), + ('libsvm_svc', LibSVM_SVC, 0.571, 0.658), + ('liblinear_svc', LibLinear_SVC, 0.685, 0.699), + ('sgd', SGD, 0.602, 0.720)]: + for strategy, acc in [('none', acc_no_weighting), + ('weighting', acc_weighting)]: + # Fit + data_ = copy.copy(data) + X_train = data_[0][:700] + Y_train = data_[1][:700] + X_test = data_[0][700:] + Y_test = data_[1][700:] + + cs = SimpleClassificationPipeline.\ + get_hyperparameter_search_space( + include={'classifier': [name]}) + default = cs.get_default_configuration() + default._values['balancing:strategy'] = strategy + classifier = SimpleClassificationPipeline(default, random_state=1) + predictor = classifier.fit(X_train, Y_train) + predictions = predictor.predict(X_test) + self.assertAlmostEqual(acc, + sklearn.metrics.f1_score(predictions, Y_test), + places=3) + + # pre_transform and fit_estimator + data_ = copy.copy(data) + X_train = data_[0][:700] + Y_train = data_[1][:700] + X_test = data_[0][700:] + Y_test = data_[1][700:] + + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( + include={'classifier': [name]}) + default =
cs.get_default_configuration() + default._values['balancing:strategy'] = strategy + classifier = SimpleClassificationPipeline(default, random_state=1) + Xt, fit_params = classifier.pre_transform(X_train, Y_train) + classifier.fit_estimator(Xt, Y_train, fit_params=fit_params) + predictions = classifier.predict(X_test) + self.assertAlmostEqual(acc, + sklearn.metrics.f1_score( + predictions, Y_test), + places=3) + + for name, pre, acc_no_weighting, acc_weighting in \ + [('extra_trees_preproc_for_classification', + ExtraTreesPreprocessor, 0.682, 0.634), + ('liblinear_svc_preprocessor', LibLinear_Preprocessor, + 0.714, 0.596)]: + for strategy, acc in [('none', acc_no_weighting), + ('weighting', acc_weighting)]: + data_ = copy.copy(data) + X_train = data_[0][:700] + Y_train = data_[1][:700] + X_test = data_[0][700:] + Y_test = data_[1][700:] + + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( + include={'classifier': ['sgd'], 'preprocessor': [name]}) + default = cs.get_default_configuration() + default._values['balancing:strategy'] = strategy + classifier = SimpleClassificationPipeline(default, random_state=1) + predictor = classifier.fit(X_train, Y_train) + predictions = predictor.predict(X_test) + self.assertAlmostEqual(acc, + sklearn.metrics.f1_score( + predictions, Y_test), + places=3) + + # pre_transform and fit_estimator + data_ = copy.copy(data) + X_train = data_[0][:700] + Y_train = data_[1][:700] + X_test = data_[0][700:] + Y_test = data_[1][700:] + + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( + include={'classifier': ['sgd'], 'preprocessor': [name]}) + default = cs.get_default_configuration() + default._values['balancing:strategy'] = strategy + classifier = SimpleClassificationPipeline(default, random_state=1) + Xt, fit_params = classifier.pre_transform(X_train, Y_train) + classifier.fit_estimator(Xt, Y_train, fit_params=fit_params) + predictions = classifier.predict(X_test) + self.assertAlmostEqual(acc, + sklearn.metrics.f1_score( + predictions, Y_test), + places=3) \ No newline at end of file diff --git a/test/test_pipeline/components/data_preprocessing/test_imputation.py b/test/test_pipeline/components/data_preprocessing/test_imputation.py new file mode 100644 index 0000000000..1f94ab8a5f --- /dev/null +++ b/test/test_pipeline/components/data_preprocessing/test_imputation.py @@ -0,0 +1,32 @@ +import unittest + +from scipy import sparse + +from autosklearn.pipeline.components.data_preprocessing.imputation import Imputation +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase + + +class ImputationTest(PreprocessingTestCase): + def test_default_configuration(self): + transformations = [] + for i in range(10): + transformation, original = _test_preprocessing(Imputation) + self.assertEqual(transformation.shape, original.shape) + self.assertTrue((transformation == original).all()) + transformations.append(transformation) + if len(transformations) > 1: + self.assertTrue( + (transformations[-1] == transformations[-2]).all()) + + def test_default_configuration_sparse_data(self): + transformations = [] + transformation, original = _test_preprocessing(Imputation, + make_sparse=True) + self.assertEqual(transformation.shape, original.shape) + self.assertTrue((transformation.data == original.data).all()) + self.assertIsInstance(transformation, sparse.csc_matrix) + transformations.append(transformation) + + def test_preprocessing_dtype(self): + super(ImputationTest, self)._test_preprocessing_dtype(Imputation, + add_NaNs=True) 
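+ # Editor's note, not part of the original commit: on fully observed data, imputation is an identity transform, which is what the equality assertions in test_default_configuration verify; add_NaNs=True in the dtype test routes actual missing values through the imputer.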
\ No newline at end of file diff --git a/test/test_pipeline/components/data_preprocessing/test_one_hot_encoding.py b/test/test_pipeline/components/data_preprocessing/test_one_hot_encoding.py new file mode 100644 index 0000000000..d062a202de --- /dev/null +++ b/test/test_pipeline/components/data_preprocessing/test_one_hot_encoding.py @@ -0,0 +1,120 @@ +import os +import unittest + +import numpy as np +from scipy import sparse + +from autosklearn.pipeline.components.data_preprocessing.one_hot_encoding import OneHotEncoder +from autosklearn.pipeline.util import _test_preprocessing + + +class OneHotEncoderTest(unittest.TestCase): + def setUp(self): + self.categorical = [True, + True, + True, + False, + False, + True, + True, + True, + False, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + True, + False, + False, + False, + True, + True, + True, + True] + this_directory = os.path.dirname(__file__) + self.X_train = np.loadtxt(os.path.join(this_directory, "dataset.pkl")) + + def test_default_configuration(self): + transformations = [] + for i in range(10): + configuration_space = OneHotEncoder.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + + preprocessor = OneHotEncoder(random_state=1, + categorical_features=self.categorical, + **{hp_name: default[hp_name] for hp_name in + default if default[hp_name] is not None}) + + transformer = preprocessor.fit(self.X_train.copy()) + Xt = transformer.transform(self.X_train.copy()) + transformations.append(Xt) + if len(transformations) > 1: + self.assertFalse( + (transformations[-1] != transformations[-2]).all()) + + def test_default_configuration_no_encoding(self): + transformations = [] + for i in range(10): + transformation, original = _test_preprocessing(OneHotEncoder) + self.assertEqual(transformation.shape, original.shape) + self.assertTrue((transformation == original).all()) + transformations.append(transformation) + if len(transformations) > 1: + self.assertTrue( + (transformations[-1] == transformations[-2]).all()) + + def test_default_configuration_sparse_data(self): + transformations = [] + + self.X_train[~np.isfinite(self.X_train)] = 0 + self.X_train = sparse.csc_matrix(self.X_train) + + for i in range(10): + configuration_space = OneHotEncoder.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + + preprocessor = OneHotEncoder(random_state=1, + categorical_features=self.categorical, + **{hp_name: default[hp_name] for + hp_name in + default if + default[hp_name] is not None}) + + transformer = preprocessor.fit(self.X_train.copy()) + Xt = transformer.transform(self.X_train.copy()) + transformations.append(Xt) + if len(transformations) > 1: + self.assertFalse( + (transformations[-1].todense() != transformations[ + -2].todense()).all()) + + def test_default_configuration_sparse_no_encoding(self): + transformations = [] + + for i in range(10): + transformation, original = _test_preprocessing(OneHotEncoder, + make_sparse=True) + self.assertEqual(transformation.shape, original.shape) + self.assertTrue((transformation.todense() == original.todense()).all()) + transformations.append(transformation) + if len(transformations) > 1: + self.assertTrue( + (transformations[-1].todense() == transformations[-2].todense()).all()) diff --git a/test/test_pipeline/components/data_preprocessing/test_scaling.py 
b/test/test_pipeline/components/data_preprocessing/test_scaling.py new file mode 100644 index 0000000000..0182d26446 --- /dev/null +++ b/test/test_pipeline/components/data_preprocessing/test_scaling.py @@ -0,0 +1,53 @@ +import unittest + +import numpy as np +import sklearn.datasets + +from autosklearn.pipeline.components.data_preprocessing.rescaling import RescalingChoice +from autosklearn.pipeline.util import get_dataset + + +class ScalingComponentTest(unittest.TestCase): + def _test_helper(self, Preprocessor, dataset=None, make_sparse=False): + X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset, + make_sparse=make_sparse) + original_X_train = X_train.copy() + configuration_space = Preprocessor.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + + preprocessor = Preprocessor(random_state=1, + **{hp_name: default[hp_name] for hp_name in + default if default[hp_name] is not None}) + preprocessor = preprocessor.choice + transformer = preprocessor.fit(X_train, Y_train) + return transformer.transform(X_train), original_X_train + + def test_boston_is_not_scaled(self): + data = sklearn.datasets.load_boston()['data'] + self.assertGreaterEqual(np.max(data), 100) + + def test_default_configuration(self): + transformations = [] + for i in range(10): + transformation, original = self._test_helper(RescalingChoice, + dataset='boston') + # The maximum is around 1.95 for the transformed array... + self.assertLessEqual(np.max(transformation), 2) + self.assertFalse((original == transformation).all()) + transformations.append(transformation) + if len(transformations) > 1: + self.assertTrue( + (transformations[-1] == transformations[-2]).all()) + + def test_default_configuration_with_sparse_data(self): + preprocessing = self._test_helper(RescalingChoice, dataset='boston', + make_sparse=True) + transformation, original = preprocessing + self.assertEqual(original.getnnz(), transformation.getnnz()) + self.assertAlmostEqual(1, transformation.max(), places=6) + self.assertTrue(~np.allclose(original.data, transformation.data)) + + @unittest.skip("Does not work at the moment.") + def test_preprocessing_dtype(self): + super(ScalingComponentTest, self)._test_helper( + RescalingChoice) diff --git a/test/test_pipeline/components/feature_preprocessing/__init__.py b/test/test_pipeline/components/feature_preprocessing/__init__.py new file mode 100644 index 0000000000..8f0ce6cb7c --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/__init__.py @@ -0,0 +1 @@ +__author__ = 'feurerm' diff --git a/test/test_pipeline/components/feature_preprocessing/test_NoPreprocessing.py b/test/test_pipeline/components/feature_preprocessing/test_NoPreprocessing.py new file mode 100644 index 0000000000..ff613d68aa --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_NoPreprocessing.py @@ -0,0 +1,23 @@ +import numpy as np +import unittest + +from autosklearn.pipeline.components.feature_preprocessing.no_preprocessing import NoPreprocessing +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase + + +class NoneComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(NoPreprocessing) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], original.shape[1]) + self.assertFalse((transformation == 0).all()) + self.assertEqual(np.sum(original), np.sum(transformation)) + self.assertEqual(np.min(original), 
np.min(transformation)) + self.assertEqual(np.max(original), np.max(transformation)) + self.assertEqual(np.std(original), np.std(transformation)) + self.assertEqual(np.mean(original), np.mean(transformation)) + + def test_preprocessing_dtype(self): + super(NoneComponentTest, self)._test_preprocessing_dtype(NoPreprocessing) + + diff --git a/test/test_pipeline/components/feature_preprocessing/test_choice.py b/test/test_pipeline/components/feature_preprocessing/test_choice.py new file mode 100644 index 0000000000..9ae503f82c --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_choice.py @@ -0,0 +1,34 @@ +from __future__ import print_function + +import unittest + +import autosklearn.pipeline.components.feature_preprocessing as fp + + +class FeatureProcessingTest(unittest.TestCase): + def test_get_available_components(self): + # Target type + for target_type, num_values in [('classification', 16), + ('regression', 12)]: + data_properties = {'target_type': target_type} + + available_components = fp.FeaturePreprocessorChoice\ + .get_available_components(data_properties) + + self.assertEqual(len(available_components), num_values) + + # Multiclass + data_properties = {'target_type': 'classification', + 'multiclass': True} + available_components = fp.FeaturePreprocessorChoice \ + .get_available_components(data_properties) + + self.assertEqual(len(available_components), 16) + + # Multilabel + data_properties = {'target_type': 'classification', + 'multilabel': True} + available_components = fp.FeaturePreprocessorChoice \ + .get_available_components(data_properties) + + self.assertEqual(len(available_components), 12) diff --git a/test/test_pipeline/components/feature_preprocessing/test_densifier.py b/test/test_pipeline/components/feature_preprocessing/test_densifier.py new file mode 100644 index 0000000000..996d655b91 --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_densifier.py @@ -0,0 +1,18 @@ +import unittest + +import numpy as np + +from autosklearn.pipeline.components.feature_preprocessing.densifier import Densifier +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase + + +class DensifierComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(Densifier, make_sparse=True) + self.assertIsInstance(transformation, np.ndarray) + self.assertEqual(transformation.shape, original.shape) + self.assertIsInstance(transformation, np.ndarray) + + def test_preprocessing_dtype(self): + super(DensifierComponentTest, self)._test_preprocessing_dtype(Densifier) + diff --git a/test/test_pipeline/components/feature_preprocessing/test_extra_trees.py b/test/test_pipeline/components/feature_preprocessing/test_extra_trees.py new file mode 100644 index 0000000000..b1b9656b17 --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_extra_trees.py @@ -0,0 +1,39 @@ +import unittest + +from sklearn.linear_model import RidgeClassifier +from autosklearn.pipeline.components.feature_preprocessing.extra_trees_preproc_for_classification import \ + ExtraTreesPreprocessor +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, \ + get_dataset +import sklearn.metrics + + +class ExtraTreesComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(ExtraTreesPreprocessor) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all())
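+ # Editor's note, not part of the original commit: the extra-trees preprocessor selects a feature subset using importances from a fitted ExtraTreesClassifier, so only the number of rows is guaranteed to survive the transform; this is why the test asserts shape[0] equality rather than full shape equality.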
+ + def test_default_configuration_classify(self): + for i in range(2): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=False) + configuration_space = ExtraTreesPreprocessor.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = ExtraTreesPreprocessor(random_state=1, + **{hp_name: default[hp_name] + for hp_name in default}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + classifier = RidgeClassifier() + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(accuracy, 0.87310261080752882, places=2) + + def test_preprocessing_dtype(self): + super(ExtraTreesComponentTest, + self)._test_preprocessing_dtype(ExtraTreesPreprocessor) diff --git a/test/test_pipeline/components/feature_preprocessing/test_fast_ica.py b/test/test_pipeline/components/feature_preprocessing/test_fast_ica.py new file mode 100644 index 0000000000..c330ba5a9b --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_fast_ica.py @@ -0,0 +1,41 @@ +import unittest + +from sklearn.linear_model import Ridge +from autosklearn.pipeline.components.feature_preprocessing.fast_ica import \ + FastICA +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, \ + get_dataset +import sklearn.metrics + + +class FastICAComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(FastICA, + dataset="diabetes") + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_regression(self): + for i in range(5): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='diabetes') + configuration_space = FastICA.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = FastICA(random_state=1, + **{hp_name: default[hp_name] for hp_name in + default}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a regressor on top + classifier = Ridge() + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.r2_score(Y_test, predictions) + self.assertAlmostEqual(accuracy, 0.32614416980439365) + + @unittest.skip("Always returns float64") + def test_preprocessing_dtype(self): + super(FastICAComponentTest, + self)._test_preprocessing_dtype(FastICA, dataset='diabetes') + diff --git a/test/test_pipeline/components/feature_preprocessing/test_feature_agglomeration.py b/test/test_pipeline/components/feature_preprocessing/test_feature_agglomeration.py new file mode 100644 index 0000000000..284bdd0754 --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_feature_agglomeration.py @@ -0,0 +1,39 @@ +import unittest + +from sklearn.ensemble import RandomForestClassifier +from autosklearn.pipeline.components.feature_preprocessing.feature_agglomeration import FeatureAgglomeration +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, \ + get_dataset +import sklearn.metrics + + +class FeatureAgglomerationComponentTest(PreprocessingTestCase): + def
test_default_configuration(self): + transformation, original = _test_preprocessing(FeatureAgglomeration) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_classify(self): + for i in range(3): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=False) + configuration_space = FeatureAgglomeration.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = FeatureAgglomeration(random_state=1, + **{hp_name: default[hp_name] for + hp_name in default}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + classifier = RandomForestClassifier(random_state=1) + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(accuracy, 0.8026715) + + def test_preprocessing_dtype(self): + super(FeatureAgglomerationComponentTest, + self)._test_preprocessing_dtype(FeatureAgglomeration, + test_sparse=False) diff --git a/test/test_pipeline/components/feature_preprocessing/test_gem.py b/test/test_pipeline/components/feature_preprocessing/test_gem.py new file mode 100644 index 0000000000..2ad8115cbe --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_gem.py @@ -0,0 +1,39 @@ +import unittest + +from autosklearn.pipeline.components.classification.sgd import SGD +from autosklearn.pipeline.components.feature_preprocessing.gem import GEM +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, get_dataset +import sklearn.metrics + + +class GEMComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(GEM) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_classify(self): + for i in range(3): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=False) + configuration_space = GEM.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = GEM(random_state=1, + **{hp_name: default[hp_name] for hp_name in + default}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + config = SGD.get_hyperparameter_search_space( \ + ).get_default_configuration() + classifier = SGD(random_state=1, **config._values) + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertGreaterEqual(accuracy, 0.85) + + def test_preprocessing_dtype(self): + super(GEMComponentTest, self)._test_preprocessing_dtype(GEM, + test_sparse=False) diff --git a/test/test_pipeline/components/feature_preprocessing/test_kernel_pca.py b/test/test_pipeline/components/feature_preprocessing/test_kernel_pca.py new file mode 100644 index 0000000000..042d735f20 --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_kernel_pca.py @@ -0,0 +1,49 @@ +import unittest + +from sklearn.linear_model import RidgeClassifier +from autosklearn.pipeline.components.feature_preprocessing.kernel_pca import \ + KernelPCA +from 
autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, \ + get_dataset +import sklearn.metrics + + +class KernelPCAComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(KernelPCA, + dataset='digits') + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_sparse(self): + transformation, original = _test_preprocessing(KernelPCA, + make_sparse=True, + dataset='digits') + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_classify(self): + for i in range(5): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=False) + configuration_space = KernelPCA.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = KernelPCA(random_state=1, + **{hp_name: default[hp_name] for hp_name in + default if default[hp_name] is not None}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + classifier = RidgeClassifier() + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(accuracy, 0.096539162112932606) + + @unittest.skip("Always returns float64") + def test_preprocessing_dtype(self): + super(KernelPCAComponentTest, + self)._test_preprocessing_dtype(KernelPCA) + diff --git a/test/test_pipeline/components/feature_preprocessing/test_kitchen_sinks.py b/test/test_pipeline/components/feature_preprocessing/test_kitchen_sinks.py new file mode 100644 index 0000000000..37ae4f5578 --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_kitchen_sinks.py @@ -0,0 +1,19 @@ +import unittest + +import numpy as np + +from autosklearn.pipeline.components.feature_preprocessing.kitchen_sinks import RandomKitchenSinks +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase + + +class KitchenSinkComponent(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(RandomKitchenSinks) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], 100) + self.assertFalse((transformation == 0).all()) + + @unittest.skip("Right now, the RBFSampler returns a float64 array!") + def test_preprocessing_dtype(self): + super(KitchenSinkComponent, + self)._test_preprocessing_dtype(RandomKitchenSinks) diff --git a/test/test_pipeline/components/feature_preprocessing/test_liblinear.py b/test/test_pipeline/components/feature_preprocessing/test_liblinear.py new file mode 100644 index 0000000000..543a557e47 --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_liblinear.py @@ -0,0 +1,42 @@ +import unittest + +from sklearn.linear_model import RidgeClassifier +from autosklearn.pipeline.components.feature_preprocessing.liblinear_svc_preprocessor import \ + LibLinear_Preprocessor +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, \ + get_dataset +import sklearn.metrics + + +class LiblinearComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(LibLinear_Preprocessor) + 
self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_classify(self): + for i in range(2): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=False) + configuration_space = LibLinear_Preprocessor.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = LibLinear_Preprocessor(random_state=1, + **{hp_name: default[hp_name] + for hp_name in + default if default[ + hp_name] is not None}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + classifier = RidgeClassifier() + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(accuracy, 0.87917425622343659, places=2) + + def test_preprocessing_dtype(self): + super(LiblinearComponentTest, + self)._test_preprocessing_dtype(LibLinear_Preprocessor, + test_sparse=False) diff --git a/test/test_pipeline/components/feature_preprocessing/test_nystroem_sampler.py b/test/test_pipeline/components/feature_preprocessing/test_nystroem_sampler.py new file mode 100644 index 0000000000..edc9cfff25 --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_nystroem_sampler.py @@ -0,0 +1,96 @@ +import unittest + +import numpy as np +import sklearn.preprocessing + +from autosklearn.pipeline.components.feature_preprocessing.nystroem_sampler import \ + Nystroem +from autosklearn.pipeline.util import _test_preprocessing, get_dataset + + +class NystroemComponentTest(unittest.TestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(Nystroem) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], 100) + self.assertFalse((transformation == 0).all()) + + # Custom preprocessing test to check if clipping to zero works + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + original_X_train = X_train.copy() + ss = sklearn.preprocessing.StandardScaler() + X_train = ss.fit_transform(X_train) + configuration_space = Nystroem.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + + preprocessor = Nystroem(random_state=1, + **{hp_name: default[hp_name] + for hp_name in default + if default[hp_name] is not None}) + + transformer = preprocessor.fit(X_train, Y_train) + transformation, original = transformer.transform( + X_train), original_X_train + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], 100) + + #@unittest.skip("Right now, the RBFSampler returns a float64 array!") + def _test_preprocessing_dtype(self): + # Dense + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + self.assertEqual(X_train.dtype, np.float32) + + configuration_space = Nystroem.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = Nystroem(random_state=1, + **{hp.hyperparameter.name: hp.value + for hp + in + default.values.values()}) + preprocessor.fit(X_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + X_train = X_train.astype(np.float64) + configuration_space =
Nystroem.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = Nystroem(random_state=1, + **{hp.hyperparameter.name: hp.value + for hp + in + default.values.values()}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) + + # Sparse + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) + self.assertEqual(X_train.dtype, np.float32) + configuration_space = Nystroem.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = Nystroem(random_state=1, + **{hp.hyperparameter.name: hp.value + for hp + in + default.values.values()}) + preprocessor.fit(X_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) + X_train = X_train.astype(np.float64) + configuration_space = Nystroem.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = Nystroem(random_state=1, + **{hp.hyperparameter.name: hp.value + for hp + in + default.values.values()}) + preprocessor.fit(X_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) diff --git a/test/test_pipeline/components/feature_preprocessing/test_pca.py b/test/test_pipeline/components/feature_preprocessing/test_pca.py new file mode 100644 index 0000000000..c7b47f7818 --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_pca.py @@ -0,0 +1,22 @@ +import unittest + +import numpy as np + +from autosklearn.pipeline.components.feature_preprocessing.pca import PCA +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase + + +class PCAComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformations = [] + for i in range(10): + transformation, original = _test_preprocessing(PCA) + self.assertEqual(transformation.shape, original.shape) + self.assertFalse((transformation == original).all()) + transformations.append(transformation) + if len(transformations) > 1: + self.assertTrue((transformations[-1] == transformations[-2]).all()) + + def test_preprocessing_dtype(self): + super(PCAComponentTest, self)._test_preprocessing_dtype(PCA, + test_sparse=False) \ No newline at end of file diff --git a/test/test_pipeline/components/feature_preprocessing/test_polynomial.py b/test/test_pipeline/components/feature_preprocessing/test_polynomial.py new file mode 100644 index 0000000000..855fc59bfd --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_polynomial.py @@ -0,0 +1,40 @@ +import unittest + +from sklearn.linear_model import RidgeClassifier +from autosklearn.pipeline.components.feature_preprocessing.polynomial import \ + PolynomialFeatures +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, \ + get_dataset +import sklearn.metrics + + +class PolynomialFeaturesComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(PolynomialFeatures) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertFalse((transformation == 0).all()) + + def test_default_configuration_classify(self): + for i in range(2): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=False) + configuration_space = 
PolynomialFeatures.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = PolynomialFeatures(random_state=1, + **{hp_name: default[hp_name] for + hp_name in default}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + classifier = RidgeClassifier() + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(accuracy, 0.93564055859137829, places=2) + + def test_preprocessing_dtype(self): + super(PolynomialFeaturesComponentTest, + self)._test_preprocessing_dtype(PolynomialFeatures, + test_sparse=False) diff --git a/test/test_pipeline/components/feature_preprocessing/test_random_trees_embedding.py b/test/test_pipeline/components/feature_preprocessing/test_random_trees_embedding.py new file mode 100644 index 0000000000..da74cea977 --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_random_trees_embedding.py @@ -0,0 +1,49 @@ +import unittest + +import numpy as np +import scipy.sparse + +from autosklearn.pipeline.components.feature_preprocessing.random_trees_embedding import \ + RandomTreesEmbedding +from autosklearn.pipeline.util import _test_preprocessing, get_dataset + + +class RandomTreesEmbeddingComponentTest(unittest.TestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(RandomTreesEmbedding) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], 213) + self.assertIsInstance(original, np.ndarray) + self.assertTrue(scipy.sparse.issparse(transformation)) + self.assertTrue(all(transformation.data == 1)) + + @unittest.skip("Right now, the RTE returns a float64 array!") + def test_preprocessing_dtype(self): + # Dense + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + self.assertEqual(X_train.dtype, np.float32) + + configuration_space = RandomTreesEmbedding.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = RandomTreesEmbedding(random_state=1, + **{hp_name: default[hp_name] for + hp_name in + default}) + preprocessor.fit(X_train) + Xt = preprocessor.transform(X_train) + + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + X_train = X_train.astype(np.float64) + configuration_space = RandomTreesEmbedding.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = RandomTreesEmbedding(random_state=1, + **{hp_name: default[hp_name] for + hp_name in + default}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) \ No newline at end of file diff --git a/test/test_pipeline/components/feature_preprocessing/test_select_percentile_classification.py b/test/test_pipeline/components/feature_preprocessing/test_select_percentile_classification.py new file mode 100644 index 0000000000..c73786351f --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_select_percentile_classification.py @@ -0,0 +1,90 @@ +import unittest + +import numpy as np +import scipy.sparse +import sklearn.preprocessing + +from autosklearn.pipeline.components.feature_preprocessing.select_percentile_classification import \ 
SelectPercentileClassification +from autosklearn.pipeline.util import _test_preprocessing, get_dataset + + +class SelectPercentileClassificationTest(unittest.TestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(SelectPercentileClassification) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], int(original.shape[1]/2)) + self.assertFalse((transformation == 0).all()) + + transformation, original = _test_preprocessing(SelectPercentileClassification, make_sparse=True) + self.assertTrue(scipy.sparse.issparse(transformation)) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], int(original.shape[1]/2)) + + # Custom preprocessing test to check if clipping to zero works + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + original_X_train = X_train.copy() + ss = sklearn.preprocessing.StandardScaler() + X_train = ss.fit_transform(X_train) + configuration_space = SelectPercentileClassification.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + + preprocessor = SelectPercentileClassification(random_state=1, + **{hp_name: default[hp_name] for hp_name in + default if default[hp_name] is not None}) + + transformer = preprocessor.fit(X_train, Y_train) + transformation, original = transformer.transform(X_train), original_X_train + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], int(original.shape[1] / 2)) + + def test_preprocessing_dtype(self): + # Dense + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + self.assertEqual(X_train.dtype, np.float32) + + configuration_space = SelectPercentileClassification.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectPercentileClassification(random_state=1, + **{hp_name: default[hp_name] + for hp_name in default}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + X_train = X_train.astype(np.float64) + configuration_space = SelectPercentileClassification.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectPercentileClassification(random_state=1, + **{hp_name: default[hp_name] + for hp_name in default}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) + + # Sparse + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) + self.assertEqual(X_train.dtype, np.float32) + configuration_space = SelectPercentileClassification.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectPercentileClassification(random_state=1, + **{hp_name: default[hp_name] + for hp_name in default}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) + X_train = X_train.astype(np.float64) + configuration_space = SelectPercentileClassification.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectPercentileClassification(random_state=1, + **{hp_name: 
default[hp_name] + for hp_name in default}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) diff --git a/test/test_pipeline/components/feature_preprocessing/test_select_percentile_regression.py b/test/test_pipeline/components/feature_preprocessing/test_select_percentile_regression.py new file mode 100644 index 0000000000..2404f08dbf --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_select_percentile_regression.py @@ -0,0 +1,41 @@ +import unittest + +import numpy as np + +from autosklearn.pipeline.components.feature_preprocessing.select_percentile_regression import SelectPercentileRegression +from autosklearn.pipeline.util import _test_preprocessing, get_dataset + + +class SelectPercentileRegressionTest(unittest.TestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(dataset="boston", Preprocessor=SelectPercentileRegression) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], int(original.shape[1]/2)) + self.assertFalse((transformation == 0).all()) + + def test_preprocessing_dtype(self): + # Dense + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + self.assertEqual(X_train.dtype, np.float32) + + configuration_space = SelectPercentileRegression.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectPercentileRegression(random_state=1, + **{hp_name: default[hp_name] + for hp_name in default}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + X_train = X_train.astype(np.float64) + configuration_space = SelectPercentileRegression.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectPercentileRegression(random_state=1, + **{hp_name: default[hp_name] + for hp_name in default}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) diff --git a/test/test_pipeline/components/feature_preprocessing/test_select_rates.py b/test/test_pipeline/components/feature_preprocessing/test_select_rates.py new file mode 100644 index 0000000000..5f40b2fe9e --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_select_rates.py @@ -0,0 +1,97 @@ +import unittest + +import numpy as np +import scipy.sparse +import sklearn.preprocessing + +from autosklearn.pipeline.components.feature_preprocessing.select_rates import \ + SelectRates +from autosklearn.pipeline.util import _test_preprocessing, get_dataset + + +class SelectRatesComponentTest(unittest.TestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(SelectRates) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], 3) + self.assertFalse((transformation == 0).all()) + + transformation, original = _test_preprocessing( + SelectRates, make_sparse=True) + self.assertTrue(scipy.sparse.issparse(transformation)) + self.assertEqual(transformation.shape[0], original.shape[0]) + self.assertEqual(transformation.shape[1], int(original.shape[1] / 2)) + + # Custom preprocessing test to check if clipping to zero works + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + original_X_train = X_train.copy() + ss = 
sklearn.preprocessing.StandardScaler() + X_train = ss.fit_transform(X_train) + configuration_space = SelectRates.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + + preprocessor = SelectRates(random_state=1, + **{hp_name: default[hp_name] + for hp_name in default + if default[hp_name] is not None}) + + transformer = preprocessor.fit(X_train, Y_train) + transformation, original = transformer.transform( + X_train), original_X_train + self.assertEqual(transformation.shape[0], original.shape[0]) + # I don't know why it's 52 here and not 32, which would be half of the + # number of features. Seems to be related to a runtime warning raised + # by sklearn + self.assertEqual(transformation.shape[1], 52) + + def test_preprocessing_dtype(self): + # Dense + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + self.assertEqual(X_train.dtype, np.float32) + + configuration_space = SelectRates.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectRates(random_state=1, + **{hp_name: default[hp_name] for hp_name in + default}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris") + X_train = X_train.astype(np.float64) + configuration_space = SelectRates.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectRates(random_state=1, + **{hp_name: default[hp_name] for hp_name in + default}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) + + # Sparse + # np.float32 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) + self.assertEqual(X_train.dtype, np.float32) + configuration_space = SelectRates.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectRates(random_state=1, + **{hp_name: default[hp_name] for hp_name in + default}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float32) + + # np.float64 + X_train, Y_train, X_test, Y_test = get_dataset("iris", make_sparse=True) + X_train = X_train.astype(np.float64) + configuration_space = SelectRates.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = SelectRates(random_state=1, + **{hp_name: default[hp_name] for hp_name in + default}) + preprocessor.fit(X_train, Y_train) + Xt = preprocessor.transform(X_train) + self.assertEqual(Xt.dtype, np.float64) diff --git a/test/test_pipeline/components/feature_preprocessing/test_truncatedSVD.py b/test/test_pipeline/components/feature_preprocessing/test_truncatedSVD.py new file mode 100644 index 0000000000..ce34950d03 --- /dev/null +++ b/test/test_pipeline/components/feature_preprocessing/test_truncatedSVD.py @@ -0,0 +1,43 @@ +import unittest + +from sklearn.linear_model import RidgeClassifier +from autosklearn.pipeline.components.feature_preprocessing.truncatedSVD import \ + TruncatedSVD +from autosklearn.pipeline.util import _test_preprocessing, PreprocessingTestCase, \ + get_dataset +import sklearn.metrics + + +class TruncatedSVDComponentTest(PreprocessingTestCase): + def test_default_configuration(self): + transformation, original = _test_preprocessing(TruncatedSVD) + self.assertEqual(transformation.shape[0], original.shape[0]) + 
self.assertFalse((transformation == 0).all()) + + def test_default_configuration_classify(self): + for i in range(2): + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=True) + configuration_space = TruncatedSVD.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + preprocessor = TruncatedSVD(random_state=1, + **{hp_name: default[hp_name] + for hp_name in + default if default[ + hp_name] is not None}) + preprocessor.fit(X_train, Y_train) + X_train_trans = preprocessor.transform(X_train) + X_test_trans = preprocessor.transform(X_test) + + # fit a classifier on top + classifier = RidgeClassifier() + predictor = classifier.fit(X_train_trans, Y_train) + predictions = predictor.predict(X_test_trans) + accuracy = sklearn.metrics.accuracy_score(predictions, Y_test) + self.assertAlmostEqual(accuracy, 0.44201578627808136, places=2) + + @unittest.skip("Truncated SVD returns np.float64.") + def test_preprocessing_dtype(self): + super(TruncatedSVDComponentTest, + self)._test_preprocessing_dtype(TruncatedSVD, + test_sparse=False) diff --git a/test/test_pipeline/components/regression/__init__.py b/test/test_pipeline/components/regression/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/test_pipeline/components/regression/liblinear_svr.py b/test/test_pipeline/components/regression/liblinear_svr.py new file mode 100644 index 0000000000..9fa68b912a --- /dev/null +++ b/test/test_pipeline/components/regression/liblinear_svr.py @@ -0,0 +1,17 @@ +import unittest + +from autosklearn.pipeline.components.regression.liblinear_svr import \ + LibLinear_SVR +from autosklearn.pipeline.util import _test_regressor + +import sklearn.metrics + + +class SupportVectorComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_regressor(LibLinear_SVR, + dataset='boston') + self.assertAlmostEqual(0.54372712745256768, + sklearn.metrics.r2_score(y_true=targets, + y_pred=predictions)) diff --git a/test/test_pipeline/components/regression/test_adaboost.py b/test/test_pipeline/components/regression/test_adaboost.py new file mode 100644 index 0000000000..c1baf78934 --- /dev/null +++ b/test/test_pipeline/components/regression/test_adaboost.py @@ -0,0 +1,25 @@ +import unittest + +from autosklearn.pipeline.components.regression.adaboost import \ + AdaboostRegressor +from autosklearn.pipeline.util import _test_regressor + +import sklearn.metrics + + +class AdaBoostComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = \ + _test_regressor(AdaboostRegressor, dataset='boston') + self.assertAlmostEqual(0.11053868761882502, + sklearn.metrics.r2_score(targets, + predictions)) + + def test_default_configuration_sparse(self): + for i in range(10): + predictions, targets = \ + _test_regressor(AdaboostRegressor, sparse=True, dataset='boston') + self.assertAlmostEqual(-0.077540100211211049, + sklearn.metrics.r2_score(targets, + predictions)) diff --git a/test/test_pipeline/components/regression/test_decision_tree.py b/test/test_pipeline/components/regression/test_decision_tree.py new file mode 100644 index 0000000000..e3f9f520dd --- /dev/null +++ b/test/test_pipeline/components/regression/test_decision_tree.py @@ -0,0 +1,22 @@ +import unittest + +from autosklearn.pipeline.components.regression.decision_tree import DecisionTree +from autosklearn.pipeline.util import _test_regressor + +import sklearn.metrics + + 
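The regression component tests in this part of the diff all go through the _test_regressor and _test_regressor_iterative_fit helpers from autosklearn.pipeline.util, whose implementation is not shown here. As a reading aid, here is a minimal sketch of what _test_regressor presumably does, inferred from the default-configuration pattern spelled out in the ridge regression test further below; the function name, dataset default, and exact signature are illustrative assumptions, not the helper's actual code.

from autosklearn.pipeline.util import get_dataset

def _test_regressor_sketch(Regressor, dataset='diabetes', sparse=False):
    # Build the component from its default hyperparameter configuration,
    # fit it on a fixed train split, and return test predictions/targets.
    X_train, Y_train, X_test, Y_test = get_dataset(dataset=dataset,
                                                   make_sparse=sparse)
    configuration_space = Regressor.get_hyperparameter_search_space()
    default = configuration_space.get_default_configuration()
    regressor = Regressor(random_state=1,
                          **{hp_name: default[hp_name] for hp_name in default
                             if default[hp_name] is not None})
    predictor = regressor.fit(X_train, Y_train)
    return predictor.predict(X_test), Y_test

With a fixed random_state and fixed data split, repeating the call in a loop (as the tests do with `for i in range(10)`) checks that the component is deterministic enough to reproduce the hard-coded R^2 values.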
+class DecisionTreeComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_regressor(DecisionTree) + self.assertAlmostEqual(0.14886750572325669, + sklearn.metrics.r2_score(targets, + predictions)) + + def test_default_configuration_sparse(self): + for i in range(10): + predictions, targets = _test_regressor(DecisionTree, sparse=True) + self.assertAlmostEqual(0.021778487309118133, + sklearn.metrics.r2_score(targets, + predictions)) diff --git a/test/test_pipeline/components/regression/test_extra_trees.py b/test/test_pipeline/components/regression/test_extra_trees.py new file mode 100644 index 0000000000..94012a0c6c --- /dev/null +++ b/test/test_pipeline/components/regression/test_extra_trees.py @@ -0,0 +1,33 @@ +import unittest + +from autosklearn.pipeline.components.regression.extra_trees import \ + ExtraTreesRegressor +from autosklearn.pipeline.util import _test_regressor, _test_regressor_iterative_fit + +import sklearn.metrics + + +class ExtraTreesComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = \ + _test_regressor(ExtraTreesRegressor) + self.assertAlmostEqual(0.4269923975466271, + sklearn.metrics.r2_score(targets, + predictions)) + + def test_default_configuration_sparse(self): + for i in range(10): + predictions, targets = \ + _test_regressor(ExtraTreesRegressor, sparse=True) + self.assertAlmostEqual(0.26287621251507987, + sklearn.metrics.r2_score(targets, + predictions)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = \ + _test_regressor_iterative_fit(ExtraTreesRegressor) + self.assertAlmostEqual(0.4269923975466271, + sklearn.metrics.r2_score(targets, + predictions)) \ No newline at end of file diff --git a/test/test_pipeline/components/regression/test_gaussian_process.py b/test/test_pipeline/components/regression/test_gaussian_process.py new file mode 100644 index 0000000000..7977eddafc --- /dev/null +++ b/test/test_pipeline/components/regression/test_gaussian_process.py @@ -0,0 +1,18 @@ +import unittest + +from autosklearn.pipeline.components.regression.gaussian_process import GaussianProcess +from autosklearn.pipeline.util import _test_regressor + +import sklearn.metrics + + +class GaussianProcessComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + # Float32 leads to numeric instabilities + predictions, targets = _test_regressor(GaussianProcess, + dataset='boston') + self.assertAlmostEqual(0.83362335184173442, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions), + places=2) + diff --git a/test/test_pipeline/components/regression/test_gradient_boosting.py b/test/test_pipeline/components/regression/test_gradient_boosting.py new file mode 100644 index 0000000000..4a331a79fc --- /dev/null +++ b/test/test_pipeline/components/regression/test_gradient_boosting.py @@ -0,0 +1,21 @@ +import unittest + +from autosklearn.pipeline.components.regression.gradient_boosting import GradientBoosting +from autosklearn.pipeline.util import _test_regressor, _test_regressor_iterative_fit + +import sklearn.metrics + + +class GradientBoostingComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + + predictions, targets = _test_regressor(GradientBoosting) + self.assertAlmostEqual(0.35273007696557712, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) + + def 
test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = _test_regressor(GradientBoosting) + self.assertAlmostEqual(0.35273007696557712, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) diff --git a/test/test_pipeline/components/regression/test_k_nearest_neighbors.py b/test/test_pipeline/components/regression/test_k_nearest_neighbors.py new file mode 100644 index 0000000000..19aa3dce03 --- /dev/null +++ b/test/test_pipeline/components/regression/test_k_nearest_neighbors.py @@ -0,0 +1,25 @@ +import unittest + +from autosklearn.pipeline.components.regression.k_nearest_neighbors import \ + KNearestNeighborsRegressor +from autosklearn.pipeline.util import _test_regressor + +import sklearn.metrics + + +class KNearestNeighborsComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = \ + _test_regressor(KNearestNeighborsRegressor) + self.assertAlmostEqual(0.068600456340847438, + sklearn.metrics.r2_score(targets, + predictions)) + + def test_default_configuration_sparse_data(self): + for i in range(10): + predictions, targets = \ + _test_regressor(KNearestNeighborsRegressor, sparse=True) + self.assertAlmostEqual(-0.16321841460809972, + sklearn.metrics.r2_score(targets, + predictions)) diff --git a/test/test_pipeline/components/regression/test_random_forests.py b/test/test_pipeline/components/regression/test_random_forests.py new file mode 100644 index 0000000000..f5d8936da7 --- /dev/null +++ b/test/test_pipeline/components/regression/test_random_forests.py @@ -0,0 +1,29 @@ +import unittest + +from autosklearn.pipeline.components.regression.random_forest import RandomForest +from autosklearn.pipeline.util import _test_regressor, _test_regressor_iterative_fit + +import sklearn.metrics + + +class RandomForestComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + + predictions, targets = _test_regressor(RandomForest) + self.assertAlmostEqual(0.41224692924630502, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) + + + def test_default_configuration_sparse(self): + for i in range(10): + predictions, targets = _test_regressor(RandomForest, sparse=True) + self.assertAlmostEqual(0.24117530425422551, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = \ + _test_regressor_iterative_fit(RandomForest) + self.assertAlmostEqual(0.41224692924630502, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) diff --git a/test/test_pipeline/components/regression/test_ridge_regression.py b/test/test_pipeline/components/regression/test_ridge_regression.py new file mode 100644 index 0000000000..19b7382e6d --- /dev/null +++ b/test/test_pipeline/components/regression/test_ridge_regression.py @@ -0,0 +1,43 @@ +import unittest + +from autosklearn.pipeline.components.regression.ridge_regression import RidgeRegression +from autosklearn.pipeline.components.feature_preprocessing.kitchen_sinks import RandomKitchenSinks +from autosklearn.pipeline.util import _test_regressor, get_dataset + +import sklearn.metrics + + +class RidgeComponentTest(unittest.TestCase): + def test_default_configuration(self): + configuration_space = RidgeRegression.get_hyperparameter_search_space() + default = configuration_space.get_default_configuration() + configuration_space_preproc = RandomKitchenSinks.get_hyperparameter_search_space() + default_preproc = 
configuration_space_preproc.get_default_configuration() + + for i in range(10): + # This should give a bad result + predictions, targets = _test_regressor(RidgeRegression) + self.assertAlmostEqual(0.32614416980439365, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) + + # This should be much better + X_train, Y_train, X_test, Y_test = get_dataset(dataset='diabetes', + make_sparse=False) + preprocessor = RandomKitchenSinks( + random_state=1, + **{hp_name: default_preproc[hp_name] for hp_name in + default_preproc if default_preproc[hp_name] is not None}) + + transformer = preprocessor.fit(X_train, Y_train) + X_train_transformed = transformer.transform(X_train) + X_test_transformed = transformer.transform(X_test) + + regressor = RidgeRegression( + random_state=1, + **{hp_name: default[hp_name] for hp_name in + default if default[hp_name] is not None}) + predictor = regressor.fit(X_train_transformed, Y_train) + predictions = predictor.predict(X_test_transformed) + + self.assertAlmostEqual(0.37183512452087852, + sklearn.metrics.r2_score(y_true=Y_test, y_pred=predictions)) \ No newline at end of file diff --git a/test/test_pipeline/components/regression/test_sgd.py b/test/test_pipeline/components/regression/test_sgd.py new file mode 100644 index 0000000000..fb15bb1bb6 --- /dev/null +++ b/test/test_pipeline/components/regression/test_sgd.py @@ -0,0 +1,22 @@ +import unittest + +from autosklearn.pipeline.components.regression.sgd import SGD +from autosklearn.pipeline.util import _test_regressor, _test_regressor_iterative_fit + +import sklearn.metrics + + +class SGDComponentTest(unittest.TestCase): + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_regressor(SGD) + self.assertAlmostEqual(0.092460881802630235, + sklearn.metrics.r2_score(y_true=targets, + y_pred=predictions)) + + def test_default_configuration_iterative_fit(self): + for i in range(10): + predictions, targets = _test_regressor_iterative_fit(SGD) + self.assertAlmostEqual(0.092460881802630235, + sklearn.metrics.r2_score(y_true=targets, + y_pred=predictions)) \ No newline at end of file diff --git a/test/test_pipeline/components/regression/test_support_vector_regression.py b/test/test_pipeline/components/regression/test_support_vector_regression.py new file mode 100644 index 0000000000..2ecb3c64a7 --- /dev/null +++ b/test/test_pipeline/components/regression/test_support_vector_regression.py @@ -0,0 +1,23 @@ +import unittest + +from autosklearn.pipeline.components.regression.libsvm_svr import LibSVM_SVR +from autosklearn.pipeline.util import _test_regressor + + +import sklearn.metrics + + +class SupportVectorComponentTest(unittest.TestCase): + + def test_default_configuration(self): + for i in range(10): + predictions, targets = _test_regressor(LibSVM_SVR) + self.assertAlmostEqual(0.12849591861430087, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) + + def test_default_configuration_sparse(self): + for i in range(10): + predictions, targets = _test_regressor(LibSVM_SVR, + sparse=True) + self.assertAlmostEqual(0.0098877566961463881, + sklearn.metrics.r2_score(y_true=targets, y_pred=predictions)) diff --git a/test/test_pipeline/implementations/__init__.py b/test/test_pipeline/implementations/__init__.py new file mode 100644 index 0000000000..8f0ce6cb7c --- /dev/null +++ b/test/test_pipeline/implementations/__init__.py @@ -0,0 +1 @@ +__author__ = 'feurerm' diff --git a/test/test_pipeline/implementations/test_OneHotEncoder.py 
b/test/test_pipeline/implementations/test_OneHotEncoder.py new file mode 100644 index 0000000000..c2cb2b4546 --- /dev/null +++ b/test/test_pipeline/implementations/test_OneHotEncoder.py @@ -0,0 +1,203 @@ +import unittest + +import numpy as np +import scipy.sparse +from sklearn.utils.testing import assert_array_almost_equal + +from autosklearn.pipeline.implementations.OneHotEncoder import OneHotEncoder + +dense1 = np.array([[0, 1, 0], + [0, 0, 0], + [1, 1, 0]]) +dense1_1h = np.array([[1, 0, 0, 1, 1], + [1, 0, 1, 0, 1], + [0, 1, 0, 1, 1]]) +dense1_1h_minimum_fraction = np.array([[0, 1, 0, 1, 1], + [0, 1, 1, 0, 1], + [1, 0, 0, 1, 1]]) + +# Including NaNs +dense2 = np.array([[0, np.NaN, 0], + [np.NaN, 0, 2], + [1, 1, 1], + [np.NaN, 0, 1]]) +dense2_1h = np.array([[0, 1, 0, 1, 0, 0, 1, 0, 0], + [1, 0, 0, 0, 1, 0, 0, 0, 1], + [0, 0, 1, 0, 0, 1, 0, 1, 0], + [1, 0, 0, 0, 1, 0, 0, 1, 0]]) + +dense2_1h_minimum_fraction = np.array([[1, 0, 1, 0, 1, 0], + [0, 1, 0, 1, 1, 0], + [1, 0, 1, 0, 0, 1], + [0, 1, 0, 1, 0, 1]]) + +dense2_partial_1h = np.array([[0., 1., 0., 1., 0., 0., 0.], + [1., 0., 0., 0., 1., 0., 2.], + [0., 0., 1., 0., 0., 1., 1.], + [1., 0., 0., 0., 1., 0., 1.]]) + +dense2_1h_minimum_fraction_as_sparse = np.array([[0, 0, 1, 0, 0, 0], + [0, 1, 0, 0, 1, 0], + [1, 0, 0, 1, 0, 1], + [0, 1, 0, 0, 0, 1]]) + +# All NaN slice +dense3 = np.array([[0, 1, np.NaN], + [1, 0, np.NaN]]) +dense3_1h = np.array([[1, 0, 0, 1, 1], + [0, 1, 1, 0, 1]]) + +sparse1 = scipy.sparse.csc_matrix(([3, 2, 1, 1, 2, 3], + ((1, 4, 5, 2, 3, 5), + (0, 0, 0, 1, 1, 1))), shape=(6, 2)) +sparse1_1h = scipy.sparse.csc_matrix(([1, 1, 1, 1, 1, 1], + ((5, 4, 1, 2, 3, 5), + (0, 1, 2, 3, 4, 5))), shape=(6, 6)) +sparse1_paratial_1h = scipy.sparse.csc_matrix(([1, 1, 1, 1, 2, 3], + ((5, 4, 1, 2, 3, 5), + (0, 1, 2, 3, 3, 3))), + shape=(6, 4)) + +# All zeros slice +sparse2 = scipy.sparse.csc_matrix(([2, 1, 0, 0, 0, 0], + ((1, 4, 5, 2, 3, 5), + (0, 0, 0, 1, 1, 1))), shape=(6, 2)) +sparse2_1h = scipy.sparse.csc_matrix(([1, 1, 1, 1, 1, 1], + ((5, 4, 1, 2, 3, 5), + (0, 1, 2, 3, 3, 3))), shape=(6, 4)) + +sparse2_csr = scipy.sparse.csr_matrix(([2, 1, 0, 0, 0, 0], + ((1, 4, 5, 2, 3, 5), + (0, 0, 0, 1, 1, 1))), shape=(6, 2)) +sparse2_csr_1h = scipy.sparse.csr_matrix(([1, 1, 1, 1, 1, 1], + ((5, 4, 1, 2, 3, 5), + (0, 1, 2, 3, 3, 3))), shape=(6, 4)) + + +class OneHotEncoderTest(unittest.TestCase): + def test_dense1(self): + self.fit_then_transform(dense1_1h, dense1) + self.fit_then_transform_dense(dense1_1h, dense1) + + def test_dense1_minimum_fraction(self): + self.fit_then_transform(dense1_1h_minimum_fraction, dense1, minimum_fraction=0.5) + self.fit_then_transform_dense(dense1_1h_minimum_fraction, dense1, minimum_fraction=0.5) + + def test_dense2(self): + self.fit_then_transform(dense2_1h, dense2) + self.fit_then_transform_dense(dense2_1h, dense2) + + def test_dense2_minimum_fraction(self): + self.fit_then_transform(dense2_1h_minimum_fraction, dense2, + minimum_fraction=0.3) + self.fit_then_transform_dense(dense2_1h_minimum_fraction, dense2, + minimum_fraction=0.3) + + def test_dense2_with_non_sparse_components(self): + self.fit_then_transform(dense2_partial_1h, dense2, + categorical_features=[True, True, False]) + self.fit_then_transform_dense(dense2_partial_1h, dense2, + categorical_features=[True, True, False]) + + # Minimum fraction is not too interesting here... 
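Judging from the dense1/dense2 fixtures above, minimum_fraction appears to make the encoder coalesce categorical values that occur in less than that fraction of rows, so all rare values share a single indicator column instead of each getting their own. The sketch below illustrates that reading using only constructor arguments these tests exercise (categorical_features, minimum_fraction, sparse); the exact coalescing rule is an assumption, and the expected shape is a prediction under that assumption rather than a documented guarantee.

import numpy as np
from autosklearn.pipeline.implementations.OneHotEncoder import OneHotEncoder

# Values 1 and 2 each occur in 25% of rows, value 0 in 50%.
column = np.array([[0.], [0.], [1.], [2.]])
ohe = OneHotEncoder(minimum_fraction=0.5, sparse=False)
encoded = ohe.fit_transform(column)
# Under the reading above, value 0 keeps its own column while the rare
# values 1 and 2 collapse into one shared column.
print(encoded.shape)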
+ def test_dense3(self): + self.fit_then_transform(dense3_1h, dense3) + self.fit_then_transform_dense(dense3_1h, dense3) + + def test_sparse1(self): + self.fit_then_transform(sparse1_1h.todense(), sparse1) + self.fit_then_transform_dense(sparse1_1h.todense(), sparse1) + + def test_sparse1_minimum_fraction(self): + expected = np.array([[0, 1, 0, 0, 1, 1], + [0, 0, 1, 1, 0, 1]], dtype=float).transpose() + self.fit_then_transform(expected, sparse1, + minimum_fraction=0.5) + self.fit_then_transform_dense(expected, sparse1, + minimum_fraction=0.5) + + def test_sparse1_with_non_sparse_components(self): + self.fit_then_transform(sparse1_paratial_1h.todense(), sparse1, + categorical_features=[True, False]) + # This test does not apply here. The sparse matrix will be cut into a + # continuous and a categorical part; after one hot encoding only the + # categorical part is an array, while the continuous part will still be + # a sparse matrix. Therefore, the OHE will only return a sparse matrix + #self.fit_then_transform_dense(sparse1_paratial_1h.todense(), sparse1, + # categorical_features=[True, False]) + + def test_sparse2(self): + self.fit_then_transform(sparse2_1h.todense(), sparse2) + self.fit_then_transform_dense(sparse2_1h.todense(), sparse2) + + def test_sparse2_minimum_fraction(self): + expected = np.array([[0, 1, 0, 0, 1, 1], + [0, 0, 1, 1, 0, 1]], dtype=float).transpose() + self.fit_then_transform(expected, sparse2, + minimum_fraction=0.5) + self.fit_then_transform_dense(expected, sparse2, + minimum_fraction=0.5) + + def test_sparse2_csr(self): + self.fit_then_transform(sparse2_csr_1h.todense(), sparse2_csr) + self.fit_then_transform_dense(sparse2_csr_1h.todense(), sparse2_csr) + + def test_sparse_on_dense2_minimum_fraction(self): + sparse = scipy.sparse.csr_matrix(dense2) + self.fit_then_transform(dense2_1h_minimum_fraction_as_sparse, sparse, + minimum_fraction=0.5) + self.fit_then_transform_dense(dense2_1h_minimum_fraction_as_sparse, sparse, + minimum_fraction=0.5) + + def fit_then_transform(self, expected, input, categorical_features='all', + minimum_fraction=None): + # Test fit_transform + ohe = OneHotEncoder(categorical_features=categorical_features, + minimum_fraction=minimum_fraction) + transformation = ohe.fit_transform(input.copy()) + self.assertIsInstance(transformation, scipy.sparse.csr_matrix) + assert_array_almost_equal(expected.astype(float), + transformation.todense()) + + # Test fit, and afterwards transform + ohe2 = OneHotEncoder(categorical_features=categorical_features, + minimum_fraction=minimum_fraction) + ohe2.fit(input.copy()) + transformation = ohe2.transform(input.copy()) + self.assertIsInstance(transformation, scipy.sparse.csr_matrix) + assert_array_almost_equal(expected, transformation.todense()) + + def fit_then_transform_dense(self, expected, input, + categorical_features='all', + minimum_fraction=None): + ohe = OneHotEncoder(categorical_features=categorical_features, + sparse=False, minimum_fraction=minimum_fraction) + transformation = ohe.fit_transform(input.copy()) + self.assertIsInstance(transformation, np.ndarray) + assert_array_almost_equal(expected, transformation) + + ohe2 = OneHotEncoder(categorical_features=categorical_features, + sparse=False, minimum_fraction=minimum_fraction) + ohe2.fit(input.copy()) + transformation = ohe2.transform(input.copy()) + self.assertIsInstance(transformation, np.ndarray) + assert_array_almost_equal(expected, transformation) + + def test_transform_with_unknown_value(self): + input = np.array(((0, 1, 2, 3, 4, 5), (0, 1, 2, 3, 4, 
5))).transpose() + ohe = OneHotEncoder() + ohe.fit(input) + test_data = np.array(((0, 1, 2, 6), (0, 1, 6, 7))).transpose() + output = ohe.transform(test_data).todense() + self.assertEqual(5, np.sum(output)) + + input = np.array(((0, 1, 2, 3, 4, 5), (0, 1, 2, 3, 4, 5))).transpose() + ips = scipy.sparse.csr_matrix(input) + ohe = OneHotEncoder() + ohe.fit(ips) + test_data = np.array(((0, 1, 2, 6), (0, 1, 6, 7))).transpose() + tds = scipy.sparse.csr_matrix(test_data) + output = ohe.transform(tds).todense() + self.assertEqual(3, np.sum(output)) + + diff --git a/test/test_pipeline/implementations/test_ProjLogit.py b/test/test_pipeline/implementations/test_ProjLogit.py new file mode 100644 index 0000000000..626a95636a --- /dev/null +++ b/test/test_pipeline/implementations/test_ProjLogit.py @@ -0,0 +1,39 @@ +import unittest +import os +import numpy as np +#import scipy.io + +from autosklearn.pipeline.implementations.ProjLogit import ProjLogit + + +class TestProjLogit(unittest.TestCase): + def test_proj_logit(self): + """Test logistic regression implementation based on least squares""" + + # simple test that should work out + trainx = np.random.rand(100, 3) + trainy = np.zeros(100) + testx = np.random.rand(100, 3) + testy = np.zeros(100) + for i in range(100): + if trainx[i, 2] > 0.5: + trainy[i] = 1 + for i in range(100): + if testx[i, 2] > 0.5: + testy[i] = 1 + + model = ProjLogit(max_epochs=10, verbose=True) + model.fit(trainx, trainy) + print("weights 0:") + print(model.w0) + predicted_prob = model.predict_proba(testx) + predicted2 = np.argmax(predicted_prob, axis=1) + predicted = model.predict(testx) + + #print(predicted) + #print(testy) + #print((predicted != testy).sum()) + #print((predicted2 != testy).sum()) + self.assertTrue((predicted == predicted2).all()) + self.assertTrue(((1 - predicted_prob.sum(axis=1)) < 1e-3).all()) + self.assertTrue((predicted != testy).sum() < 20) diff --git a/test/test_pipeline/implementations/test_imputation.py b/test/test_pipeline/implementations/test_imputation.py new file mode 100644 index 0000000000..29119ca48b --- /dev/null +++ b/test/test_pipeline/implementations/test_imputation.py @@ -0,0 +1,363 @@ +import unittest + +import numpy as np +from scipy import sparse + +from sklearn.utils.testing import assert_equal +from sklearn.utils.testing import assert_array_equal +from sklearn.utils.testing import assert_raises +from sklearn.utils.testing import assert_false +from sklearn.utils.testing import assert_true + +from autosklearn.pipeline.implementations.Imputation import Imputer +from sklearn.pipeline import Pipeline +from sklearn import grid_search +from sklearn import tree +from sklearn.random_projection import sparse_random_matrix + + +def safe_median(arr, *args, **kwargs): + # np.median([]) raises a TypeError for numpy >= 1.10.1 + length = arr.size if hasattr(arr, 'size') else len(arr) + return np.nan if length == 0 else np.median(arr, *args, **kwargs) + + +def safe_mean(arr, *args, **kwargs): + # np.mean([]) raises a RuntimeWarning for numpy >= 1.10.1 + length = arr.size if hasattr(arr, 'size') else len(arr) + return np.nan if length == 0 else np.mean(arr, *args, **kwargs) + + +class ImputationTest(unittest.TestCase): + def _check_statistics(self, X, X_true, + strategy, statistics, missing_values): + """Utility function for testing imputation for a given strategy. 
+ + Test: + - along the two axes + - with dense and sparse arrays + + Check that: + - the statistics (mean, median, mode) are correct + - the missing values are imputed correctly""" + + err_msg = "Parameters: strategy = %s, missing_values = %s, " \ + "axis = {0}, sparse = {1}" % (strategy, missing_values) + + # Normal matrix, axis = 0 + imputer = Imputer(missing_values, strategy=strategy, axis=0) + X_trans = imputer.fit(X).transform(X.copy()) + assert_array_equal(imputer.statistics_, statistics, + err_msg.format(0, False)) + assert_array_equal(X_trans, X_true, err_msg.format(0, False)) + + # Normal matrix, axis = 1 + imputer = Imputer(missing_values, strategy=strategy, axis=1) + imputer.fit(X.transpose()) + if np.isnan(statistics).any(): + assert_raises(ValueError, imputer.transform, X.copy().transpose()) + else: + X_trans = imputer.transform(X.copy().transpose()) + assert_array_equal(X_trans, X_true.transpose(), + err_msg.format(1, False)) + + # Sparse matrix, axis = 0 + imputer = Imputer(missing_values, strategy=strategy, axis=0) + imputer.fit(sparse.csc_matrix(X)) + X_trans = imputer.transform(sparse.csc_matrix(X.copy())) + + if sparse.issparse(X_trans): + X_trans = X_trans.toarray() + + assert_array_equal(imputer.statistics_, statistics, + err_msg.format(0, True)) + assert_array_equal(X_trans, X_true, err_msg.format(0, True)) + + # Sparse matrix, axis = 1 + imputer = Imputer(missing_values, strategy=strategy, axis=1) + imputer.fit(sparse.csc_matrix(X.transpose())) + if np.isnan(statistics).any(): + assert_raises(ValueError, imputer.transform, + sparse.csc_matrix(X.copy().transpose())) + else: + X_trans = imputer.transform(sparse.csc_matrix(X.copy().transpose())) + + if sparse.issparse(X_trans): + X_trans = X_trans.toarray() + + assert_array_equal(X_trans, X_true.transpose(), + err_msg.format(1, True)) + + + def test_imputation_shape(self): + """Verify the shapes of the imputed matrix for different strategies.""" + X = np.random.randn(10, 2) + X[::2] = np.nan + + for strategy in ['mean', 'median', 'most_frequent']: + imputer = Imputer(strategy=strategy) + X_imputed = imputer.fit_transform(X) + assert_equal(X_imputed.shape, (10, 2)) + X_imputed = imputer.fit_transform(sparse.csr_matrix(X)) + assert_equal(X_imputed.shape, (10, 2)) + + + def test_imputation_mean_median_only_zero(self): + """Test imputation using the mean and median strategies, when + missing_values == 0.""" + X = np.array([ + [np.nan, 0, 0, 0, 5], + [np.nan, 1, 0, np.nan, 3], + [np.nan, 2, 0, 0, 0], + [np.nan, 6, 0, 5, 13], + ]) + + X_imputed_mean = np.array([ + [3, 5], + [1, 3], + [2, 7], + [6, 13], + ]) + statistics_mean = [np.nan, 3, np.nan, np.nan, 7] + + # Behaviour of median with NaN is undefined, e.g. 
different results in + # np.median and np.ma.median + X_for_median = X[:, [0, 1, 2, 4]] + X_imputed_median = np.array([ + [2, 5], + [1, 3], + [2, 5], + [6, 13], + ]) + statistics_median = [np.nan, 2, np.nan, 5] + + self._check_statistics(X, X_imputed_mean, "mean", statistics_mean, 0) + self._check_statistics(X_for_median, X_imputed_median, "median", + statistics_median, 0) + + + def test_imputation_mean_median(self): + """Test imputation using the mean and median strategies, when + missing_values != 0.""" + rng = np.random.RandomState(0) + + dim = 10 + dec = 10 + shape = (dim * dim, dim + dec) + + zeros = np.zeros(shape[0]) + values = np.arange(1, shape[0] + 1) + values[4::2] = - values[4::2] + + tests = [("mean", "NaN", lambda z, v, p: safe_mean(np.hstack((z, v)))), + ("mean", 0, lambda z, v, p: np.mean(v)), + ("median", "NaN", lambda z, v, p: safe_median(np.hstack((z, v)))), + ("median", 0, lambda z, v, p: np.median(v))] + + for strategy, test_missing_values, true_value_fun in tests: + X = np.empty(shape) + X_true = np.empty(shape) + true_statistics = np.empty(shape[1]) + + # Create a matrix X with columns + # - with only zeros, + # - with only missing values + # - with zeros, missing values and values + # And a matrix X_true containing all true values + for j in range(shape[1]): + nb_zeros = (j - dec + 1 > 0) * (j - dec + 1) * (j - dec + 1) + nb_missing_values = max(shape[0] + dec * dec + - (j + dec) * (j + dec), 0) + nb_values = shape[0] - nb_zeros - nb_missing_values + + z = zeros[:nb_zeros] + p = np.repeat(test_missing_values, nb_missing_values) + v = values[rng.permutation(len(values))[:nb_values]] + true_statistics[j] = true_value_fun(z, v, p) + + # Create the columns + X[:, j] = np.hstack((v, z, p)) + + if 0 == test_missing_values: + X_true[:, j] = np.hstack((v, + np.repeat( + true_statistics[j], + nb_missing_values + nb_zeros))) + else: + X_true[:, j] = np.hstack((v, + z, + np.repeat(true_statistics[j], + nb_missing_values))) + + # Shuffle them the same way + np.random.RandomState(j).shuffle(X[:, j]) + np.random.RandomState(j).shuffle(X_true[:, j]) + + # Mean doesn't support columns containing NaNs, median does + if strategy == "median": + cols_to_keep = ~np.isnan(X_true).any(axis=0) + else: + cols_to_keep = ~np.isnan(X_true).all(axis=0) + + X_true = X_true[:, cols_to_keep] + + self._check_statistics(X, X_true, strategy, + true_statistics, test_missing_values) + + + def test_imputation_median_special_cases(self): + """Test median imputation with sparse boundary cases + """ + X = np.array([ + [0, np.nan, np.nan], # odd: implicit zero + [5, np.nan, np.nan], # odd: explicit nonzero + [0, 0, np.nan], # even: average two zeros + [-5, 0, np.nan], # even: avg zero and neg + [0, 5, np.nan], # even: avg zero and pos + [4, 5, np.nan], # even: avg nonzeros + [-4, -5, np.nan], # even: avg negatives + [-1, 2, np.nan], # even: crossing neg and pos + ]).transpose() + + X_imputed_median = np.array([ + [0, 0, 0], + [5, 5, 5], + [0, 0, 0], + [-5, 0, -2.5], + [0, 5, 2.5], + [4, 5, 4.5], + [-4, -5, -4.5], + [-1, 2, .5], + ]).transpose() + statistics_median = [0, 5, 0, -2.5, 2.5, 4.5, -4.5, .5] + + self._check_statistics(X, X_imputed_median, "median", + statistics_median, 'NaN') + + + def test_imputation_most_frequent(self): + """Test imputation using the most-frequent strategy.""" + X = np.array([ + [-1, -1, 0, 5], + [-1, 2, -1, 3], + [-1, 1, 3, -1], + [-1, 2, 3, 7], + ]) + + X_true = np.array([ + [2, 0, 5], + [2, 3, 3], + [1, 3, 3], + [2, 3, 7], + ]) + + # scipy.stats.mode, used in Imputer, doesn't 
return the first most + # frequent as promised in the doc but the lowest most frequent. When this + # test will fail after an update of scipy, Imputer will need to be updated + # to be consistent with the new (correct) behaviour + self._check_statistics(X, X_true, "most_frequent", [np.nan, 2, 3, 3], + -1) + + + def test_imputation_pipeline_grid_search(self): + """Test imputation within a pipeline + gridsearch.""" + pipeline = Pipeline([('imputer', Imputer(missing_values=0)), + ('tree', tree.DecisionTreeRegressor(random_state=0))]) + + parameters = { + 'imputer__strategy': ["mean", "median", "most_frequent"], + 'imputer__axis': [0, 1] + } + + l = 100 + X = sparse_random_matrix(l, l, density=0.10) + Y = sparse_random_matrix(l, 1, density=0.10).toarray() + gs = grid_search.GridSearchCV(pipeline, parameters) + gs.fit(X, Y) + + + def test_imputation_pickle(self): + """Test for pickling imputers.""" + import pickle + + l = 100 + X = sparse_random_matrix(l, l, density=0.10) + + for strategy in ["mean", "median", "most_frequent"]: + imputer = Imputer(missing_values=0, strategy=strategy) + imputer.fit(X) + + imputer_pickled = pickle.loads(pickle.dumps(imputer)) + + assert_array_equal(imputer.transform(X.copy()), + imputer_pickled.transform(X.copy()), + "Fail to transform the data after pickling " + "(strategy = %s)" % (strategy)) + + + def test_imputation_copy(self): + """Test imputation with copy""" + X_orig = sparse_random_matrix(5, 5, density=0.75, random_state=0) + + # copy=True, dense => copy + X = X_orig.copy().toarray() + imputer = Imputer(missing_values=0, strategy="mean", copy=True) + Xt = imputer.fit(X).transform(X) + Xt[0, 0] = -1 + assert_false(np.all(X == Xt)) + + # copy=True, sparse csr => copy + X = X_orig.copy() + imputer = Imputer(missing_values=X.data[0], strategy="mean", copy=True) + Xt = imputer.fit(X).transform(X) + Xt.data[0] = -1 + assert_false(np.all(X.data == Xt.data)) + + # copy=False, dense => no copy + X = X_orig.copy().toarray() + imputer = Imputer(missing_values=0, strategy="mean", copy=False) + Xt = imputer.fit(X).transform(X) + Xt[0, 0] = -1 + assert_true(np.all(X == Xt)) + + # copy=False, sparse csr, axis=1 => no copy + X = X_orig.copy() + imputer = Imputer(missing_values=X.data[0], strategy="mean", + copy=False, axis=1) + Xt = imputer.fit(X).transform(X) + Xt.data[0] = -1 + assert_true(np.all(X.data == Xt.data)) + + # copy=False, sparse csc, axis=0 => no copy + X = X_orig.copy().tocsc() + imputer = Imputer(missing_values=X.data[0], strategy="mean", + copy=False, axis=0) + Xt = imputer.fit(X).transform(X) + Xt.data[0] = -1 + assert_true(np.all(X.data == Xt.data)) + + # copy=False, sparse csr, axis=0 => copy + X = X_orig.copy() + imputer = Imputer(missing_values=X.data[0], strategy="mean", + copy=False, axis=0) + Xt = imputer.fit(X).transform(X) + Xt.data[0] = -1 + assert_false(np.all(X.data == Xt.data)) + + # copy=False, sparse csc, axis=1 => copy + X = X_orig.copy().tocsc() + imputer = Imputer(missing_values=X.data[0], strategy="mean", + copy=False, axis=1) + Xt = imputer.fit(X).transform(X) + Xt.data[0] = -1 + assert_false(np.all(X.data == Xt.data)) + + # copy=False, sparse csr, axis=1, missing_values=0 => copy + X = X_orig.copy() + imputer = Imputer(missing_values=0, strategy="mean", + copy=False, axis=1) + Xt = imputer.fit(X).transform(X) + assert_false(sparse.issparse(Xt)) + + # Note: If X is sparse and if missing_values=0, then a (dense) copy of X is + # made, even if copy=False. 
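For orientation, here is a minimal usage sketch of the Imputer exercised above, restricted to the constructor arguments these tests rely on (missing_values, strategy, axis); the data is illustrative.

import numpy as np
from autosklearn.pipeline.implementations.Imputation import Imputer

X = np.array([[1., np.nan, 3.],
              [4., 5., np.nan],
              [7., 8., 9.]])
imputer = Imputer(missing_values='NaN', strategy='mean', axis=0)
X_imputed = imputer.fit(X).transform(X)
# With axis=0 each NaN is replaced by its column's mean over the observed
# values: column 1 -> (5 + 8) / 2 = 6.5, column 2 -> (3 + 9) / 2 = 6.0.
print(X_imputed)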
diff --git a/test/test_pipeline/implementations/test_minmaxscaler.py b/test/test_pipeline/implementations/test_minmaxscaler.py new file mode 100644 index 0000000000..d22e3e2ec3 --- /dev/null +++ b/test/test_pipeline/implementations/test_minmaxscaler.py @@ -0,0 +1,125 @@ +import unittest + +import numpy as np +from scipy import sparse +from sklearn.utils.testing import assert_array_almost_equal +from sklearn.datasets import load_iris + +from autosklearn.pipeline.util import get_dataset +from autosklearn.pipeline.implementations.MinMaxScaler import MinMaxScaler + + +class MinMaxScalerTest(unittest.TestCase): + def test_min_max_scaler_iris(self): + iris = load_iris() + X = iris.data + + scaler = MinMaxScaler() + # default params + X_trans = scaler.fit_transform(X) + assert_array_almost_equal(X_trans.min(axis=0), 0) + assert_array_almost_equal(X_trans.min(axis=0), 0) + assert_array_almost_equal(X_trans.max(axis=0), 1) + X_trans_inv = scaler.inverse_transform(X_trans) + assert_array_almost_equal(X, X_trans_inv) + + # not default params: min=1, max=2 + scaler = MinMaxScaler(feature_range=(1, 2)) + X_trans = scaler.fit_transform(X) + assert_array_almost_equal(X_trans.min(axis=0), 1) + assert_array_almost_equal(X_trans.max(axis=0), 2) + X_trans_inv = scaler.inverse_transform(X_trans) + assert_array_almost_equal(X, X_trans_inv) + + # min=-.5, max=.6 + scaler = MinMaxScaler(feature_range=(-.5, .6)) + X_trans = scaler.fit_transform(X) + assert_array_almost_equal(X_trans.min(axis=0), -.5) + assert_array_almost_equal(X_trans.max(axis=0), .6) + X_trans_inv = scaler.inverse_transform(X_trans) + assert_array_almost_equal(X, X_trans_inv) + + # raises on invalid range + scaler = MinMaxScaler(feature_range=(2, 1)) + self.assertRaises(ValueError, scaler.fit, X) + + def test_min_max_scaler_zero_variance_features(self): + """Check min max scaler on toy data with zero variance features""" + X = [[0., 1., +0.5], + [0., 1., -0.1], + [0., 1., +1.1]] + + X_new = [[+0., 2., 0.5], + [-1., 1., 0.0], + [+0., 1., 1.5]] + # default params + scaler = MinMaxScaler() + X_trans = scaler.fit_transform(X) + X_expected_0_1 = [[0., 0., 0.5], + [0., 0., 0.0], + [0., 0., 1.0]] + assert_array_almost_equal(X_trans, X_expected_0_1) + X_trans_inv = scaler.inverse_transform(X_trans) + assert_array_almost_equal(X, X_trans_inv) + + X_trans_new = scaler.transform(X_new) + X_expected_0_1_new = [[+0., 1., 0.500], + [-1., 0., 0.083], + [+0., 0., 1.333]] + assert_array_almost_equal(X_trans_new, X_expected_0_1_new, decimal=2) + + # not default params + scaler = MinMaxScaler(feature_range=(1, 2)) + X_trans = scaler.fit_transform(X) + X_expected_1_2 = [[1., 1., 1.5], + [1., 1., 1.0], + [1., 1., 2.0]] + assert_array_almost_equal(X_trans, X_expected_1_2) + + + @unittest.skip("I don't understand the original unit test. Thus, I forbid " + "1d input to the scaling function.") + def test_min_max_scaler_1d(self): + """Test scaling of dataset along single axis""" + rng = np.random.RandomState(0) + X = rng.randn(5) + X_orig_copy = X.copy() + + scaler = MinMaxScaler() + X_scaled = scaler.fit(X).transform(X) + assert_array_almost_equal(X_scaled.min(axis=0), 0.0) + assert_array_almost_equal(X_scaled.max(axis=0), 1.0) + + # check inverse transform + X_scaled_back = scaler.inverse_transform(X_scaled) + assert_array_almost_equal(X_scaled_back, X_orig_copy) + + # Test with 1D list + X = [0., 1., 2, 0.4, 1.] 
+ scaler = MinMaxScaler() + X_scaled = scaler.fit(X).transform(X) + assert_array_almost_equal(X_scaled.min(axis=0), 0.0) + assert_array_almost_equal(X_scaled.max(axis=0), 1.0) + + def test_min_max_scaler_sparse_boston_data(self): + # Use the boston housing dataset, because column three is 1HotEncoded! + # This is important to test; because the normal sklearn rescaler + # would set all values of the 1Hot Encoded column to zero, while we + # keep the values at 1. + X_train, Y_train, X_test, Y_test = get_dataset('boston', + make_sparse=True) + num_data_points = len(X_train.data) + expected_max_values = [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] + expected_max_values = np.array(expected_max_values).reshape((1, -1)) + + scaler = MinMaxScaler() + scaler.fit(X_train, Y_train) + transformation = scaler.transform(X_train) + + assert_array_almost_equal(np.array(transformation.todense().min(axis=0)), + np.zeros((1, 13))) + assert_array_almost_equal(np.array(transformation.todense().max(axis=0)), + expected_max_values) + # Test that the matrix is still sparse + self.assertTrue(sparse.issparse(transformation)) + self.assertEqual(num_data_points, len(transformation.data)) \ No newline at end of file diff --git a/test/test_pipeline/implementations/test_standard_scaler.py b/test/test_pipeline/implementations/test_standard_scaler.py new file mode 100644 index 0000000000..044f3edf8a --- /dev/null +++ b/test/test_pipeline/implementations/test_standard_scaler.py @@ -0,0 +1,158 @@ +from itertools import chain +import unittest + +import numpy as np +import scipy.sparse +from sklearn.utils.testing import assert_array_almost_equal +from sklearn.preprocessing.data import scale + +from autosklearn.pipeline.implementations.StandardScaler import StandardScaler +from autosklearn.pipeline.util import get_dataset + +matrix1 = [[0, 1, 2], + [0, 1, 2], + [0, 1, 2]] + + +class TestStandardScaler(unittest.TestCase): + def test_scaler_1d(self): + """Test scaling of dataset along single axis""" + rng = np.random.RandomState(0) + X = rng.randn(5) + X_orig_copy = X.copy() + + scaler = StandardScaler() + X_scaled = scaler.fit(X).transform(X, copy=False) + assert_array_almost_equal(X_scaled.mean(axis=0), 0.0) + assert_array_almost_equal(X_scaled.std(axis=0), 1.0) + + # check inverse transform + X_scaled_back = scaler.inverse_transform(X_scaled) + assert_array_almost_equal(X_scaled_back, X_orig_copy) + + # Test with 1D list + X = [0., 1., 2, 0.4, 1.] 
+ scaler = StandardScaler() + X_scaled = scaler.fit(X).transform(X, copy=False) + assert_array_almost_equal(X_scaled.mean(axis=0), 0.0) + assert_array_almost_equal(X_scaled.std(axis=0), 1.0) + + X_scaled = scale(X) + assert_array_almost_equal(X_scaled.mean(axis=0), 0.0) + assert_array_almost_equal(X_scaled.std(axis=0), 1.0) + + # Test with sparse list + X = scipy.sparse.coo_matrix((np.random.random((10,)), + ([i**2 for i in range(10)], + [0 for i in range(10)]))) + X = X.tocsr() + scaler = StandardScaler() + X_scaled = scaler.fit(X).transform(X, copy=False) + + self.assertFalse(np.any(np.isnan(X_scaled.data))) + self.assertAlmostEqual(X_scaled.mean(), 0) + self.assertAlmostEqual(np.sqrt(X_scaled.data.var()), 1) + + # Check that X has not been copied + # self.assertTrue(X_scaled is X) + # Check that the matrix is still sparse + self.assertEqual(len(X.indices), 10) + + def test_scaler_2d_arrays(self): + """Test scaling of 2d array along first axis""" + rng = np.random.RandomState(0) + X = rng.randn(4, 5) + X[:, 0] = 0.0 # first feature is always of zero + + scaler = StandardScaler() + X_scaled = scaler.fit(X).transform(X, copy=True) + self.assertFalse(np.any(np.isnan(X_scaled))) + + assert_array_almost_equal(X_scaled.mean(axis=0), 5 * [0.0]) + assert_array_almost_equal(X_scaled.std(axis=0), [0., 1., 1., 1., 1.]) + # Check that X has been copied + self.assertTrue(X_scaled is not X) + + # check inverse transform + X_scaled_back = scaler.inverse_transform(X_scaled) + self.assertTrue(X_scaled_back is not X) + self.assertTrue(X_scaled_back is not X_scaled) + assert_array_almost_equal(X_scaled_back, X) + + X_scaled = scale(X, axis=1, with_std=False) + self.assertFalse(np.any(np.isnan(X_scaled))) + assert_array_almost_equal(X_scaled.mean(axis=1), 4 * [0.0]) + X_scaled = scale(X, axis=1, with_std=True) + self.assertFalse(np.any(np.isnan(X_scaled))) + assert_array_almost_equal(X_scaled.mean(axis=1), 4 * [0.0]) + assert_array_almost_equal(X_scaled.std(axis=1), 4 * [1.0]) + # Check that the data hasn't been modified + self.assertTrue(X_scaled is not X) + + X_scaled = scaler.fit(X).transform(X, copy=False) + self.assertFalse(np.any(np.isnan(X_scaled))) + assert_array_almost_equal(X_scaled.mean(axis=0), 5 * [0.0]) + assert_array_almost_equal(X_scaled.std(axis=0), [0., 1., 1., 1., 1.]) + # Check that X has not been copied + self.assertTrue(X_scaled is X) + + X = rng.randn(4, 5) + X[:, 0] = 1.0 # first feature is a constant, non zero feature + scaler = StandardScaler() + X_scaled = scaler.fit(X).transform(X, copy=True) + self.assertFalse(np.any(np.isnan(X_scaled))) + assert_array_almost_equal(X_scaled.mean(axis=0), 5 * [0.0]) + assert_array_almost_equal(X_scaled.std(axis=0), [0., 1., 1., 1., 1.]) + # Check that X has not been copied + self.assertTrue(X_scaled is not X) + + # Same thing for sparse matrices... 
+ X = scipy.sparse.coo_matrix((np.random.random((12,)), + ([i for i in range(12)], + [int(i / 3) for i in range(12)]))) + X = X.tocsr() + scaler = StandardScaler() + X_scaled = scaler.fit(X).transform(X, copy=False) + + self.assertFalse(np.any(np.isnan(X_scaled.data))) + assert_array_almost_equal( + [X_scaled.data[X_scaled.indptr[i]:X_scaled.indptr[i + 1]].mean() + for i in range(X_scaled.shape[1])], + np.zeros((4, ), dtype=np.float64)) + assert_array_almost_equal(np.sqrt([ + X_scaled.data[X_scaled.indptr[i]:X_scaled.indptr[i + 1]].var() + for i in range(X_scaled.shape[1])]), + np.ones((4, ), dtype=np.float64)) + + # Because we change the sparse format to csc, we cannot assert that + # the matrix did not change! + # self.assertTrue(X_scaled is X) + # Check that the matrix is still sparse + self.assertEqual(len(X.indices), 12) + + # TODO add more tests from scikit-learn here: + # https://github.com/scikit-learn/scikit-learn/blob/0.15.X/sklearn/preprocessing/tests/test_data.py + + def test_standard_scaler_sparse_boston_data(self): + X_train, Y_train, X_test, Y_test = get_dataset('boston', + make_sparse=True) + num_data_points = len(X_train.data) + + scaler = StandardScaler() + scaler.fit(X_train, Y_train) + tr = scaler.transform(X_train) + + # Test this for every single dimension! + means = np.array([tr.data[tr.indptr[i]:tr.indptr[i + 1]].mean() + for i in range(13)]) + vars = np.array([tr.data[tr.indptr[i]:tr.indptr[i + 1]].var() + for i in range(13)]) + + for i in chain(range(1, 3), range(4, 13)): + self.assertAlmostEqual(means[i], 0, 2) + self.assertAlmostEqual(vars[i], 1, 2) + self.assertAlmostEqual(means[3], 1) + self.assertAlmostEqual(vars[3], 0) + # Test that the matrix is still sparse + self.assertTrue(scipy.sparse.issparse(tr)) + self.assertEqual(num_data_points, len(tr.data)) diff --git a/test/test_pipeline/implementations/test_util.py b/test/test_pipeline/implementations/test_util.py new file mode 100644 index 0000000000..cc7a1fc714 --- /dev/null +++ b/test/test_pipeline/implementations/test_util.py @@ -0,0 +1,42 @@ +import unittest + +import numpy as np +from sklearn.utils.testing import assert_array_almost_equal + +from autosklearn.pipeline.implementations.util import softmax + +class UtilTest(unittest.TestCase): + def test_softmax_binary(self): + df = np.array([-40.00643897, 34.69754581, 23.71181359 -29.89724287, + 27.06071791, -37.78334103, -40.15812461, 40.16139229, + -27.85887801, 42.67404756, -36.89753589 -36.45148009, + 54.68976306, 19.47886562, -49.99821027, -35.70205302, + -40.59639267, 32.96343916, -39.23777841, -37.86535019, + -33.10196906, 26.84144377, -36.8569686]) + probas = softmax(df) + expected = [[1., 0.], [0., 1.], [0.99794501, 0.00205499], + [0., 1.], [1., 0.], [1., 0.], [0., 1.], + [1., 0.], [0., 1.], [1., 0.], [0., 1.], + [0., 1.], [1., 0.], [1., 0.], [1., 0.], + [0., 1.], [1., 0.], [1., 0.], [1., 0.], + [0., 1.], [1., 0.]] + assert_array_almost_equal(expected, probas) + + def test_softmax(self): + df = np.array([[2.75021367e+10, -8.83772371e-01, -2.20516715e+27], + [-2.10848072e+11, 2.35024444e-01, 5.20106536e+25]]) + # With a numerically unstable softmax, the output would be something + # like this: + # [[ 0. 0. nan] + # [nan 0. 
0.]] + probas = softmax(df) + expected = np.array([[1, 0, 0], [0, 0, 1]]) + self.assertTrue((expected == probas).all()) + + df = np.array([[0.1, 0.6, 0.3], [0.2, 0.3, 0.5]]) + probas = softmax(df) + expected = np.array([[0.25838965, 0.42601251, 0.31559783], + [0.28943311, 0.31987306, 0.39069383]]) + assert_array_almost_equal(expected, probas) + + diff --git a/test/test_pipeline/test_base.py b/test/test_pipeline/test_base.py new file mode 100644 index 0000000000..bc9663dcf1 --- /dev/null +++ b/test/test_pipeline/test_base.py @@ -0,0 +1,92 @@ +import unittest + +import HPOlibConfigSpace.configuration_space + +import autosklearn.pipeline.base +import autosklearn.pipeline.components.feature_preprocessing +import autosklearn.pipeline.components.classification + +class BaseTest(unittest.TestCase): + def test_get_hyperparameter_configuration_space_3choices(self): + base = autosklearn.pipeline.base.BasePipeline + + cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() + dataset_properties = {'target_type': 'classification'} + exclude = {} + include = {} + pipeline = [('p0', autosklearn.pipeline.components.feature_preprocessing._preprocessors[ + 'preprocessor']), + ('p1', autosklearn.pipeline.components.feature_preprocessing._preprocessors[ + 'preprocessor']), + ('c', autosklearn.pipeline.components.classification._classifiers[ + 'classifier'])] + cs = base._get_hyperparameter_search_space(cs, dataset_properties, + exclude, include, pipeline) + + self.assertEqual(len(cs.get_hyperparameter("p0:__choice__").choices), 14) + self.assertEqual(len(cs.get_hyperparameter("p1:__choice__").choices), 16) + + #for clause in sorted([str(clause) for clause in cs.forbidden_clauses]): + # print clause + self.assertEqual(151, len(cs.forbidden_clauses)) + + cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() + dataset_properties = {'target_type': 'classification', 'signed': True} + include = {'c': ['multinomial_nb']} + cs = base._get_hyperparameter_search_space(cs, dataset_properties, + exclude, include, pipeline) + self.assertEqual(len(cs.get_hyperparameter("p0:__choice__").choices), + 14) + self.assertEqual(len(cs.get_hyperparameter("p1:__choice__").choices), + 10) + self.assertEqual(len(cs.get_hyperparameter("c:__choice__").choices), + 1) + # Mostly combinations of p0 making the data unsigned and p1 not + # changing the values of the data points + self.assertEqual(74, len(cs.forbidden_clauses)) + + + cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() + dataset_properties = {'target_type': 'classification', 'signed': True} + include = {} + cs = base._get_hyperparameter_search_space(cs, dataset_properties, + exclude, include, pipeline) + self.assertEqual(len(cs.get_hyperparameter("p0:__choice__").choices), + 14) + self.assertEqual(len(cs.get_hyperparameter("p1:__choice__").choices), + 16) + self.assertEqual(len(cs.get_hyperparameter("c:__choice__").choices), + 16) + self.assertEqual(126, len(cs.forbidden_clauses)) + + + cs = HPOlibConfigSpace.configuration_space.ConfigurationSpace() + dataset_properties = {'target_type': 'classification', 'sparse': True} + cs = base._get_hyperparameter_search_space(cs, dataset_properties, + exclude, include, pipeline) + self.assertEqual(len(cs.get_hyperparameter("p0:__choice__").choices), + 11) + self.assertEqual(len(cs.get_hyperparameter("p1:__choice__").choices), + 16) + self.assertEqual(409, len(cs.forbidden_clauses)) + #for clause in sorted([str(clause) for clause in cs.forbidden_clauses]): + # print(clause) + + cs = 
HPOlibConfigSpace.configuration_space.ConfigurationSpace() + dataset_properties = {'target_type': 'classification', + 'sparse': True, 'signed': True} + cs = base._get_hyperparameter_search_space(cs, dataset_properties, + exclude, include, pipeline) + + #for clause in sorted([str(clause) for clause in cs.forbidden_clauses]): + # print(clause) + + self.assertEqual(len(cs.get_hyperparameter("p0:__choice__").choices), + 11) + self.assertEqual(len(cs.get_hyperparameter("p1:__choice__").choices), + 16) + # Data is guaranteed to be positive in cases like densifier, + # extra_trees_preproc, multinomial_nb -> less constraints + self.assertEqual(364, len(cs.forbidden_clauses)) + + diff --git a/test/test_pipeline/test_classification.py b/test/test_pipeline/test_classification.py new file mode 100644 index 0000000000..926198d2df --- /dev/null +++ b/test/test_pipeline/test_classification.py @@ -0,0 +1,685 @@ +import os +import resource +import sys +import traceback +import unittest + +import mock +import numpy as np +import sklearn.datasets +import sklearn.decomposition +import sklearn.cross_validation +import sklearn.ensemble +import sklearn.svm +from sklearn.utils.testing import assert_array_almost_equal + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace, \ + Configuration +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter + +from autosklearn.pipeline.classification import SimpleClassificationPipeline +from autosklearn.pipeline.components.base import \ + AutoSklearnClassificationAlgorithm, AutoSklearnPreprocessingAlgorithm +import autosklearn.pipeline.components.classification as classification_components +import autosklearn.pipeline.components.feature_preprocessing as preprocessing_components +from autosklearn.pipeline.util import get_dataset +from autosklearn.pipeline.constants import * + + +class SimpleClassificationPipelineTest(unittest.TestCase): + def test_io_dict(self): + classifiers = classification_components._classifiers + for c in classifiers: + if classifiers[c] == classification_components.ClassifierChoice: + continue + props = classifiers[c].get_properties() + self.assertIn('input', props) + self.assertIn('output', props) + inp = props['input'] + output = props['output'] + + self.assertIsInstance(inp, tuple) + self.assertIsInstance(output, tuple) + for i in inp: + self.assertIn(i, (SPARSE, DENSE, SIGNED_DATA, UNSIGNED_DATA)) + self.assertEqual(output, (PREDICTIONS,)) + self.assertIn('handles_regression', props) + self.assertFalse(props['handles_regression']) + self.assertIn('handles_classification', props) + self.assertIn('handles_multiclass', props) + self.assertIn('handles_multilabel', props) + + def test_find_classifiers(self): + classifiers = classification_components._classifiers + self.assertGreaterEqual(len(classifiers), 2) + for key in classifiers: + if hasattr(classifiers[key], 'get_components'): + continue + self.assertIn(AutoSklearnClassificationAlgorithm, + classifiers[key].__bases__) + + def test_find_preprocessors(self): + preprocessors = preprocessing_components._preprocessors + self.assertGreaterEqual(len(preprocessors), 1) + for key in preprocessors: + if hasattr(preprocessors[key], 'get_components'): + continue + self.assertIn(AutoSklearnPreprocessingAlgorithm, + preprocessors[key].__bases__) + + def test_default_configuration(self): + for i in range(2): + cs = SimpleClassificationPipeline.get_hyperparameter_search_space() + default = cs.get_default_configuration() + X_train, Y_train, X_test, Y_test = 
get_dataset(dataset='iris') + auto = SimpleClassificationPipeline(default) + auto = auto.fit(X_train, Y_train) + predictions = auto.predict(X_test) + self.assertAlmostEqual(0.9599999999999995, + sklearn.metrics.accuracy_score(predictions, Y_test)) + scores = auto.predict_proba(X_test) + + def test_repr(self): + cs = SimpleClassificationPipeline.get_hyperparameter_search_space() + default = cs.get_default_configuration() + representation = repr(SimpleClassificationPipeline(default)) + cls = eval(representation) + self.assertIsInstance(cls, SimpleClassificationPipeline) + + def test_multilabel(self): + # Use a limit of ~4GiB + limit = 4000 * 1024 * 1024 + resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) + + dataset_properties = {'multilabel': True} + cs = SimpleClassificationPipeline.get_hyperparameter_search_space(dataset_properties=dataset_properties) + + print(cs) + cs.seed(5) + + for i in range(50): + X, Y = sklearn.datasets.\ + make_multilabel_classification(n_samples=150, + n_features=20, + n_classes=5, + n_labels=2, + length=50, + allow_unlabeled=True, + sparse=False, + return_indicator=True, + return_distributions=False, + random_state=1) + X_train = X[:100, :] + Y_train = Y[:100, :] + X_test = X[101:, :] + Y_test = Y[101:, ] + + config = cs.sample_configuration() + config._populate_values() + + if 'classifier:passive_aggressive:n_iter' in config: + config._values['classifier:passive_aggressive:n_iter'] = 5 + if 'classifier:sgd:n_iter' in config: + config._values['classifier:sgd:n_iter'] = 5 + + cls = SimpleClassificationPipeline(config, random_state=1) + print(config) + try: + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + predictions = cls.predict(X_test) + self.assertIsInstance(predictions, np.ndarray) + predicted_probabilities = cls.predict_proba(X_test_) + [self.assertIsInstance(i, np.ndarray) for i in predicted_probabilities] + except np.linalg.LinAlgError: + continue + except ValueError as e: + if "Floating-point under-/overflow occurred at epoch" in \ + e.args[0] or \ + "removed all features" in e.args[0] or \ + "all features are discarded" in e.args[0]: + continue + else: + print(config) + print(traceback.format_exc()) + raise e + except RuntimeWarning as e: + if "invalid value encountered in sqrt" in e.args[0]: + continue + elif "divide by zero encountered in" in e.args[0]: + continue + elif "invalid value encountered in divide" in e.args[0]: + continue + elif "invalid value encountered in true_divide" in e.args[0]: + continue + else: + print(config) + print(traceback.format_exc()) + raise e + except UserWarning as e: + if "FastICA did not converge" in e.args[0]: + continue + else: + print(config) + print(traceback.format_exc()) + raise e + except MemoryError as e: + continue + + def test_configurations(self): + # Use a limit of ~4GiB + limit = 4000 * 1024 * 1024 + resource.setrlimit(resource.RLIMIT_AS, (limit, limit)) + + cs = SimpleClassificationPipeline.get_hyperparameter_search_space() + + print(cs) + cs.seed(1) + + for i in range(10): + config = cs.sample_configuration() + config._populate_values() + if config['classifier:passive_aggressive:n_iter'] is not None: + config._values['classifier:passive_aggressive:n_iter'] = 5 + if config['classifier:sgd:n_iter'] is not None: + config._values['classifier:sgd:n_iter'] = 5 + + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + cls = SimpleClassificationPipeline(config, random_state=1) + print(config) + try: + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + predictions = cls.predict(X_test) + 
self.assertIsInstance(predictions, np.ndarray)
+                predicted_probabilities = cls.predict_proba(X_test_)
+                self.assertIsInstance(predicted_probabilities, np.ndarray)
+            except ValueError as e:
+                if "Floating-point under-/overflow occurred at epoch" in \
+                        e.args[0] or \
+                        "removed all features" in e.args[0] or \
+                        "all features are discarded" in e.args[0]:
+                    continue
+                else:
+                    print(config)
+                    print(traceback.format_exc())
+                    raise e
+            except RuntimeWarning as e:
+                if "invalid value encountered in sqrt" in e.args[0]:
+                    continue
+                elif "divide by zero encountered in" in e.args[0]:
+                    continue
+                elif "invalid value encountered in divide" in e.args[0]:
+                    continue
+                elif "invalid value encountered in true_divide" in e.args[0]:
+                    continue
+                else:
+                    print(config)
+                    print(traceback.format_exc())
+                    raise e
+            except UserWarning as e:
+                if "FastICA did not converge" in e.args[0]:
+                    continue
+                else:
+                    print(config)
+                    print(traceback.format_exc())
+                    raise e
+            except MemoryError as e:
+                continue
+
+    def test_configurations_signed_data(self):
+        # Use a limit of ~4GiB
+        limit = 4000 * 1024 * 1024
+        resource.setrlimit(resource.RLIMIT_AS, (limit, limit))
+
+        cs = SimpleClassificationPipeline.get_hyperparameter_search_space(
+            dataset_properties={'signed': True})
+
+        print(cs)
+
+        for i in range(10):
+            config = cs.sample_configuration()
+            config._populate_values()
+            if config['classifier:passive_aggressive:n_iter'] is not None:
+                config._values['classifier:passive_aggressive:n_iter'] = 5
+            if config['classifier:sgd:n_iter'] is not None:
+                config._values['classifier:sgd:n_iter'] = 5
+
+            X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits')
+            cls = SimpleClassificationPipeline(config, random_state=1)
+            print(config)
+            try:
+                cls.fit(X_train, Y_train)
+                X_test_ = X_test.copy()
+                predictions = cls.predict(X_test)
+                self.assertIsInstance(predictions, np.ndarray)
+                predicted_probabilities = cls.predict_proba(X_test_)
+                self.assertIsInstance(predicted_probabilities, np.ndarray)
+            except ValueError as e:
+                if "Floating-point under-/overflow occurred at epoch" in \
+                        e.args[0] or \
+                        "removed all features" in e.args[0] or \
+                        "all features are discarded" in e.args[0]:
+                    continue
+                else:
+                    print(config)
+                    print(traceback.format_exc())
+                    raise e
+            except RuntimeWarning as e:
+                if "invalid value encountered in sqrt" in e.args[0]:
+                    continue
+                elif "divide by zero encountered in" in e.args[0]:
+                    continue
+                elif "invalid value encountered in divide" in e.args[0]:
+                    continue
+                elif "invalid value encountered in true_divide" in e.args[0]:
+                    continue
+                else:
+                    print(config)
+                    print(traceback.format_exc())
+                    raise e
+            except UserWarning as e:
+                if "FastICA did not converge" in e.args[0]:
+                    continue
+                else:
+                    print(config)
+                    print(traceback.format_exc())
+                    raise e
+            except MemoryError as e:
+                continue
+
+    def test_configurations_sparse(self):
+        # Use a limit of ~4GiB
+        limit = 4000 * 1024 * 1024
+        resource.setrlimit(resource.RLIMIT_AS, (limit, limit))
+
+        cs = SimpleClassificationPipeline.get_hyperparameter_search_space(
+            dataset_properties={'sparse': True})
+        print(cs)
+        for i in range(10):
+            config = cs.sample_configuration()
+            config._populate_values()
+            if config['classifier:passive_aggressive:n_iter'] is not None:
+                config._values['classifier:passive_aggressive:n_iter'] = 5
+            if config['classifier:sgd:n_iter'] is not None:
+                config._values['classifier:sgd:n_iter'] = 5
+
+            print(config)
+            X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits',
+                                                           make_sparse=True)
+            cls = SimpleClassificationPipeline(config, random_state=1)
+            try:
+                cls.fit(X_train, Y_train)
+                predictions = cls.predict(X_test)
+            except ValueError as e:
+                if "Floating-point under-/overflow occurred at epoch" in \
+                        e.args[0] or \
+                        "removed all features" in e.args[0] or \
+                        "all features are discarded" in e.args[0]:
+                    continue
+                else:
+                    print(config)
+                    traceback.print_tb(sys.exc_info()[2])
+                    raise e
+            except RuntimeWarning as e:
+                if "invalid value encountered in sqrt" in e.args[0]:
+                    continue
+                elif "divide by zero encountered in" in e.args[0]:
+                    continue
+                elif "invalid value encountered in divide" in e.args[0]:
+                    continue
+                elif "invalid value encountered in true_divide" in e.args[0]:
+                    continue
+                else:
+                    print(config)
+                    raise e
+            except UserWarning as e:
+                if "FastICA did not converge" in e.args[0]:
+                    continue
+                else:
+                    print(config)
+                    raise e
+
+    def test_configurations_categorical_data(self):
+        # Use a limit of ~4GiB
+        limit = 4000 * 1024 * 1024
+        resource.setrlimit(resource.RLIMIT_AS, (limit, limit))
+
+        cs = SimpleClassificationPipeline.get_hyperparameter_search_space(
+            dataset_properties={'sparse': True})
+        print(cs)
+        for i in range(10):
+            config = cs.sample_configuration()
+            config._populate_values()
+            if config['classifier:passive_aggressive:n_iter'] is not None:
+                config._values['classifier:passive_aggressive:n_iter'] = 5
+            if config['classifier:sgd:n_iter'] is not None:
+                config._values['classifier:sgd:n_iter'] = 5
+
+            print(config)
+            categorical = [True, True, True, False, False, True, True, True,
+                           False, True, True, True, True, True, True, True,
+                           True, True, True, True, True, True, True, True, True,
+                           True, True, True, True, True, True, True, False,
+                           False, False, True, True, True]
+            this_directory = os.path.dirname(__file__)
+            X = np.loadtxt(os.path.join(this_directory, "components",
+                                        "data_preprocessing", "dataset.pkl"))
+            y = X[:, -1].copy()
+            X = X[:, :-1]
+            X_train, X_test, Y_train, Y_test = \
+                sklearn.cross_validation.train_test_split(X, y)
+
+            cls = SimpleClassificationPipeline(config, random_state=1)
+            try:
+                cls.fit(X_train, Y_train,
+                        init_params={'one_hot_encoding:categorical_features': categorical})
+                predictions = cls.predict(X_test)
+            except ValueError as e:
+                if "Floating-point under-/overflow occurred at epoch" in \
+                        e.args[0] or \
+                        "removed all features" in e.args[0] or \
+                        "all features are discarded" in e.args[0]:
+                    continue
+                else:
+                    print(config)
+                    traceback.print_tb(sys.exc_info()[2])
+                    raise e
+            except RuntimeWarning as e:
+                if "invalid value encountered in sqrt" in e.args[0]:
+                    continue
+                elif "divide by zero encountered in" in e.args[0]:
+                    continue
+                elif "invalid value encountered in divide" in e.args[0]:
+                    continue
+                elif "invalid value encountered in true_divide" in e.args[0]:
+                    continue
+                else:
+                    print(config)
+                    raise e
+            except UserWarning as e:
+                if "FastICA did not converge" in e.args[0]:
+                    continue
+                else:
+                    print(config)
+                    raise e
+
+    def test_get_hyperparameter_search_space(self):
+        cs = SimpleClassificationPipeline.get_hyperparameter_search_space()
+        self.assertIsInstance(cs, ConfigurationSpace)
+        conditions = cs.get_conditions()
+
+        self.assertEqual(len(cs.get_hyperparameter(
+            'rescaling:__choice__').choices), 4)
+        self.assertEqual(len(cs.get_hyperparameter(
+            'classifier:__choice__').choices), 16)
+        self.assertEqual(len(cs.get_hyperparameter(
+            'preprocessor:__choice__').choices), 14)
+
+        hyperparameters = cs.get_hyperparameters()
+        self.assertEqual(144, len(hyperparameters))
+
+        #for hp in sorted([str(h) for h in hyperparameters]):
+        #    print hp
+
+        # Six hyperparameters are always active: the classifier,
+        # preprocessor, rescaling and balancing choices plus the imputation
+        # and one-hot encoding strategies; all others are conditional
+        self.assertEqual(len(hyperparameters) - 6, len(conditions))
+
+    def test_get_hyperparameter_search_space_include_exclude_models(self):
+        cs = SimpleClassificationPipeline.get_hyperparameter_search_space(
+            include={'classifier': ['libsvm_svc']})
+        self.assertEqual(cs.get_hyperparameter('classifier:__choice__'),
+                         CategoricalHyperparameter('classifier:__choice__',
+                                                   ['libsvm_svc']))
+
+        cs = SimpleClassificationPipeline.get_hyperparameter_search_space(
+            exclude={'classifier': ['libsvm_svc']})
+        self.assertNotIn('libsvm_svc', str(cs))
+
+        cs = SimpleClassificationPipeline.get_hyperparameter_search_space(
+            include={'preprocessor': ['select_percentile_classification']})
+        self.assertEqual(cs.get_hyperparameter('preprocessor:__choice__'),
+                         CategoricalHyperparameter('preprocessor:__choice__',
+                                                   ['select_percentile_classification']))
+
+        cs = SimpleClassificationPipeline.get_hyperparameter_search_space(
+            exclude={'preprocessor': ['select_percentile_classification']})
+        self.assertNotIn('select_percentile_classification', str(cs))
+
+    def test_get_hyperparameter_search_space_preprocessor_contradicts_default_classifier(self):
+        cs = SimpleClassificationPipeline.get_hyperparameter_search_space(
+            include={'preprocessor': ['densifier']},
+            dataset_properties={'sparse': True})
+        self.assertEqual(cs.get_hyperparameter('classifier:__choice__').default,
+                         'qda')
+
+        cs = SimpleClassificationPipeline.get_hyperparameter_search_space(
+            include={'preprocessor': ['nystroem_sampler']})
+        self.assertEqual(cs.get_hyperparameter('classifier:__choice__').default,
+                         'sgd')
+
+    def test_get_hyperparameter_search_space_only_forbidden_combinations(self):
+        self.assertRaisesRegexp(AssertionError, "No valid pipeline found.",
+                                SimpleClassificationPipeline.get_hyperparameter_search_space,
+                                include={'classifier': ['multinomial_nb'],
+                                         'preprocessor': ['pca']},
+                                dataset_properties={'sparse': True})
+
+        # It must also be caught that no classifier which can handle sparse
+        # data is located behind the densifier
+        self.assertRaisesRegexp(ValueError, "Cannot find a legal default "
+                                            "configuration.",
+                                SimpleClassificationPipeline.get_hyperparameter_search_space,
+                                include={'classifier': ['liblinear_svc'],
+                                         'preprocessor': ['densifier']},
+                                dataset_properties={'sparse': True})
+
+    @unittest.skip("Wait until HPOlibConfigSpace is fixed.")
+    def test_get_hyperparameter_search_space_dataset_properties(self):
+        cs_mc = SimpleClassificationPipeline.get_hyperparameter_search_space(
+            dataset_properties={'multiclass': True})
+        self.assertNotIn('bernoulli_nb', str(cs_mc))
+
+        cs_ml = SimpleClassificationPipeline.get_hyperparameter_search_space(
+            dataset_properties={'multilabel': True})
+        self.assertNotIn('k_nearest_neighbors', str(cs_ml))
+        self.assertNotIn('liblinear', str(cs_ml))
+        self.assertNotIn('libsvm_svc', str(cs_ml))
+        self.assertNotIn('sgd', str(cs_ml))
+
+        cs_sp = SimpleClassificationPipeline.get_hyperparameter_search_space(
+            dataset_properties={'sparse': True})
+        self.assertIn('extra_trees', str(cs_sp))
+        self.assertIn('gradient_boosting', str(cs_sp))
+        self.assertIn('random_forest', str(cs_sp))
+
+        cs_mc_ml = SimpleClassificationPipeline.get_hyperparameter_search_space(
+            dataset_properties={'multilabel': True, 'multiclass': True})
+        self.assertEqual(cs_ml, cs_mc_ml)
+
+    def test_predict_batched(self):
+        cs = SimpleClassificationPipeline.get_hyperparameter_search_space()
+        default =
cs.get_default_configuration() + cls = SimpleClassificationPipeline(default) + + # Multiclass + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict(X_test_) + cls_predict = mock.Mock(wraps=cls.pipeline_) + cls.pipeline_ = cls_predict + prediction = cls.predict(X_test, batch_size=20) + self.assertEqual((1647,), prediction.shape) + self.assertEqual(83, cls_predict.predict.call_count) + assert_array_almost_equal(prediction_, prediction) + + # Multilabel + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + Y_train = np.array([(y, 26 - y) for y in Y_train]) + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict(X_test_) + cls_predict = mock.Mock(wraps=cls.pipeline_) + cls.pipeline_ = cls_predict + prediction = cls.predict(X_test, batch_size=20) + self.assertEqual((1647, 2), prediction.shape) + self.assertEqual(83, cls_predict.predict.call_count) + assert_array_almost_equal(prediction_, prediction) + + def test_predict_batched_sparse(self): + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( + dataset_properties={'sparse': True}) + config = Configuration(cs, + values={"balancing:strategy": "none", + "classifier:__choice__": "random_forest", + "imputation:strategy": "mean", + "one_hot_encoding:minimum_fraction": 0.01, + "one_hot_encoding:use_minimum_fraction": "True", + "preprocessor:__choice__": "no_preprocessing", + 'classifier:random_forest:bootstrap': 'True', + 'classifier:random_forest:criterion': 'gini', + 'classifier:random_forest:max_depth': 'None', + 'classifier:random_forest:min_samples_split': 2, + 'classifier:random_forest:min_samples_leaf': 2, + 'classifier:random_forest:max_features': 0.5, + 'classifier:random_forest:max_leaf_nodes': 'None', + 'classifier:random_forest:n_estimators': 100, + 'classifier:random_forest:min_weight_fraction_leaf': 0.0, + "rescaling:__choice__": "min/max"}) + cls = SimpleClassificationPipeline(config) + + # Multiclass + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=True) + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict(X_test_) + cls_predict = mock.Mock(wraps=cls.pipeline_) + cls.pipeline_ = cls_predict + prediction = cls.predict(X_test, batch_size=20) + self.assertEqual((1647,), prediction.shape) + self.assertEqual(83, cls_predict.predict.call_count) + assert_array_almost_equal(prediction_, prediction) + + # Multilabel + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=True) + Y_train = np.array([(y, 26 - y) for y in Y_train]) + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict(X_test_) + cls_predict = mock.Mock(wraps=cls.pipeline_) + cls.pipeline_ = cls_predict + prediction = cls.predict(X_test, batch_size=20) + self.assertEqual((1647, 2), prediction.shape) + self.assertEqual(83, cls_predict.predict.call_count) + assert_array_almost_equal(prediction_, prediction) + + def test_predict_proba_batched(self): + cs = SimpleClassificationPipeline.get_hyperparameter_search_space() + default = cs.get_default_configuration() + + # Multiclass + cls = SimpleClassificationPipeline(default) + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict_proba(X_test_) + # The object behind the last step in the pipeline + cls_predict = mock.Mock(wraps=cls.pipeline_.steps[-1][1]) + cls.pipeline_.steps[-1] = 
("estimator", cls_predict) + prediction = cls.predict_proba(X_test, batch_size=20) + self.assertEqual((1647, 10), prediction.shape) + self.assertEqual(84, cls_predict.predict_proba.call_count) + assert_array_almost_equal(prediction_, prediction) + + # Multilabel + cls = SimpleClassificationPipeline(default) + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits') + Y_train = np.array([(y, 26 - y) for y in Y_train]) + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict_proba(X_test_) + cls_predict = mock.Mock(wraps=cls.pipeline_.steps[-1][1]) + cls.pipeline_.steps[-1] = ("estimator", cls_predict) + prediction = cls.predict_proba(X_test, batch_size=20) + self.assertIsInstance(prediction, list) + self.assertEqual(2, len(prediction)) + self.assertEqual((1647, 10), prediction[0].shape) + self.assertEqual((1647, 10), prediction[1].shape) + self.assertEqual(84, cls_predict.predict_proba.call_count) + assert_array_almost_equal(prediction_, prediction) + + def test_predict_proba_batched_sparse(self): + cs = SimpleClassificationPipeline.get_hyperparameter_search_space( + dataset_properties={'sparse': True}) + + config = Configuration(cs, + values={"balancing:strategy": "none", + "classifier:__choice__": "random_forest", + "imputation:strategy": "mean", + "one_hot_encoding:minimum_fraction": 0.01, + "one_hot_encoding:use_minimum_fraction": 'True', + "preprocessor:__choice__": "no_preprocessing", + 'classifier:random_forest:bootstrap': 'True', + 'classifier:random_forest:criterion': 'gini', + 'classifier:random_forest:max_depth': 'None', + 'classifier:random_forest:min_samples_split': 2, + 'classifier:random_forest:min_samples_leaf': 2, + 'classifier:random_forest:min_weight_fraction_leaf': 0.0, + 'classifier:random_forest:max_features': 0.5, + 'classifier:random_forest:max_leaf_nodes': 'None', + 'classifier:random_forest:n_estimators': 100, + "rescaling:__choice__": "min/max"}) + + # Multiclass + cls = SimpleClassificationPipeline(config) + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=True) + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict_proba(X_test_) + # The object behind the last step in the pipeline + cls_predict = mock.Mock(wraps=cls.pipeline_.steps[-1][1]) + cls.pipeline_.steps[-1] = ("estimator", cls_predict) + prediction = cls.predict_proba(X_test, batch_size=20) + self.assertEqual((1647, 10), prediction.shape) + self.assertEqual(84, cls_predict.predict_proba.call_count) + assert_array_almost_equal(prediction_, prediction) + + # Multilabel + cls = SimpleClassificationPipeline(config) + X_train, Y_train, X_test, Y_test = get_dataset(dataset='digits', + make_sparse=True) + Y_train = np.array([(y, 26 - y) for y in Y_train]) + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict_proba(X_test_) + cls_predict = mock.Mock(wraps=cls.pipeline_.steps[-1][1]) + cls.pipeline_.steps[-1] = ("estimator", cls_predict) + prediction = cls.predict_proba(X_test, batch_size=20) + self.assertIsInstance(prediction, list) + self.assertEqual(2, len(prediction)) + self.assertEqual((1647, 10), prediction[0].shape) + self.assertEqual((1647, 10), prediction[1].shape) + self.assertEqual(84, cls_predict.predict_proba.call_count) + assert_array_almost_equal(prediction_, prediction) + + @unittest.skip("test_check_random_state Not yet Implemented") + def test_check_random_state(self): + raise NotImplementedError() + + @unittest.skip("test_validate_input_X Not yet Implemented") + def 
test_validate_input_X(self): + raise NotImplementedError() + + @unittest.skip("test_validate_input_Y Not yet Implemented") + def test_validate_input_Y(self): + raise NotImplementedError() + + def test_set_params(self): + pass + + def test_get_params(self): + pass diff --git a/test/test_pipeline/test_create_searchspace_util_classification.py b/test/test_pipeline/test_create_searchspace_util_classification.py new file mode 100644 index 0000000000..a93296ecec --- /dev/null +++ b/test/test_pipeline/test_create_searchspace_util_classification.py @@ -0,0 +1,136 @@ +from collections import OrderedDict + +import unittest +import numpy + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter + +from autosklearn.pipeline.components.classification.liblinear_svc import LibLinear_SVC +from autosklearn.pipeline.components.classification.lda import LDA + +from autosklearn.pipeline.components.feature_preprocessing.pca import PCA +from autosklearn.pipeline.components.feature_preprocessing.truncatedSVD import TruncatedSVD +from autosklearn.pipeline.components.feature_preprocessing.no_preprocessing import NoPreprocessing +from autosklearn.pipeline.components.feature_preprocessing.fast_ica import FastICA +from autosklearn.pipeline.components.feature_preprocessing.random_trees_embedding import RandomTreesEmbedding +import autosklearn.pipeline.create_searchspace_util + +class TestCreateClassificationSearchspace(unittest.TestCase): + + def test_get_match_array_sparse_and_dense(self): + # preproc is empty + preprocessors = OrderedDict() + preprocessors['pca'] = PCA + classifiers = OrderedDict() + classifiers['lda'] = LDA + # Sparse + dense + class Preprocessors(object): + @classmethod + def get_available_components(self, *args, **kwargs): + return preprocessors + + class Classifiers(object): + @classmethod + def get_available_components(self, *args, **kwargs): + return classifiers + + # Dense + m = autosklearn.pipeline.create_searchspace_util.get_match_array( + pipeline=((0, PCA), (1, LDA)), dataset_properties={'sparse': True}) + self.assertEqual(numpy.sum(m), 0) + + m = autosklearn.pipeline.create_searchspace_util.get_match_array( + pipeline=((0, PCA), (1, LDA)), dataset_properties={'sparse': False}) + self.assertEqual(m, [[1]]) + + # Sparse + preprocessors['tSVD'] = TruncatedSVD + m = autosklearn.pipeline.create_searchspace_util.get_match_array( + pipeline=((0, Preprocessors), (1, LDA)), + dataset_properties={'sparse': True}) + self.assertEqual(m[0], [0]) # pca + self.assertEqual(m[1], [1]) # svd + + m = autosklearn.pipeline.create_searchspace_util.get_match_array( + pipeline=((0, Preprocessors), (1, LDA)), + dataset_properties={'sparse': False}) + self.assertEqual(m[0], [1]) # pca + self.assertEqual(m[1], [0]) # svd + + preprocessors['none'] = NoPreprocessing + m = autosklearn.pipeline.create_searchspace_util.get_match_array( + pipeline=((0, Preprocessors), (1, LDA)), + dataset_properties={'sparse': True}) + self.assertEqual(m[0, :], [0]) # pca + self.assertEqual(m[1, :], [1]) # tsvd + self.assertEqual(m[2, :], [0]) # none + + m = autosklearn.pipeline.create_searchspace_util.get_match_array( + pipeline=((0, Preprocessors), (1, LDA)), + dataset_properties={'sparse': False}) + self.assertEqual(m[0, :], [1]) # pca + self.assertEqual(m[1, :], [0]) # tsvd + self.assertEqual(m[2, :], [1]) # none + + classifiers['libsvm'] = LibLinear_SVC + m = autosklearn.pipeline.create_searchspace_util.get_match_array( + pipeline=((0, 
Preprocessors), (1, Classifiers)), + dataset_properties={'sparse': False}) + self.assertListEqual(list(m[0, :]), [1, 1]) # pca + self.assertListEqual(list(m[1, :]), [0, 0]) # tsvd + self.assertListEqual(list(m[2, :]), [1, 1]) # none + + m = autosklearn.pipeline.create_searchspace_util.get_match_array( + pipeline=((0, Preprocessors), (1, Classifiers)), + dataset_properties={'sparse': True}) + self.assertListEqual(list(m[0, :]), [0, 0]) # pca + self.assertListEqual(list(m[1, :]), [1, 1]) # tsvd + self.assertListEqual(list(m[2, :]), [0, 1]) # none + + # Do fancy 3d stuff + preprocessors['random_trees'] = RandomTreesEmbedding + m = autosklearn.pipeline.create_searchspace_util.get_match_array( + pipeline=((0, Preprocessors), (1, Preprocessors), (2, Classifiers)), + dataset_properties={'sparse': False}) + # PCA followed by truncated SVD is forbidden + self.assertEqual(list(m[0].flatten()), [1, 1, 0, 0, 1, 1, 0, 1]) + # Truncated SVD is forbidden + self.assertEqual(list(m[1].flatten()), [0, 0, 0, 0, 0, 0, 0, 0]) + # Truncated SVD is forbidden after no_preprocessing + self.assertEqual(list(m[2].flatten()), [1, 1, 0, 0, 1, 1, 0, 1]) + # PCA is forbidden, truncatedSVD allowed after random trees embedding + # lda only allowed after truncatedSVD + self.assertEqual(list(m[3].flatten()), [0, 0, 1, 1, 0, 1, 0, 1]) + + def test_get_match_array_signed_unsigned_and_binary(self): + pass + + @unittest.skip("Not currently working.") + def test_add_forbidden(self): + m = numpy.ones([2, 3]) + preprocessors_list = ['pa', 'pb'] + classifier_list = ['ca', 'cb', 'cc'] + cs = ConfigurationSpace() + preprocessor = CategoricalHyperparameter(name='preprocessor', + choices=preprocessors_list) + classifier = CategoricalHyperparameter(name='classifier', + choices=classifier_list) + cs.add_hyperparameter(preprocessor) + cs.add_hyperparameter(classifier) + new_cs = autosklearn.pipeline.create_searchspace_util.add_forbidden( + conf_space=cs, node_0_list=preprocessors_list, + node_1_list=classifier_list, matches=m, + node_0_name='preprocessor', node_1_name="classifier") + self.assertEqual(len(new_cs.forbidden_clauses), 0) + self.assertIsInstance(new_cs, ConfigurationSpace) + + m[1, 1] = 0 + new_cs = autosklearn.pipeline.create_searchspace_util.add_forbidden( + conf_space=cs, node_0_list=preprocessors_list, + node_1_list=classifier_list, matches=m, + node_0_name='preprocessor', node_1_name="classifier") + self.assertEqual(len(new_cs.forbidden_clauses), 1) + self.assertEqual(new_cs.forbidden_clauses[0].components[0].value, 'cb') + self.assertEqual(new_cs.forbidden_clauses[0].components[1].value, 'pb') + self.assertIsInstance(new_cs, ConfigurationSpace) \ No newline at end of file diff --git a/test/test_pipeline/test_regression.py b/test/test_pipeline/test_regression.py new file mode 100644 index 0000000000..709191534b --- /dev/null +++ b/test/test_pipeline/test_regression.py @@ -0,0 +1,293 @@ +__author__ = 'eggenspk' + +import copy +import resource +import traceback +import unittest + +import mock +import numpy as np +import sklearn.datasets +import sklearn.decomposition +import sklearn.ensemble +import sklearn.svm +from sklearn.utils.testing import assert_array_almost_equal + +from HPOlibConfigSpace.configuration_space import ConfigurationSpace +from HPOlibConfigSpace.hyperparameters import CategoricalHyperparameter + +from autosklearn.pipeline.regression import SimpleRegressionPipeline +from autosklearn.pipeline.components.base import \ + AutoSklearnPreprocessingAlgorithm, AutoSklearnRegressionAlgorithm +import 
autosklearn.pipeline.components.regression as regression_components
+import autosklearn.pipeline.components.feature_preprocessing as preprocessing_components
+from autosklearn.pipeline.util import get_dataset
+from autosklearn.pipeline.constants import *
+
+
+class SimpleRegressionPipelineTest(unittest.TestCase):
+
+    def test_io_dict(self):
+        regressors = regression_components._regressors
+        for r in regressors:
+            if regressors[r] == regression_components.RegressorChoice:
+                continue
+            props = regressors[r].get_properties()
+            self.assertIn('input', props)
+            self.assertIn('output', props)
+            inp = props['input']
+            output = props['output']
+
+            self.assertIsInstance(inp, tuple)
+            self.assertIsInstance(output, tuple)
+            for i in inp:
+                self.assertIn(i, (SPARSE, DENSE, SIGNED_DATA, UNSIGNED_DATA))
+            self.assertEqual(output, (PREDICTIONS,))
+            self.assertIn('handles_regression', props)
+            self.assertTrue(props['handles_regression'])
+            self.assertIn('handles_classification', props)
+            self.assertIn('handles_multiclass', props)
+            self.assertIn('handles_multilabel', props)
+            self.assertFalse(props['handles_classification'])
+            self.assertFalse(props['handles_multiclass'])
+            self.assertFalse(props['handles_multilabel'])
+
+    def test_find_regressors(self):
+        regressors = regression_components._regressors
+        self.assertGreaterEqual(len(regressors), 1)
+        for key in regressors:
+            if hasattr(regressors[key], 'get_components'):
+                continue
+            self.assertIn(AutoSklearnRegressionAlgorithm,
+                          regressors[key].__bases__)
+
+    def test_find_preprocessors(self):
+        preprocessors = preprocessing_components._preprocessors
+        self.assertGreaterEqual(len(preprocessors), 1)
+        for key in preprocessors:
+            if hasattr(preprocessors[key], 'get_components'):
+                continue
+            self.assertIn(AutoSklearnPreprocessingAlgorithm,
+                          preprocessors[key].__bases__)
+
+    def test_configurations(self):
+        # Use a limit of ~4GiB
+        limit = 4000 * 1024 * 1024
+        resource.setrlimit(resource.RLIMIT_AS, (limit, limit))
+
+        cs = SimpleRegressionPipeline.get_hyperparameter_search_space()
+
+        print(cs)
+        cs.seed(1)
+
+        for i in range(10):
+            config = cs.sample_configuration()
+            config._populate_values()
+            if config['regressor:sgd:n_iter'] is not None:
+                config._values['regressor:sgd:n_iter'] = 5
+
+            X_train, Y_train, X_test, Y_test = get_dataset(dataset='boston')
+            cls = SimpleRegressionPipeline(config, random_state=1)
+            print(config)
+            try:
+                cls.fit(X_train, Y_train)
+                X_test_ = X_test.copy()
+                predictions = cls.predict(X_test)
+                self.assertIsInstance(predictions, np.ndarray)
+                predictions_copy = cls.predict(X_test_)
+                self.assertIsInstance(predictions_copy, np.ndarray)
+            except ValueError as e:
+                if "Floating-point under-/overflow occurred at epoch" in \
+                        e.args[0] or \
+                        "removed all features" in e.args[0] or \
+                        "all features are discarded" in e.args[0]:
+                    continue
+                else:
+                    print(config)
+                    print(traceback.format_exc())
+                    raise e
+            except RuntimeWarning as e:
+                if "invalid value encountered in sqrt" in e.args[0]:
+                    continue
+                elif "divide by zero encountered in" in e.args[0]:
+                    continue
+                elif "invalid value encountered in divide" in e.args[0]:
+                    continue
+                elif "invalid value encountered in true_divide" in e.args[0]:
+                    continue
+                else:
+                    print(config)
+                    print(traceback.format_exc())
+                    raise e
+            except UserWarning as e:
+                if "FastICA did not converge" in e.args[0]:
+                    continue
+                else:
+                    print(config)
+                    print(traceback.format_exc())
+                    raise e
+            except MemoryError as e:
+                continue
+
+    def test_default_configuration(self):
+        for i in range(2):
+            cs = SimpleRegressionPipeline.get_hyperparameter_search_space()
+            default = cs.get_default_configuration()
+            X_train, Y_train, X_test, Y_test = get_dataset(dataset='diabetes')
+            auto = SimpleRegressionPipeline(default)
+            auto = auto.fit(X_train, Y_train)
+            predictions = auto.predict(copy.deepcopy(X_test))
+            # The lower this score, the worse the model
+            r2_score = sklearn.metrics.r2_score(Y_test, predictions)
+            self.assertAlmostEqual(0.41626416529791199, r2_score)
+            model_score = auto.score(copy.deepcopy(X_test), Y_test)
+            self.assertEqual(model_score, r2_score)
+
+    def test_repr(self):
+        cs = SimpleRegressionPipeline.get_hyperparameter_search_space()
+        default = cs.get_default_configuration()
+        representation = repr(SimpleRegressionPipeline(default))
+        cls = eval(representation)
+        self.assertIsInstance(cls, SimpleRegressionPipeline)
+
+    def test_get_hyperparameter_search_space(self):
+        cs = SimpleRegressionPipeline.get_hyperparameter_search_space()
+        self.assertIsInstance(cs, ConfigurationSpace)
+        conditions = cs.get_conditions()
+        hyperparameters = cs.get_hyperparameters()
+        self.assertEqual(114, len(hyperparameters))
+        self.assertEqual(len(hyperparameters) - 5, len(conditions))
+
+    def test_get_hyperparameter_search_space_include_exclude_models(self):
+        cs = SimpleRegressionPipeline.get_hyperparameter_search_space(
+            include={'regressor': ['random_forest']})
+        self.assertEqual(cs.get_hyperparameter('regressor:__choice__'),
+                         CategoricalHyperparameter('regressor:__choice__',
+                                                   ['random_forest']))
+
+        # TODO add this test when more than one regressor is present
+        cs = SimpleRegressionPipeline.get_hyperparameter_search_space(
+            exclude={'regressor': ['random_forest']})
+        self.assertNotIn('random_forest', str(cs))
+
+        cs = SimpleRegressionPipeline.get_hyperparameter_search_space(
+            include={'preprocessor': ['pca']})
+        self.assertEqual(cs.get_hyperparameter('preprocessor:__choice__'),
+                         CategoricalHyperparameter('preprocessor:__choice__',
+                                                   ['pca']))
+
+        cs = SimpleRegressionPipeline.get_hyperparameter_search_space(
+            exclude={'preprocessor': ['no_preprocessing']})
+        self.assertNotIn('no_preprocessing', str(cs))
+
+    def test_get_hyperparameter_search_space_preprocessor_contradicts_default_regressor(
+            self):
+        cs = SimpleRegressionPipeline.get_hyperparameter_search_space(
+            include={'preprocessor': ['densifier']},
+            dataset_properties={'sparse': True})
+        self.assertEqual(cs.get_hyperparameter('regressor:__choice__').default,
+                         'gradient_boosting')
+
+        cs = SimpleRegressionPipeline.get_hyperparameter_search_space(
+            include={'preprocessor': ['nystroem_sampler']})
+        self.assertEqual(cs.get_hyperparameter('regressor:__choice__').default,
+                         'sgd')
+
+    def test_get_hyperparameter_search_space_only_forbidden_combinations(self):
+        self.assertRaisesRegexp(ValueError, "Cannot find a legal default "
+                                            "configuration.",
+                                SimpleRegressionPipeline.get_hyperparameter_search_space,
+                                include={'regressor': ['random_forest'],
+                                         'preprocessor': ['kitchen_sinks']})
+
+        # It must also be caught that no regressor which can handle sparse
+        # data is located behind the densifier
+        self.assertRaisesRegexp(ValueError, "Cannot find a legal default "
+                                            "configuration",
+                                SimpleRegressionPipeline.get_hyperparameter_search_space,
+                                include={'regressor': ['ridge_regression'],
+                                         'preprocessor': ['densifier']},
+                                dataset_properties={'sparse': True})
+
+    @unittest.skip("test_get_hyperparameter_search_space_dataset_properties" +
+                   " Not yet Implemented")
+    def test_get_hyperparameter_search_space_dataset_properties(self):
+        # TODO: We do not have any
dataset properties for regression, so this + # test is somewhat stupid + pass + """ + full_cs = SimpleRegressionPipeline.get_hyperparameter_search_space() + cs_mc = SimpleRegressionPipeline.get_hyperparameter_search_space() + self.assertEqual(full_cs, cs_mc) + + cs_ml = SimpleRegressionPipeline.get_hyperparameter_search_space() + self.assertNotIn('k_nearest_neighbors', str(cs_ml)) + self.assertNotIn('liblinear', str(cs_ml)) + self.assertNotIn('libsvm_svc', str(cs_ml)) + self.assertNotIn('sgd', str(cs_ml)) + + cs_sp = SimpleRegressionPipeline.get_hyperparameter_search_space( + sparse=True) + self.assertNotIn('extra_trees', str(cs_sp)) + self.assertNotIn('gradient_boosting', str(cs_sp)) + self.assertNotIn('random_forest', str(cs_sp)) + + cs_mc_ml = SimpleRegressionPipeline.get_hyperparameter_search_space() + self.assertEqual(cs_ml, cs_mc_ml) + + self.assertRaisesRegexp(ValueError, + "No regressor to build a configuration space " + "for...", SimpleRegressionPipeline. + get_hyperparameter_search_space, + multiclass=True, multilabel=True, sparse=True) + """ + + def test_predict_batched(self): + cs = SimpleRegressionPipeline.get_hyperparameter_search_space() + default = cs.get_default_configuration() + cls = SimpleRegressionPipeline(default) + + X_train, Y_train, X_test, Y_test = get_dataset(dataset='boston') + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict(X_test_) + cls_predict = mock.Mock(wraps=cls.pipeline_) + cls.pipeline_ = cls_predict + prediction = cls.predict(X_test, batch_size=20) + self.assertEqual((356,), prediction.shape) + self.assertEqual(18, cls_predict.predict.call_count) + assert_array_almost_equal(prediction_, prediction) + + def test_predict_batched_sparse(self): + cs = SimpleRegressionPipeline.get_hyperparameter_search_space( + dataset_properties={'sparse': True}) + default = cs.get_default_configuration() + cls = SimpleRegressionPipeline(default) + + X_train, Y_train, X_test, Y_test = get_dataset(dataset='boston', + make_sparse=True) + cls.fit(X_train, Y_train) + X_test_ = X_test.copy() + prediction_ = cls.predict(X_test_) + cls_predict = mock.Mock(wraps=cls.pipeline_) + cls.pipeline_ = cls_predict + prediction = cls.predict(X_test, batch_size=20) + self.assertEqual((356,), prediction.shape) + self.assertEqual(18, cls_predict.predict.call_count) + assert_array_almost_equal(prediction_, prediction) + + @unittest.skip("test_check_random_state Not yet Implemented") + def test_check_random_state(self): + raise NotImplementedError() + + @unittest.skip("test_validate_input_X Not yet Implemented") + def test_validate_input_X(self): + raise NotImplementedError() + + @unittest.skip("test_validate_input_Y Not yet Implemented") + def test_validate_input_Y(self): + raise NotImplementedError() + + def test_set_params(self): + pass + + def test_get_params(self): + pass diff --git a/test/test_pipeline/test_textclassification.py b/test/test_pipeline/test_textclassification.py new file mode 100644 index 0000000000..28f3dd54af --- /dev/null +++ b/test/test_pipeline/test_textclassification.py @@ -0,0 +1,25 @@ +# import unittest +# +# from HPOlibConfigSpace.configuration_space import ConfigurationSpace +# +# from autosklearn.pipeline.textclassification import autosklearn.pipelineTextClassifier +# +# +# class TextClassificationTest(unittest.TestCase): +# @unittest.skip("Not properly implemented yet!") +# def test_get_hyperparameter_search_space(self): +# cs = autosklearn.pipelineTextClassifier.get_hyperparameter_search_space() +# self.assertIsInstance(cs, 
ConfigurationSpace) +# conditions = cs.get_conditions() +# hyperparameters = cs.get_hyperparameters() +# self.assertEqual(135, len(hyperparameters)) +# # The four parameters which are always active are classifier, +# # preprocessor and imputation strategy +# self.assertEqual(len(hyperparameters) - 3, len(conditions)) +# self.assertNotIn("rescaling", cs.get_hyperparameter( +# "preprocessor").choices) +# self.assertRaisesRegexp(KeyError, "Hyperparameter " +# "'rescaling:strategy' does not " +# "exist in this configuration " +# "space.", cs.get_hyperparameter, +# "rescaling:strategy") diff --git a/test/util/test_StopWatch.py b/test/util/test_StopWatch.py index 8a6e2e7145..449f1880de 100644 --- a/test/util/test_StopWatch.py +++ b/test/util/test_StopWatch.py @@ -39,7 +39,7 @@ def test_stopwatch_overhead(self): wall_overhead = dur - watch.wall_sum() self.assertLess(wall_overhead, 2) - self.assertLess(cpu_overhead, wall_overhead) + self.assertLess(cpu_overhead, 1.2*wall_overhead) if __name__ == '__main__': diff --git a/testcommand.sh b/testcommand.sh index 24e6795eb5..426743ef2a 100644 --- a/testcommand.sh +++ b/testcommand.sh @@ -1,2 +1,2 @@ #!/usr/bin/env bash -nosetests --processes=4 --process-timeout=120 \ No newline at end of file +nosetests --processes=3 --process-timeout=120 -v \ No newline at end of file
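
A side note on the test_predict_batched tests in test/test_pipeline/test_classification.py and test/test_pipeline/test_regression.py above: they count batches by wrapping the fitted pipeline in mock.Mock(wraps=...), which forwards every call to the real object while also recording it. Below is a minimal sketch of that pattern; DummyModel is a hypothetical stand-in for the fitted pipeline, and the batching loop is written out explicitly here, whereas in the tests it happens inside predict(X, batch_size=...).

import numpy as np
import mock  # the tests use the standalone mock package; Python 3 ships it as unittest.mock


class DummyModel(object):
    def predict(self, X):
        # Stand-in for the fitted pipeline's predict method
        return np.zeros(X.shape[0])


model = mock.Mock(wraps=DummyModel())
X = np.ones((356, 5))
batch_size = 20
# Predict in batches of 20, as cls.predict(X_test, batch_size=20) does internally
prediction = np.concatenate([model.predict(X[i:i + batch_size])
                             for i in range(0, X.shape[0], batch_size)])
assert prediction.shape == (356,)
# 356 rows in batches of 20 -> 18 calls, matching the call_count assertions above
assert model.predict.call_count == 18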