
Commit

Merge 083adb2 into b1cb06d
weixuanfu committed Nov 5, 2019
2 parents b1cb06d + 083adb2 commit f1db44a
Showing 9 changed files with 27 additions and 69 deletions.
2 changes: 1 addition & 1 deletion .appveyor.yml
@@ -18,7 +18,7 @@ install:
- conda config --set always_yes yes --set changeps1 no
- conda update -q conda
- conda info -a
-  - conda create -q -n test-environment python=%PYTHON_VERSION% numpy scipy scikit-learn nose cython pandas pywin32 joblib
+  - conda create -q -n test-environment python=%PYTHON_VERSION% numpy scipy scikit-learn nose cython pandas joblib
- activate test-environment
- pip install deap tqdm update_checker stopit xgboost dask[delayed] dask[dataframe] cloudpickle==0.5.6 fsspec>=0.3.3 dask_ml==%DASK_ML_VERSION%

3 changes: 1 addition & 2 deletions README.md
@@ -6,8 +6,7 @@ Development status: [![Development Build Status - Mac/Linux](https://travis-ci.o
[![Development Build Status - Windows](https://ci.appveyor.com/api/projects/status/b7bmpwpkjhifrm7v/branch/development?svg=true)](https://ci.appveyor.com/project/weixuanfu/tpot?branch=development)
[![Development Coverage Status](https://coveralls.io/repos/github/EpistasisLab/tpot/badge.svg?branch=development)](https://coveralls.io/github/EpistasisLab/tpot?branch=development)

-Package information: [![Python 2.7](https://img.shields.io/badge/python-2.7-blue.svg)](https://www.python.org/download/releases/2.7/)
-[![Python 3.7](https://img.shields.io/badge/python-3.7-blue.svg)](https://www.python.org/downloads/release/python-370/)
+Package information: [![Python 3.7](https://img.shields.io/badge/python-3.7-blue.svg)](https://www.python.org/downloads/release/python-370/)
[![License: LGPL v3](https://img.shields.io/badge/license-LGPL%20v3-blue.svg)](http://www.gnu.org/licenses/lgpl-3.0)
[![PyPI version](https://badge.fury.io/py/TPOT.svg)](https://badge.fury.io/py/TPOT)

6 changes: 0 additions & 6 deletions docs_sources/installing.md
@@ -32,12 +32,6 @@ DEAP, update_checker, tqdm and stopit can be installed with `pip` via the comman
pip install deap update_checker tqdm stopit
```

-**For the Windows users**, the pywin32 module is required if Python is NOT installed via the [Anaconda Python distribution](https://www.continuum.io/downloads) and can be installed with `pip` for Python version <=3.3 or `conda` (e.g. miniconda) for any Python version:
-
-```Shell
-conda install pywin32
-```
-
**Optionally**, you can install [XGBoost](https://github.com/dmlc/xgboost) if you would like TPOT to use the eXtreme Gradient Boosting models. XGBoost is entirely optional, and TPOT will still function normally without XGBoost if you do not have it installed. **Windows users: pip installation may not work on some Windows environments, and it may cause unexpected errors.**

```Shell
18 changes: 9 additions & 9 deletions requirements.txt
@@ -1,10 +1,10 @@
-deap==1.0.2.post2
+deap>=1.2
nose==1.3.7
-numpy==1.12.1
-scikit-learn==0.18.1
-scipy==0.19.0
-tqdm==4.26.0
-update-checker==0.16
-stopit==1.1.1
-pandas==0.20.2
-joblib==0.10.3
+numpy>=1.16.3
+scikit-learn>=0.21.0
+scipy>=1.3.1
+tqdm>=4.36.1
+update-checker>=0.16
+stopit>=1.1.1
+pandas>=0.24.2
+joblib>=0.13.2
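
Since the pins above are now minimum versions rather than exact ones, a quick sanity check of a local environment is to compare installed versions against these floors. A rough sketch, not part of this commit (the list is trimmed to modules that expose `__version__`, and `pkg_resources` is assumed to be available via setuptools):

```python
# Compare installed package versions against the new floors in requirements.txt.
from importlib import import_module

from pkg_resources import parse_version  # ships with setuptools

FLOORS = {
    "deap": "1.2",
    "numpy": "1.16.3",
    "sklearn": "0.21.0",  # distributed on PyPI as scikit-learn
    "scipy": "1.3.1",
    "tqdm": "4.36.1",
    "pandas": "0.24.2",
    "joblib": "0.13.2",
}

for name, floor in FLOORS.items():
    installed = import_module(name).__version__
    status = "OK" if parse_version(installed) >= parse_version(floor) else "too old"
    print(f"{name:>8}: {installed} (needs >= {floor}) -> {status}")
```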
22 changes: 9 additions & 13 deletions setup.py
@@ -35,30 +35,26 @@ def calculate_version():
This project is hosted at https://github.com/EpistasisLab/tpot
''',
zip_safe=True,
-install_requires=['numpy>=1.12.1',
-'scipy>=0.19.0',
-'scikit-learn>=0.18.1',
-'deap>=1.0',
+install_requires=['numpy>=1.16.3',
+'scipy>=1.3.1',
+'scikit-learn>=0.21.0',
+'deap>=1.2',
'update_checker>=0.16',
-'tqdm>=4.26.0',
+'tqdm>=4.36.1',
'stopit>=1.1.1',
-'pandas>=0.20.2',
-'joblib>=0.10.3'],
+'pandas>=0.24.2',
+'joblib>=0.13.2'],
extras_require={
-'xgboost': ['xgboost==0.6a2'],
+'xgboost': ['xgboost==0.90'],
'skrebate': ['skrebate>=0.3.4'],
'mdr': ['scikit-mdr>=0.4.4'],
'dask': ['dask>=0.18.2',
'distributed>=1.22.1',
-'dask-ml>=0.9.0'],
+'dask-ml>=1.0.0'],
},
classifiers=[
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)',
-'Programming Language :: Python :: 2',
-'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
-'Programming Language :: Python :: 3.4',
-'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
7 changes: 2 additions & 5 deletions tests/export_tests.py
@@ -645,18 +645,15 @@ def test_imputer_in_export():
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
-try:
-    from sklearn.impute import SimpleImputer as Imputer
-except ImportError:
-    from sklearn.preprocessing import Imputer
+from sklearn.impute import SimpleImputer
# NOTE: Make sure that the outcome column is labeled 'target' in the data file
tpot_data = pd.read_csv('PATH/TO/DATA/FILE', sep='COLUMN_SEPARATOR', dtype=np.float64)
features = tpot_data.drop('target', axis=1)
training_features, testing_features, training_target, testing_target = \\
train_test_split(features, tpot_data['target'], random_state=None)
-imputer = Imputer(strategy="median")
+imputer = SimpleImputer(strategy="median")
imputer.fit(training_features)
training_features = imputer.transform(training_features)
testing_features = imputer.transform(testing_features)
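
For context on the change repeated across these files: `sklearn.preprocessing.Imputer` was deprecated in scikit-learn 0.20 and later removed, so with the new `scikit-learn>=0.21.0` floor the try/except fallback is unnecessary and `sklearn.impute.SimpleImputer` can be imported directly. A minimal, standalone sketch of the `SimpleImputer` API the diffs standardize on (the toy array below is made up for illustration):

```python
# Minimal sketch of sklearn.impute.SimpleImputer with the same "median"
# strategy used by TPOT's exported pipelines and _impute_values().
import numpy as np
from sklearn.impute import SimpleImputer

features = np.array([[1.0, 2.0],
                     [np.nan, 3.0],
                     [7.0, np.nan]])

imputer = SimpleImputer(strategy="median")
imputer.fit(features)               # learn per-column medians
print(imputer.transform(features))  # NaNs replaced by those medians
```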
2 changes: 1 addition & 1 deletion tpot/_version.py
@@ -23,4 +23,4 @@
"""

-__version__ = '0.10.2'
+__version__ = '0.11.0'
29 changes: 2 additions & 27 deletions tpot/base.py
@@ -50,10 +50,7 @@
from sklearn.utils import check_X_y, check_consistent_length, check_array
from sklearn.pipeline import make_pipeline, make_union
from sklearn.preprocessing import FunctionTransformer
-try:
-    from sklearn.impute import SimpleImputer as Imputer
-except ImportError:
-    from sklearn.preprocessing import Imputer
+from sklearn.impute import SimpleImputer
from sklearn.model_selection import train_test_split
from sklearn.metrics.scorer import make_scorer, _BaseScorer

@@ -81,28 +78,6 @@
warnings.simplefilter('ignore')
from tqdm.autonotebook import tqdm

-# hot patch for Windows: solve the problem of crashing python after Ctrl + C in Windows OS
-# https://github.com/ContinuumIO/anaconda-issues/issues/905
-if sys.platform.startswith('win'):
-    import win32api
-
-    try:
-        import _thread
-    except ImportError:
-        import thread as _thread
-
-
-    def handler(dwCtrlType, hook_sigint=_thread.interrupt_main):
-        """SIGINT handler function."""
-        if dwCtrlType == 0:  # CTRL_C_EVENT
-            hook_sigint()
-            return 1  # don't chain to the next handler
-        return 0
-
-
-    win32api.SetConsoleCtrlHandler(handler, 1)



class TPOTBase(BaseEstimator):
"""Automatically creates and optimizes machine learning pipelines using GP."""
@@ -1147,7 +1122,7 @@ def _impute_values(self, features):
print('Imputing missing values in feature set')

if self._fitted_imputer is None:
-self._fitted_imputer = Imputer(strategy="median")
+self._fitted_imputer = SimpleImputer(strategy="median")
self._fitted_imputer.fit(features)

return self._fitted_imputer.transform(features)
7 changes: 2 additions & 5 deletions tpot/export_utils.py
@@ -106,7 +106,7 @@ def export_pipeline(exported_pipeline,
# Add the imputation step if it was used by TPOT
if impute:
pipeline_text += """
-imputer = Imputer(strategy="median")
+imputer = SimpleImputer(strategy="median")
imputer.fit(training_features)
training_features = imputer.transform(training_features)
testing_features = imputer.transform(testing_features)
@@ -217,10 +217,7 @@ def merge_imports(old_dict, new_dict):

# Add the imputer if necessary
if impute:
-pipeline_text += """try:
-    from sklearn.impute import SimpleImputer as Imputer
-except ImportError:
-    from sklearn.preprocessing import Imputer
+pipeline_text += """from sklearn.impute import SimpleImputer
"""
if random_state is not None:
pipeline_text += """from tpot.export_utils import set_param_recursive
