/
setup.py
89 lines (75 loc) · 2.78 KB
/
setup.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
import os
import re
import sys
from setuptools import setup, find_packages
from functools import reduce
# Read the version straight out of optimus/version.py so that building the
# sdist/wheel never triggers an import of the package (and its heavy deps).
def get_version():
    """Return the package version string parsed from optimus/version.py."""
    with open('optimus/version.py') as version_file:
        source = version_file.read()
    # Match __version__ = '<x.y.z>' with either quote style; the backref
    # ensures the closing quote matches the opening one.
    match = re.search(r"""__version__\s+=\s+(['"])(?P<version>.+?)\1""", source)
    return match.group('version')
# Fail fast on interpreters older than the minimum supported version.
if not sys.version_info >= (3, 6):  # pragma: no cover
    raise RuntimeError('This version requires Python 3.6+')
def readme():
    """Return the full text of README.md, used as the long description."""
    with open('README.md') as readme_file:
        contents = readme_file.read()
    return contents
# Requirements
# Pick the requirements file that matches the runtime environment:
# Google Colab is detected by importing its marker module, Databricks by an
# environment variable it always sets; anything else is a plain install.
try:
    import google.colab
    IN_COLAB = True
except ImportError:
    IN_COLAB = False
if "DATABRICKS_RUNTIME_VERSION" in os.environ:
    with open('requirements-databricks.txt') as f:
        required = f.read().splitlines()
elif IN_COLAB:
    with open('requirements-google-colab.txt') as f:
        required = f.read().splitlines()
else:
    with open('requirements.txt') as f:
        required = f.read().splitlines()
# Each optional feature gets its own extra, fed from requirements-<extra>.txt.
extras_requirements_keys = ['spark', 'dask', 'vaex', 'cudf', 'ai', 'db']
extras_requirements = {}
for extra in extras_requirements_keys:
    with open('requirements-'+extra+'.txt') as f:
        extras_requirements[extra] = f.read().splitlines()
lint_requirements = ['pep8', 'pyflakes']
test_requirements = ['pytest', 'mock', 'nose']
dependency_links = []
setup_requirements = ['pytest-runner']
# Only pull in nose when the user is actually invoking the nosetests command.
if 'nosetests' in sys.argv[1:]:
    setup_requirements.append('nose')
setup(
    name='pyoptimus',
    version=get_version(),
    author='Argenis Leon',
    author_email='argenisleon@gmail.com',
    url='https://github.com/hi-primus/optimus/',
    description=('Optimus is the missing framework for cleaning and pre-processing data in a distributed fashion.'),
    long_description=readme(),
    long_description_content_type='text/markdown',
    license='APACHE',
    packages=find_packages(),
    install_requires=required,
    tests_require=test_requirements,
    setup_requires=setup_requirements,
    # Tell pip the supported interpreter range so installation fails cleanly
    # on old Pythons instead of crashing in the runtime guard above.
    python_requires='>=3.6',
    extras_require={
        'test': test_requirements,
        # 'all' concatenates every per-feature extra into one flat list.
        'all': test_requirements + reduce(lambda x, key: x + extras_requirements[key], extras_requirements, []),
        'docs': ['sphinx'] + test_requirements,
        'lint': lint_requirements,
        **extras_requirements
    },
    dependency_links=dependency_links,
    test_suite='nose.collector',
    include_package_data=True,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Topic :: Scientific/Engineering :: Artificial Intelligence',
        'License :: OSI Approved :: Apache Software License',
        # 3.5 removed: the script raises RuntimeError on anything below 3.6.
        'Programming Language :: Python :: 3.6',
    ],
    keywords=['datacleaner', 'data-wrangling', 'data-cleansing', 'data-profiling'],
)