-
Notifications
You must be signed in to change notification settings - Fork 24
/
setup.py
31 lines (30 loc) · 874 Bytes
/
setup.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
from setuptools import setup, find_packages

# Load the README for the PyPI project page. The original declared
# `long_description_content_type = 'text/markdown'` without supplying a
# `long_description`, so the content type pointed at nothing and PyPI
# rendered no README. Read it defensively: a missing README (e.g. an
# sdist built from a partial checkout) should not break installation.
try:
    with open('README.md', encoding='utf-8') as readme:
        long_description = readme.read()
except OSError:
    long_description = ''

setup(
  name = 'FLASH-pytorch',
  packages = find_packages(exclude=[]),
  version = '0.1.9',
  license='MIT',
  description = 'FLASH - Transformer Quality in Linear Time - Pytorch',
  author = 'Phil Wang',
  author_email = 'lucidrains@gmail.com',
  # Now backed by the README contents read above.
  long_description = long_description,
  long_description_content_type = 'text/markdown',
  url = 'https://github.com/lucidrains/FLASH-pytorch',
  keywords = [
    'artificial intelligence',
    'deep learning',
    'transformers',
    'attention mechanism'
  ],
  install_requires=[
    'einops>=0.4',
    'rotary-embedding-torch>=0.1.5',
    'torch>=1.9',
  ],
  classifiers=[
    'Development Status :: 4 - Beta',
    'Intended Audience :: Developers',
    'Topic :: Scientific/Engineering :: Artificial Intelligence',
    'License :: OSI Approved :: MIT License',
    'Programming Language :: Python :: 3.6',
  ],
)