forked from idiap/fast-transformers
-
Notifications
You must be signed in to change notification settings - Fork 1
/
setup.py
100 lines (82 loc) · 2.81 KB
/
setup.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
#!/usr/bin/env python
#
# Copyright (c) 2020 Idiap Research Institute, http://www.idiap.ch/
# Written by Angelos Katharopoulos <angelos.katharopoulos@idiap.ch>,
# Apoorv Vyas <avyas@idiap.ch>
#
"""Setup fast transformers"""
from functools import lru_cache
from itertools import dropwhile
from os import path
from subprocess import DEVNULL, call
from setuptools import find_packages, setup
from torch.utils.cpp_extension import BuildExtension, CppExtension
@lru_cache(None)
def cuda_toolkit_available():
    """Return True iff the ``nvcc`` CUDA compiler is available on PATH.

    Cached so the PATH lookup happens at most once per process.
    """
    # shutil.which performs the same PATH lookup the original subprocess
    # call did, without spawning a process and without relying on
    # FileNotFoundError being the only possible failure mode.
    from shutil import which  # local import keeps the module import block untouched
    return which("nvcc") is not None
def collect_docstring(lines):
    """Extract the module docstring from an iterable of source lines.

    Skips every line up to the one opening with a triple quote, gathers
    lines until the closing triple quote, strips the quote markers, and
    collapses the text onto a single line.
    """
    stream = dropwhile(lambda text: not text.startswith('"""'), lines)
    pieces = []
    for piece in stream:
        pieces.append(piece)
        # Check the accumulated text, not the single line, so a closing
        # quote split oddly across lines is still detected.
        if "".join(pieces).endswith('"""\n'):
            break
    body = "".join(pieces)[3:-4]
    return body.replace("\r", "").replace("\n", " ")
def collect_metadata():
    """Parse ``fast_transformers/__init__.py`` for package metadata.

    Returns a dict with a "description" entry taken from the module
    docstring, plus one entry per dunder assignment found in the file
    (``__version__ = "x"`` becomes ``{"version": "x"}``).
    """
    meta = {}
    with open(path.join("fast_transformers", "__init__.py")) as f:
        lines = iter(f)
        # collect_docstring consumes lines up to and including the
        # closing triple quote; the loop below continues from there.
        meta["description"] = collect_docstring(lines)
        for line in lines:
            if line.startswith("__"):
                # Split on the FIRST "=" only: values containing "="
                # (e.g. URLs with query strings) would otherwise make the
                # two-name unpacking raise ValueError.
                key, value = map(lambda x: x.strip(), line.split("=", 1))
                # key[2:-2] drops the leading/trailing dunder underscores;
                # value[1:-1] drops the surrounding quote characters.
                meta[key[2:-2]] = value[1:-1]
    return meta
def get_extensions():
    """Build the list of extension modules to compile.

    The CPU extension is always included; the CUDA extension is appended
    only when the ``nvcc`` toolkit is detected on this machine.
    """
    cpu_extension = CppExtension(
        "fast_transformers.causal_product.causal_product_cpu",
        sources=[
            "fast_transformers/causal_product/causal_product_cpu.cpp"
        ],
        extra_compile_args=["-Xpreprocessor", "-fopenmp", "-ffast-math"],
    )
    modules = [cpu_extension]
    if cuda_toolkit_available():
        # Imported lazily so machines without CUDA never touch CUDAExtension.
        from torch.utils.cpp_extension import CUDAExtension
        cuda_extension = CUDAExtension(
            "fast_transformers.causal_product.causal_product_cuda",
            sources=[
                "fast_transformers/causal_product/causal_product_cuda.cu"
            ],
            extra_compile_args=["-arch=compute_50"],
        )
        modules.append(cuda_extension)
    return modules
def setup_package():
    """Gather project metadata and register the package with setuptools."""
    metadata = collect_metadata()
    # Echoed so the parsed metadata is visible in the build log.
    print(metadata)
    classifier_list = [
        "Intended Audience :: Science/Research",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Topic :: Scientific/Engineering",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
    ]
    excluded_dirs = ["docs", "tests", "scripts", "examples"]
    setup(
        name="pytorch-fast-transformers",
        version="0.0.1",
        classifiers=classifier_list,
        packages=find_packages(exclude=excluded_dirs),
        ext_modules=get_extensions(),
        cmdclass={"build_ext": BuildExtension},
    )
# Run the packaging routine only when executed as a script, not on import.
if __name__ == "__main__":
    setup_package()