diff --git a/README.md b/README.md
index d894d53a46ba..bfda642b2f0e 100644
--- a/README.md
+++ b/README.md
@@ -25,10 +25,12 @@ started:
## Getting Started
-To install PyTorch/XLA a new VM:
+**PyTorch/XLA is now on PyPI!**
+
+To install PyTorch/XLA on a new TPU VM:
```
-pip install torch~=2.0.0 https://storage.googleapis.com/tpu-pytorch/wheels/tpuvm/torch_xla-2.0-cp38-cp38-linux_x86_64.whl
+pip install torch~=2.1.0 torch_xla[tpu]~=2.1.0 -f https://storage.googleapis.com/libtpu-releases/index.html
```
To update your existing training loop, make the following changes:
@@ -130,26 +132,37 @@ Our comprehensive user guides are available at:
## Available docker images and wheels
-### Wheel
+### Python packages
+
+PyTorch/XLA releases starting with version r2.1 will be available on PyPI. You
+can now install the main build with `pip install torch_xla`. To also install the
+Cloud TPU plugin, install the optional `tpu` dependencies:
+
+```
+pip install torch_xla[tpu] -f https://storage.googleapis.com/libtpu-releases/index.html
+```
+
+GPU, XRT (legacy runtime), and nightly builds are available in our public GCS
+bucket.
| Version | Cloud TPU VMs Wheel |
| --- | ----------- |
-| 2.0 (Python 3.8) | `https://storage.googleapis.com/tpu-pytorch/wheels/tpuvm/torch_xla-2.0-cp38-cp38-linux_x86_64.whl` |
-| nightly >= 2023/04/25 (Python 3.8) | `https://storage.googleapis.com/pytorch-xla-releases/wheels/tpuvm/torch_xla-nightly-cp38-cp38-linux_x86_64.whl` |
-| nightly >= 2023/04/25 (Python 3.10) | `https://storage.googleapis.com/pytorch-xla-releases/wheels/tpuvm/torch_xla-nightly-cp310-cp310-linux_x86_64.whl` |
+| 2.1 (CUDA 12.0 + Python 3.8) | `https://storage.googleapis.com/pytorch-xla-releases/wheels/cuda/12.0/torch_xla-2.1.0-cp38-cp38-manylinux_2_28_x86_64.whl` |
+| 2.1 (XRT + Python 3.10) | `https://storage.googleapis.com/pytorch-xla-releases/wheels/xrt/tpuvm/torch_xla-2.1.0%2Bxrt-cp310-cp310-manylinux_2_28_x86_64.whl` |
+| nightly (Python 3.8) | `https://storage.googleapis.com/pytorch-xla-releases/wheels/tpuvm/torch_xla-nightly-cp38-cp38-linux_x86_64.whl` |
+| nightly (Python 3.10) | `https://storage.googleapis.com/pytorch-xla-releases/wheels/tpuvm/torch_xla-nightly-cp310-cp310-linux_x86_64.whl` |
- older versions
+
+**Older versions**
| Version | Cloud TPU VMs Wheel |
|---------|-------------------|
+| 2.0 (Python 3.8) | `https://storage.googleapis.com/tpu-pytorch/wheels/tpuvm/torch_xla-2.0-cp38-cp38-linux_x86_64.whl` |
| 1.13 | `https://storage.googleapis.com/tpu-pytorch/wheels/tpuvm/torch_xla-1.13-cp38-cp38-linux_x86_64.whl` |
| 1.12 | `https://storage.googleapis.com/tpu-pytorch/wheels/tpuvm/torch_xla-1.12-cp38-cp38-linux_x86_64.whl` |
| 1.11 | `https://storage.googleapis.com/tpu-pytorch/wheels/tpuvm/torch_xla-1.11-cp38-cp38-linux_x86_64.whl` |
| 1.10 | `https://storage.googleapis.com/tpu-pytorch/wheels/tpuvm/torch_xla-1.10-cp38-cp38-linux_x86_64.whl` |
-| nightly <= 2023/04/25 | `https://storage.googleapis.com/tpu-pytorch/wheels/tpuvm/torch_xla-nightly-cp38-cp38-linux_x86_64.whl` |
-
-
@@ -204,53 +217,58 @@ pip3 install torch_xla[tpuvm]
This is only required on Cloud TPU VMs.
+
+
### Docker
| Version | Cloud TPU VMs Docker |
| --- | ----------- |
-2.0 | `gcr.io/tpu-pytorch/xla:r2.0_3.8_tpuvm` |
-1.13 | `gcr.io/tpu-pytorch/xla:r1.13_3.8_tpuvm` |
-nightly python 3.10 | `us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:nightly_3.10_tpuvm` |
-nightly python 3.8 | `us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:nightly_3.8_tpuvm` |
-nightly python 3.10(>= 2023/04/25) | `us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:nightly_3.10_tpuvm_YYYYMMDD` |
-nightly python 3.8(>= 2023/04/25) | `us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:nightly_3.8_tpuvm_YYYYMMDD` |
-nightly at date(< 2023/04/25) | `gcr.io/tpu-pytorch/xla:nightly_3.8_tpuvm_YYYYMMDD` |
+| 2.1 | `us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:r2.1.0_3.10_tpuvm` |
+| 2.0 | `gcr.io/tpu-pytorch/xla:r2.0_3.8_tpuvm` |
+| 1.13 | `gcr.io/tpu-pytorch/xla:r1.13_3.8_tpuvm` |
+| nightly python 3.10 | `us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:nightly_3.10_tpuvm` |
-| Version | GPU CUDA 12.0 + Python 3.8 Docker |
+| Version | GPU CUDA 12.0 Docker |
| --- | ----------- |
+| 2.1 | `us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:r2.1.0_3.10_cuda_12.0` |
| nightly | `us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:nightly_3.8_cuda_12.0` |
-| nightly at date(>=2023/06/27) | `us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:nightly_3.8_cuda_12.0_YYYYMMDD` |
+| nightly at date | `us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:nightly_3.8_cuda_12.0_YYYYMMDD` |
-| Version | GPU CUDA 11.8 + Python 3.8 Docker |
+| Version | GPU CUDA 11.8 Docker |
| --- | ----------- |
+| 2.1 | `us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:r2.1.0_3.10_cuda_11.8` |
| 2.0 | `gcr.io/tpu-pytorch/xla:r2.0_3.8_cuda_11.8` |
| nightly | `us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:nightly_3.8_cuda_11.8` |
-| nightly at date(>=2023/04/25) | `us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:nightly_3.8_cuda_11.8_YYYYMMDD` |
-| nightly at date(<2023/04/25) | `gcr.io/tpu-pytorch/xla:nightly_3.8_cuda_11.8_YYYYMMDD` |
+| nightly at date | `us-central1-docker.pkg.dev/tpu-pytorch-releases/docker/xla:nightly_3.8_cuda_11.8_YYYYMMDD` |
-| Version | GPU CUDA 11.7 + Python 3.8 Docker |
+
+
+**Older versions**
+
+| Version | GPU CUDA 11.7 Docker |
| --- | ----------- |
| 2.0 | `gcr.io/tpu-pytorch/xla:r2.0_3.8_cuda_11.7` |
-| Version | GPU CUDA 11.2 + Python 3.8 Docker |
+| Version | GPU CUDA 11.2 Docker |
| --- | ----------- |
| 1.13 | `gcr.io/tpu-pytorch/xla:r1.13_3.8_cuda_11.2` |
-| Version | GPU CUDA 11.2 + Python 3.7 Docker |
+| Version | GPU CUDA 11.2 Docker |
| --- | ----------- |
-1.13 | `gcr.io/tpu-pytorch/xla:r1.13_3.7_cuda_11.2` |
-1.12 | `gcr.io/tpu-pytorch/xla:r1.12_3.7_cuda_11.2` |
+| 1.13 | `gcr.io/tpu-pytorch/xla:r1.13_3.7_cuda_11.2` |
+| 1.12 | `gcr.io/tpu-pytorch/xla:r1.12_3.7_cuda_11.2` |
+
To run on [compute instances with
GPUs](https://cloud.google.com/compute/docs/gpus/create-vm-with-gpus).
diff --git a/scripts/fixup_binary.py b/scripts/fixup_binary.py
deleted file mode 100755
index 8d12d1c78f22..000000000000
--- a/scripts/fixup_binary.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env python
-
-import argparse
-import glob
-import os
-import site
-import subprocess
-
-
-def find_torch_xla_site(site_paths):
- for site_path in site_paths:
- # If there is one named 'torch_xla', this is what we pick.
- path = os.path.join(site_path, 'torch_xla', 'lib')
- if os.path.isdir(path):
- return [site_path, path]
- dirs = glob.glob(os.path.join(site_path, 'torch_xla*'))
- # Get the most recent one.
- for xpath in sorted(dirs, key=os.path.getmtime):
- path = os.path.join(xpath, 'lib')
- if os.path.isdir(path):
- return [site_path, path]
- if os.path.isfile(os.path.join(xpath, 'libptxla.so')):
- return [site_path, xpath, os.path.join(xpath, 'torch_xla', 'lib')]
- raise RuntimeError('Unable to find torch_xla package in {}'.format(site_path))
-
-
-def find_torch_site(site_paths):
- for site_path in site_paths:
- path = os.path.join(site_path, 'torch', 'lib')
- if os.path.isdir(path):
- return [path]
- raise RuntimeError('Unable to find torch package in {}'.format(site_path))
-
-
-def list_rpaths(path):
- if subprocess.call(['patchelf', '--shrink-rpath', path]) != 0:
- raise RuntimeError('Failed to shrink RPATH folders: {}'.format(path))
- return subprocess.check_output(['patchelf', '--print-rpath',
- path]).decode('utf-8').strip('\n').split(':')
-
-
-def set_rpaths(path, rpaths):
- if subprocess.call(['patchelf', '--set-rpath', ':'.join(rpaths), path]) != 0:
- raise RuntimeError('Failed to set RPATH folders {}: {}'.format(
- rpaths, path))
-
-
-def fixup_binary(args):
- site_paths = site.getsitepackages()
- xla_rpaths = find_torch_xla_site(site_paths)
- torch_rpaths = find_torch_site(site_paths)
- rpaths = list_rpaths(args.binary)
- rpaths = xla_rpaths + torch_rpaths + rpaths
- set_rpaths(args.binary, rpaths)
-
-
-if __name__ == '__main__':
- arg_parser = argparse.ArgumentParser()
- arg_parser.add_argument(
- 'binary',
- type=str,
- metavar='BINARY',
- help='The path to the binary to be patched')
- args, files = arg_parser.parse_known_args()
- fixup_binary(args)
diff --git a/setup.py b/setup.py
index f157fd728097..3a98db22c32f 100644
--- a/setup.py
+++ b/setup.py
@@ -307,15 +307,37 @@ def run(self):
super().run()
+# Read in README.md for our long_description
+cwd = os.path.dirname(os.path.abspath(__file__))
+with open(os.path.join(cwd, "README.md"), encoding="utf-8") as f:
+ long_description = f.read()
+
setup(
name=os.environ.get('TORCH_XLA_PACKAGE_NAME', 'torch_xla'),
version=version,
description='XLA bridge for PyTorch',
+ long_description=long_description,
+ long_description_content_type="text/markdown",
url='https://github.com/pytorch/xla',
author='PyTorch/XLA Dev Team',
author_email='pytorch-xla@googlegroups.com',
- # Exclude the build files.
- packages=find_packages(exclude=['build']),
+ classifiers=[
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "Intended Audience :: Education",
+ "Intended Audience :: Science/Research",
+ "License :: OSI Approved :: BSD License",
+ "Topic :: Scientific/Engineering",
+ "Topic :: Scientific/Engineering :: Mathematics",
+ "Topic :: Scientific/Engineering :: Artificial Intelligence",
+ "Topic :: Software Development",
+ "Topic :: Software Development :: Libraries",
+ "Topic :: Software Development :: Libraries :: Python Modules",
+ "Programming Language :: C++",
+ "Programming Language :: Python :: 3",
+ ],
+ python_requires=">=3.8.0",
+ packages=find_packages(include=['torch_xla*']),
ext_modules=[
BazelExtension('//:_XLAC.so'),
],
@@ -334,12 +356,12 @@ def run(self):
},
extras_require={
# On Cloud TPU VM install with:
- # $ sudo pip3 install torch_xla[tpuvm] -f https://storage.googleapis.com/tpu-pytorch/wheels/tpuvm/torch_xla-1.11-cp38-cp38-linux_x86_64.whl
+ # pip install torch_xla[tpu] -f https://storage.googleapis.com/libtpu-releases/index.html
+ 'tpu': [f'libtpu-nightly=={_libtpu_version}'],
+ # On nightly, install libtpu with `pip install torch_xla[tpuvm]`
+ # Remove from release branches since this is not allowed by PyPI.
'tpuvm': [f'libtpu-nightly @ {_libtpu_storage_path}'],
},
- data_files=[
- 'scripts/fixup_binary.py',
- ],
cmdclass={
'build_ext': BuildBazelExtension,
'clean': Clean,