refactoring setup (#6590)
* refactoring setup

* .

* docs

* flake8
Borda authored Mar 22, 2021
1 parent e2e1de0 commit 1fae10a
Showing 6 changed files with 101 additions and 93 deletions.
23 changes: 13 additions & 10 deletions docs/source/conf.py
@@ -13,7 +13,6 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.

# import m2r
import builtins
import glob
import os
import shutil
@@ -27,10 +26,13 @@

FOLDER_GENERATED = 'generated'
SPHINX_MOCK_REQUIREMENTS = int(os.environ.get('SPHINX_MOCK_REQUIREMENTS', True))
if SPHINX_MOCK_REQUIREMENTS:
builtins.__LIGHTNING_SETUP__ = True

import pytorch_lightning # noqa: E402
try:
from pytorch_lightning import info
except ImportError:
# alternative https://stackoverflow.com/a/67692/4521646
sys.path.append(os.path.join(PATH_ROOT, "pytorch_lightning"))
import info

# -- Project documents -------------------------------------------------------

@@ -79,13 +81,13 @@ def _transform_changelog(path_in: str, path_out: str) -> None:
# -- Project information -----------------------------------------------------

project = 'PyTorch Lightning'
copyright = pytorch_lightning.__copyright__
author = pytorch_lightning.__author__
copyright = info.__copyright__
author = info.__author__

# The short X.Y version
version = pytorch_lightning.__version__
version = info.__version__
# The full version, including alpha/beta/rc tags
release = pytorch_lightning.__version__
release = info.__version__

# -- General configuration ---------------------------------------------------

@@ -176,8 +178,8 @@ def _transform_changelog(path_in: str, path_out: str) -> None:
# documentation.

html_theme_options = {
'pytorch_project': pytorch_lightning.__homepage__,
'canonical_url': pytorch_lightning.__homepage__,
'pytorch_project': info.__homepage__,
'canonical_url': info.__homepage__,
'collapse_navigation': False,
'display_version': True,
'logo_only': False,
@@ -279,6 +281,7 @@ def _transform_changelog(path_in: str, path_out: str) -> None:
'torch': ('https://pytorch.org/docs/stable/', None),
'numpy': ('https://numpy.org/doc/stable/', None),
'PIL': ('https://pillow.readthedocs.io/en/stable/', None),
'torchmetrics': ('https://torchmetrics.readthedocs.io/en/stable/', None),
}

# -- Options for todo extension ----------------------------------------------
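The try/except fallback in `conf.py` exists because the docs build may run where the full `pytorch_lightning` package cannot be imported (its heavy requirements are mocked via `SPHINX_MOCK_REQUIREMENTS`). A minimal sketch of an alternative approach, not part of this commit, that loads `info.py` directly by file path instead of appending to `sys.path`; it assumes `conf.py` lives at `docs/source/conf.py` and that `PATH_ROOT` is the repository root, as on the `sys.path.append` line above:

```python
# Sketch only: load pytorch_lightning/info.py as a standalone module so the
# docs build can read metadata without importing the heavy package.
import importlib.util
import os

# conf.py already defines PATH_ROOT (the repository root); recomputed here so
# the snippet is self-contained.
PATH_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

_spec = importlib.util.spec_from_file_location(
    "info", os.path.join(PATH_ROOT, "pytorch_lightning", "info.py")
)
info = importlib.util.module_from_spec(_spec)
_spec.loader.exec_module(info)  # executes only info.py, nothing heavier

version = info.__version__
```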
81 changes: 22 additions & 59 deletions pytorch_lightning/__init__.py
@@ -2,42 +2,17 @@

import logging
import os
import sys
import time

_this_year = time.strftime("%Y")
__version__ = '1.3.0dev'
__author__ = 'William Falcon et al.'
__author_email__ = 'waf2107@columbia.edu'
__license__ = 'Apache-2.0'
__copyright__ = f'Copyright (c) 2018-{_this_year}, {__author__}.'
__homepage__ = 'https://github.com/PyTorchLightning/pytorch-lightning'
# this has to be simple string, see: https://github.com/pypa/twine/issues/522
__docs__ = (
"PyTorch Lightning is the lightweight PyTorch wrapper for ML researchers."
" Scale your models. Write less boilerplate."
from pytorch_lightning.info import ( # noqa: F401
__author__,
__author_email__,
__copyright__,
__docs__,
__homepage__,
__license__,
__version__,
)
__long_docs__ = """
Lightning is a way to organize your PyTorch code to decouple the science code from the engineering.
It's more of a style-guide than a framework.

In Lightning, you organize your code into 3 distinct categories:
1. Research code (goes in the LightningModule).
2. Engineering code (you delete, and is handled by the Trainer).
3. Non-essential research code (logging, etc. this goes in Callbacks).
Although your research/production project might start simple, once you add things like GPU AND TPU training,
16-bit precision, etc, you end up spending more time engineering than researching.
Lightning automates AND rigorously tests those parts for you.
Overall, Lightning guarantees rigorously tested, correct, modern best practices for the automated parts.
Documentation
-------------
- https://pytorch-lightning.readthedocs.io/en/latest
- https://pytorch-lightning.readthedocs.io/en/stable
"""
_root_logger = logging.getLogger()
_logger = logging.getLogger(__name__)
_logger.setLevel(logging.INFO)
@@ -50,32 +25,20 @@
_PACKAGE_ROOT = os.path.dirname(__file__)
_PROJECT_ROOT = os.path.dirname(_PACKAGE_ROOT)

try:
# This variable is injected in the __builtins__ by the build
# process. It used to enable importing subpackages of skimage when
# the binaries are not built
_ = None if __LIGHTNING_SETUP__ else None
except NameError:
__LIGHTNING_SETUP__: bool = False

if __LIGHTNING_SETUP__: # pragma: no-cover
sys.stdout.write(f'Partial import of `{__name__}` during the build process.\n') # pragma: no-cover
# We are not importing the rest of the lightning during the build process, as it may not be compiled yet
else:
from pytorch_lightning import metrics
from pytorch_lightning.callbacks import Callback
from pytorch_lightning.core import LightningDataModule, LightningModule
from pytorch_lightning.trainer import Trainer
from pytorch_lightning.utilities.seed import seed_everything

__all__ = [
'Trainer',
'LightningDataModule',
'LightningModule',
'Callback',
'seed_everything',
'metrics',
]
from pytorch_lightning import metrics # noqa: E402
from pytorch_lightning.callbacks import Callback # noqa: E402
from pytorch_lightning.core import LightningDataModule, LightningModule # noqa: E402
from pytorch_lightning.trainer import Trainer # noqa: E402
from pytorch_lightning.utilities.seed import seed_everything # noqa: E402

__all__ = [
'Trainer',
'LightningDataModule',
'LightningModule',
'Callback',
'seed_everything',
'metrics',
]

# for compatibility with namespace packages
__import__('pkg_resources').declare_namespace(__name__)
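With the metadata moved into `pytorch_lightning/info.py` and re-exported here through the `from pytorch_lightning.info import (...)` block, user-facing access does not change. A small usage sketch, assuming the package is importable:

```python
# The metadata now lives in info.py but is still exposed at package level.
import pytorch_lightning as pl
from pytorch_lightning import info

assert pl.__version__ == info.__version__
assert pl.__docs__ == info.__docs__
print(pl.__version__, pl.__license__)
```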
3 changes: 1 addition & 2 deletions pytorch_lightning/callbacks/progress.py
@@ -39,8 +39,7 @@

class tqdm(_tqdm):
"""
Custom tqdm progressbar where we append 0 to floating points/strings to
prevent the progress bar from flickering
Custom tqdm progressbar where we append 0 to floating points/strings to prevent the progress bar from flickering
"""

@staticmethod
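The `progress.py` change only reflows the docstring, but the behaviour it describes — padding formatted numbers so the bar keeps a constant width — can be illustrated. A hypothetical sketch of that idea only; the real implementation is not shown in this diff, and `_PAD_SIZE` is an invented name:

```python
from tqdm import tqdm as _tqdm

_PAD_SIZE = 6  # assumed fixed width for formatted numbers


class tqdm(_tqdm):
    """Custom tqdm that pads floats/strings so the bar width does not flicker."""

    @staticmethod
    def format_num(n) -> str:
        # Let tqdm format numbers as usual, then append zeros up to a fixed width.
        out = n if isinstance(n, str) else _tqdm.format_num(n)
        if isinstance(n, (float, str)) and '.' in out and 'e' not in out:
            out = out.ljust(_PAD_SIZE, '0')
        return out
```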
35 changes: 35 additions & 0 deletions pytorch_lightning/info.py
@@ -0,0 +1,35 @@
import time

_this_year = time.strftime("%Y")
__version__ = '1.3.0dev'
__author__ = 'William Falcon et al.'
__author_email__ = 'waf2107@columbia.edu'
__license__ = 'Apache-2.0'
__copyright__ = f'Copyright (c) 2018-{_this_year}, {__author__}.'
__homepage__ = 'https://github.com/PyTorchLightning/pytorch-lightning'
# this has to be simple string, see: https://github.com/pypa/twine/issues/522
__docs__ = (
"PyTorch Lightning is the lightweight PyTorch wrapper for ML researchers."
" Scale your models. Write less boilerplate."
)
__long_docs__ = """
Lightning is a way to organize your PyTorch code to decouple the science code from the engineering.
It's more of a style-guide than a framework.
In Lightning, you organize your code into 3 distinct categories:
1. Research code (goes in the LightningModule).
2. Engineering code (you delete, and is handled by the Trainer).
3. Non-essential research code (logging, etc. this goes in Callbacks).
Although your research/production project might start simple, once you add things like GPU AND TPU training,
16-bit precision, etc, you end up spending more time engineering than researching.
Lightning automates AND rigorously tests those parts for you.
Overall, Lightning guarantees rigorously tested, correct, modern best practices for the automated parts.
Documentation
-------------
- https://pytorch-lightning.readthedocs.io/en/latest
- https://pytorch-lightning.readthedocs.io/en/stable
"""
6 changes: 3 additions & 3 deletions pytorch_lightning/setup_tools.py
@@ -16,7 +16,7 @@
import re
from typing import List

from pytorch_lightning import __homepage__, __version__, _PROJECT_ROOT
_PROJECT_ROOT = os.path.dirname(os.path.dirname(__file__))


def _load_requirements(path_dir: str, file_name: str = 'requirements.txt', comment_char: str = '#') -> List[str]:
@@ -40,10 +40,10 @@ def _load_requirements(path_dir: str, file_name: str = 'requirements.txt', comment_char: str = '#') -> List[str]:
return reqs


def _load_readme_description(path_dir: str, homepage: str = __homepage__, version: str = __version__) -> str:
def _load_readme_description(path_dir: str, homepage: str, version: str) -> str:
"""Load readme as decribtion
>>> _load_readme_description(_PROJECT_ROOT) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
>>> _load_readme_description(_PROJECT_ROOT, "", "") # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
'<div align="center">...'
"""
path_readme = os.path.join(path_dir, "README.md")
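The body of `_load_requirements` is not part of this diff. For orientation, a rough sketch of what such a helper typically does — an assumption, not the actual implementation: strip comments and blank lines from a requirements file and return the remaining specifiers.

```python
import os
from typing import List


def _load_requirements(path_dir: str, file_name: str = 'requirements.txt', comment_char: str = '#') -> List[str]:
    """Sketch: read a requirements file and return cleaned package specifiers."""
    with open(os.path.join(path_dir, file_name)) as fh:
        lines = [ln.strip() for ln in fh.readlines()]
    reqs = []
    for ln in lines:
        if comment_char in ln:  # drop inline comments
            ln = ln[:ln.index(comment_char)].strip()
        if ln:  # skip now-empty lines
            reqs.append(ln)
    return reqs
```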
46 changes: 27 additions & 19 deletions setup.py
@@ -16,31 +16,33 @@
import os

# Always prefer setuptools over distutils
import sys

from setuptools import find_packages, setup

try:
import builtins
from pytorch_lightning import info, setup_tools
except ImportError:
import __builtin__ as builtins
# alternative https://stackoverflow.com/a/67692/4521646
sys.path.append("pytorch_lightning")
import info
import setup_tools

# https://packaging.python.org/guides/single-sourcing-package-version/
# http://blog.ionelmc.ro/2014/05/25/python-packaging/
PATH_ROOT = os.path.dirname(__file__)
builtins.__LIGHTNING_SETUP__ = True

import pytorch_lightning # noqa: E402
from pytorch_lightning.setup_tools import _load_readme_description, _load_requirements # noqa: E402
_PATH_ROOT = os.path.dirname(__file__)
_PATH_REQUIRE = os.path.join(_PATH_ROOT, 'requirements')

# https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-extras
# Define package extras. These are only installed if you specify them.
# From remote, use like `pip install pytorch-lightning[dev, docs]`
# From local copy of repo, use like `pip install ".[dev, docs]"`
extras = {
# 'docs': load_requirements(file_name='docs.txt'),
'examples': _load_requirements(path_dir=os.path.join(PATH_ROOT, 'requirements'), file_name='examples.txt'),
'loggers': _load_requirements(path_dir=os.path.join(PATH_ROOT, 'requirements'), file_name='loggers.txt'),
'extra': _load_requirements(path_dir=os.path.join(PATH_ROOT, 'requirements'), file_name='extra.txt'),
'test': _load_requirements(path_dir=os.path.join(PATH_ROOT, 'requirements'), file_name='test.txt')
'examples': setup_tools._load_requirements(path_dir=_PATH_REQUIRE, file_name='examples.txt'),
'loggers': setup_tools._load_requirements(path_dir=_PATH_REQUIRE, file_name='loggers.txt'),
'extra': setup_tools._load_requirements(path_dir=_PATH_REQUIRE, file_name='extra.txt'),
'test': setup_tools._load_requirements(path_dir=_PATH_REQUIRE, file_name='test.txt')
}
extras['dev'] = extras['extra'] + extras['loggers'] + extras['test']
extras['all'] = extras['dev'] + extras['examples'] # + extras['docs']
@@ -53,29 +55,35 @@
# filter cpu only packages
extras[ex] = [pkg for pkg in extras[kw] if not any(pgpu.lower() in pkg.lower() for pgpu in PACKAGES_GPU_ONLY)]

long_description = setup_tools._load_readme_description(
_PATH_ROOT,
homepage=info.__homepage__,
version=info.__version__,
)

# https://packaging.python.org/discussions/install-requires-vs-requirements /
# keep the meta-data here for simplicity in reading this file... it's not obvious
# what happens and to non-engineers they won't know to look in init ...
# the goal of the project is simplicity for researchers, don't want to add too much
# engineer specific practices
setup(
name="pytorch-lightning",
version=pytorch_lightning.__version__,
description=pytorch_lightning.__docs__,
author=pytorch_lightning.__author__,
author_email=pytorch_lightning.__author_email__,
url=pytorch_lightning.__homepage__,
version=info.__version__,
description=info.__docs__,
author=info.__author__,
author_email=info.__author_email__,
url=info.__homepage__,
download_url='https://github.com/PyTorchLightning/pytorch-lightning',
license=pytorch_lightning.__license__,
license=info.__license__,
packages=find_packages(exclude=['tests', 'tests/*', 'benchmarks', 'legacy', 'legacy/*']),
long_description=_load_readme_description(PATH_ROOT),
long_description=long_description,
long_description_content_type='text/markdown',
include_package_data=True,
zip_safe=False,
keywords=['deep learning', 'pytorch', 'AI'],
python_requires='>=3.6',
setup_requires=[],
install_requires=_load_requirements(PATH_ROOT),
install_requires=setup_tools._load_requirements(_PATH_ROOT),
extras_require=extras,
project_urls={
"Bug Tracker": "https://github.com/PyTorchLightning/pytorch-lightning/issues",
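The `-cpu` extras loop above is only partly visible in the diff. A hedged reconstruction of the idea — the contents of `PACKAGES_GPU_ONLY`, the example packages, and the loop variables are assumptions: every extra gets a CPU-only twin with GPU-only packages filtered out, which would allow something like `pip install "pytorch-lightning[dev-cpu]"` on machines without a GPU.

```python
# Hedged reconstruction of the extras filtering shown above.
PACKAGES_GPU_ONLY = ['horovod']  # assumed example content
extras = {
    'extra': ['matplotlib>=3.1.1', 'horovod>=0.21.2'],
    'test': ['pytest>=6.0'],
}

for kw in list(extras):
    ex = f'{kw}-cpu'
    extras[ex] = [
        pkg for pkg in extras[kw]
        if not any(pgpu.lower() in pkg.lower() for pgpu in PACKAGES_GPU_ONLY)
    ]

print(extras['extra-cpu'])  # ['matplotlib>=3.1.1']
```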
