From 38418922dfbb7d4b983cd74d6cd8556d061b63b7 Mon Sep 17 00:00:00 2001
From: rohitgr7
Date: Sun, 27 Sep 2020 19:56:49 +0530
Subject: [PATCH] isort

---
 pytorch_lightning/accelerators/base_backend.py   |  9 +++++----
 pytorch_lightning/accelerators/cpu_backend.py    |  3 ++-
 .../accelerators/ddp_base_backend.py             | 13 +++++++------
 pytorch_lightning/accelerators/dp_backend.py     |  4 ++--
 pytorch_lightning/accelerators/gpu_backend.py    |  3 ++-
 .../accelerators/horovod_backend.py              |  6 ++++--
 pytorch_lightning/accelerators/tpu_backend.py    |  8 ++++----
 pytorch_lightning/callbacks/model_checkpoint.py  |  2 +-
 pytorch_lightning/core/lightning.py              | 16 ++++++----------
 .../trainer/connectors/precision_connector.py    |  4 ++--
 10 files changed, 35 insertions(+), 33 deletions(-)

diff --git a/pytorch_lightning/accelerators/base_backend.py b/pytorch_lightning/accelerators/base_backend.py
index 87750ee8d1c6b..5bc69cc08e819 100644
--- a/pytorch_lightning/accelerators/base_backend.py
+++ b/pytorch_lightning/accelerators/base_backend.py
@@ -1,10 +1,11 @@
-import torch
+import math
 from typing import Any
-from pytorch_lightning.utilities.apply_func import move_data_to_device
+
+import torch
+
 from pytorch_lightning.utilities import AMPType, rank_zero_warn
+from pytorch_lightning.utilities.apply_func import move_data_to_device
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
-import math
-
 
 try:
     from apex import amp
diff --git a/pytorch_lightning/accelerators/cpu_backend.py b/pytorch_lightning/accelerators/cpu_backend.py
index 610a69865161c..b5973ceccacd0 100644
--- a/pytorch_lightning/accelerators/cpu_backend.py
+++ b/pytorch_lightning/accelerators/cpu_backend.py
@@ -12,9 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import torch
-from pytorch_lightning.utilities.exceptions import MisconfigurationException
+
 from pytorch_lightning.accelerators.base_backend import Accelerator
 from pytorch_lightning.utilities import AMPType, rank_zero_warn
+from pytorch_lightning.utilities.exceptions import MisconfigurationException
 
 
 class CPUBackend(Accelerator):
diff --git a/pytorch_lightning/accelerators/ddp_base_backend.py b/pytorch_lightning/accelerators/ddp_base_backend.py
index 310a357adeb87..89b524562f2a8 100644
--- a/pytorch_lightning/accelerators/ddp_base_backend.py
+++ b/pytorch_lightning/accelerators/ddp_base_backend.py
@@ -13,20 +13,21 @@
 # limitations under the License
 import os
 import re
-import torch
-from pytorch_lightning.utilities import AMPType
-from pytorch_lightning.accelerators.base_backend import Accelerator
+import torch
 import torch.distributed as torch_distrib
 import torch.distributed as dist
-from pytorch_lightning.utilities.cloud_io import atomic_save
-from pytorch_lightning.utilities.distributed import rank_zero_warn, rank_zero_only
+
 from pytorch_lightning import _logger as log
+from pytorch_lightning.accelerators.base_backend import Accelerator
+from pytorch_lightning.utilities import AMPType
+from pytorch_lightning.utilities.cloud_io import atomic_save
+from pytorch_lightning.utilities.distributed import rank_zero_only, rank_zero_warn
 from pytorch_lightning.utilities.seed import seed_everything
 
 try:
-    from hydra.utils import to_absolute_path, get_original_cwd
     from hydra.core.hydra_config import HydraConfig
+    from hydra.utils import get_original_cwd, to_absolute_path
 except ImportError:
     HYDRA_AVAILABLE = False
 else:
diff --git a/pytorch_lightning/accelerators/dp_backend.py b/pytorch_lightning/accelerators/dp_backend.py
index b96f8e406a599..87d00bcd8bc8c 100644
--- a/pytorch_lightning/accelerators/dp_backend.py
+++ b/pytorch_lightning/accelerators/dp_backend.py
@@ -15,12 +15,12 @@
 import torch
 from torch import optim
 
+from pytorch_lightning.accelerators.base_backend import Accelerator
 from pytorch_lightning.core import LightningModule
+from pytorch_lightning.core.step_result import Result
 from pytorch_lightning.overrides.data_parallel import LightningDataParallel
 from pytorch_lightning.utilities import AMPType
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
-from pytorch_lightning.core.step_result import Result
-from pytorch_lightning.accelerators.base_backend import Accelerator
 
 
 class DataParallelBackend(Accelerator):
diff --git a/pytorch_lightning/accelerators/gpu_backend.py b/pytorch_lightning/accelerators/gpu_backend.py
index 92d040580980c..d3c6c59160ac4 100644
--- a/pytorch_lightning/accelerators/gpu_backend.py
+++ b/pytorch_lightning/accelerators/gpu_backend.py
@@ -13,8 +13,9 @@
 # limitations under the License.
 
 import torch
-from pytorch_lightning.utilities import AMPType
+
 from pytorch_lightning.accelerators.base_backend import Accelerator
+from pytorch_lightning.utilities import AMPType
 
 
 class GPUBackend(Accelerator):
diff --git a/pytorch_lightning/accelerators/horovod_backend.py b/pytorch_lightning/accelerators/horovod_backend.py
index a8ccb66f9823b..cfdf80fa2b264 100644
--- a/pytorch_lightning/accelerators/horovod_backend.py
+++ b/pytorch_lightning/accelerators/horovod_backend.py
@@ -12,11 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from contextlib import ExitStack
+
 import torch
-from pytorch_lightning.utilities import AMPType
+from torch.optim.lr_scheduler import _LRScheduler
+
 from pytorch_lightning.accelerators.base_backend import Accelerator
+from pytorch_lightning.utilities import AMPType
 from pytorch_lightning.utilities.distributed import rank_zero_only
-from torch.optim.lr_scheduler import _LRScheduler
 
 try:
     import horovod.torch as hvd
diff --git a/pytorch_lightning/accelerators/tpu_backend.py b/pytorch_lightning/accelerators/tpu_backend.py
index 8d1302d064d66..e12e2043e6c02 100644
--- a/pytorch_lightning/accelerators/tpu_backend.py
+++ b/pytorch_lightning/accelerators/tpu_backend.py
@@ -19,17 +19,17 @@
 import torch.multiprocessing as mp
 
 from pytorch_lightning import _logger as log
-from pytorch_lightning.core import LightningModule
-from pytorch_lightning.utilities import rank_zero_info, rank_zero_only, rank_zero_warn, AMPType
-from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.accelerators.base_backend import Accelerator
+from pytorch_lightning.core import LightningModule
+from pytorch_lightning.utilities import AMPType, rank_zero_info, rank_zero_only, rank_zero_warn
 from pytorch_lightning.utilities.cloud_io import atomic_save
+from pytorch_lightning.utilities.exceptions import MisconfigurationException
 
 try:
     import torch_xla
     import torch_xla.core.xla_model as xm
-    import torch_xla.distributed.xla_multiprocessing as xmp
     import torch_xla.distributed.parallel_loader as xla_pl
+    import torch_xla.distributed.xla_multiprocessing as xmp
 except ImportError:
     XLA_AVAILABLE = False
 else:
diff --git a/pytorch_lightning/callbacks/model_checkpoint.py b/pytorch_lightning/callbacks/model_checkpoint.py
index b05565a0debd9..dbab9803c8276 100644
--- a/pytorch_lightning/callbacks/model_checkpoint.py
+++ b/pytorch_lightning/callbacks/model_checkpoint.py
@@ -542,4 +542,4 @@ def to_yaml(self, filepath: Optional[Union[str, Path]] = None):
         if filepath is None:
             filepath = os.path.join(self.dirpath, "best_k_models.yaml")
         with open(filepath, "w") as fp:
-            yaml.dump(best_k, fp)
\ No newline at end of file
+            yaml.dump(best_k, fp)
diff --git a/pytorch_lightning/core/lightning.py b/pytorch_lightning/core/lightning.py
index 8f5c608b78225..2171520d358ec 100644
--- a/pytorch_lightning/core/lightning.py
+++ b/pytorch_lightning/core/lightning.py
@@ -23,6 +23,11 @@
 import torch
 import torch.distributed as torch_distrib
+from torch import ScriptModule, Tensor
+from torch.nn import Module
+from torch.nn.parallel import DistributedDataParallel
+from torch.optim.optimizer import Optimizer
+
 from pytorch_lightning import _logger as log
 from pytorch_lightning.core.grads import GradInformation
 from pytorch_lightning.core.hooks import DataHooks, ModelHooks
@@ -32,16 +37,7 @@
 from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
 from pytorch_lightning.utilities import rank_zero_warn
 from pytorch_lightning.utilities.device_dtype_mixin import DeviceDtypeModuleMixin
-from pytorch_lightning.utilities.parsing import (
-    AttributeDict,
-    collect_init_args,
-    get_init_args,
-)
-from torch import ScriptModule, Tensor
-from torch.nn import Module
-from torch.nn.parallel import DistributedDataParallel
-from torch.optim.optimizer import Optimizer
-
+from pytorch_lightning.utilities.parsing import AttributeDict, collect_init_args, get_init_args
 
 try:
     import torch_xla.core.xla_model as xm
diff --git a/pytorch_lightning/trainer/connectors/precision_connector.py b/pytorch_lightning/trainer/connectors/precision_connector.py
index b92207163f009..f85ebb46bd29f 100644
--- a/pytorch_lightning/trainer/connectors/precision_connector.py
+++ b/pytorch_lightning/trainer/connectors/precision_connector.py
@@ -13,9 +13,9 @@
 # limitations under the License.
 
 from pytorch_lightning import _logger as log
-from pytorch_lightning.utilities import APEX_AVAILABLE, NATIVE_AMP_AVALAIBLE, rank_zero_warn, AMPType
-from pytorch_lightning.plugins.native_amp import NativeAMP
 from pytorch_lightning.plugins.apex import ApexPlugin
+from pytorch_lightning.plugins.native_amp import NativeAMP
+from pytorch_lightning.utilities import APEX_AVAILABLE, NATIVE_AMP_AVALAIBLE, AMPType, rank_zero_warn
 
 
 class PrecisionConnector: