
Commit

isort
rohitgr7 committed Sep 27, 2020
1 parent 835308b commit 3841892
Showing 10 changed files with 35 additions and 33 deletions.
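
The reordering in each file follows isort's default grouping: standard-library imports first, then third-party packages such as torch, then first-party pytorch_lightning modules, with each group alphabetized and the groups separated by a blank line. A minimal sketch of that behaviour via isort's Python API (the known_first_party value is an illustrative assumption, not necessarily the project's actual configuration):

# Hedged sketch: reproduce the kind of reordering applied in this commit.
# The known_first_party value is an assumption for illustration only.
import isort

messy = (
    "import torch\n"
    "import math\n"
    "from pytorch_lightning.utilities import AMPType, rank_zero_warn\n"
)

tidy = isort.code(messy, known_first_party=["pytorch_lightning"])
print(tidy)
# Expected grouping: math (stdlib), blank line, torch (third-party),
# blank line, the pytorch_lightning import (first-party).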
9 changes: 5 additions & 4 deletions pytorch_lightning/accelerators/base_backend.py
@@ -1,10 +1,11 @@
import torch
import math
from typing import Any
from pytorch_lightning.utilities.apply_func import move_data_to_device

import torch

from pytorch_lightning.utilities import AMPType, rank_zero_warn
from pytorch_lightning.utilities.apply_func import move_data_to_device
from pytorch_lightning.utilities.exceptions import MisconfigurationException
import math


try:
    from apex import amp
3 changes: 2 additions & 1 deletion pytorch_lightning/accelerators/cpu_backend.py
@@ -12,9 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
from pytorch_lightning.utilities.exceptions import MisconfigurationException

from pytorch_lightning.accelerators.base_backend import Accelerator
from pytorch_lightning.utilities import AMPType, rank_zero_warn
from pytorch_lightning.utilities.exceptions import MisconfigurationException


class CPUBackend(Accelerator):
13 changes: 7 additions & 6 deletions pytorch_lightning/accelerators/ddp_base_backend.py
@@ -13,20 +13,21 @@
# limitations under the License
import os
import re
import torch

from pytorch_lightning.utilities import AMPType
from pytorch_lightning.accelerators.base_backend import Accelerator
import torch
import torch.distributed as torch_distrib
import torch.distributed as dist
from pytorch_lightning.utilities.cloud_io import atomic_save
from pytorch_lightning.utilities.distributed import rank_zero_warn, rank_zero_only

from pytorch_lightning import _logger as log
from pytorch_lightning.accelerators.base_backend import Accelerator
from pytorch_lightning.utilities import AMPType
from pytorch_lightning.utilities.cloud_io import atomic_save
from pytorch_lightning.utilities.distributed import rank_zero_only, rank_zero_warn
from pytorch_lightning.utilities.seed import seed_everything

try:
    from hydra.utils import to_absolute_path, get_original_cwd
    from hydra.core.hydra_config import HydraConfig
    from hydra.utils import get_original_cwd, to_absolute_path
except ImportError:
    HYDRA_AVAILABLE = False
else:
4 changes: 2 additions & 2 deletions pytorch_lightning/accelerators/dp_backend.py
@@ -15,12 +15,12 @@
import torch
from torch import optim

from pytorch_lightning.accelerators.base_backend import Accelerator
from pytorch_lightning.core import LightningModule
from pytorch_lightning.core.step_result import Result
from pytorch_lightning.overrides.data_parallel import LightningDataParallel
from pytorch_lightning.utilities import AMPType
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.core.step_result import Result
from pytorch_lightning.accelerators.base_backend import Accelerator


class DataParallelBackend(Accelerator):
3 changes: 2 additions & 1 deletion pytorch_lightning/accelerators/gpu_backend.py
@@ -13,8 +13,9 @@
# limitations under the License.

import torch
from pytorch_lightning.utilities import AMPType

from pytorch_lightning.accelerators.base_backend import Accelerator
from pytorch_lightning.utilities import AMPType


class GPUBackend(Accelerator):
6 changes: 4 additions & 2 deletions pytorch_lightning/accelerators/horovod_backend.py
@@ -12,11 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from contextlib import ExitStack

import torch
from pytorch_lightning.utilities import AMPType
from torch.optim.lr_scheduler import _LRScheduler

from pytorch_lightning.accelerators.base_backend import Accelerator
from pytorch_lightning.utilities import AMPType
from pytorch_lightning.utilities.distributed import rank_zero_only
from torch.optim.lr_scheduler import _LRScheduler

try:
    import horovod.torch as hvd
8 changes: 4 additions & 4 deletions pytorch_lightning/accelerators/tpu_backend.py
@@ -19,17 +19,17 @@
import torch.multiprocessing as mp

from pytorch_lightning import _logger as log
from pytorch_lightning.core import LightningModule
from pytorch_lightning.utilities import rank_zero_info, rank_zero_only, rank_zero_warn, AMPType
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.accelerators.base_backend import Accelerator
from pytorch_lightning.core import LightningModule
from pytorch_lightning.utilities import AMPType, rank_zero_info, rank_zero_only, rank_zero_warn
from pytorch_lightning.utilities.cloud_io import atomic_save
from pytorch_lightning.utilities.exceptions import MisconfigurationException

try:
    import torch_xla
    import torch_xla.core.xla_model as xm
    import torch_xla.distributed.xla_multiprocessing as xmp
    import torch_xla.distributed.parallel_loader as xla_pl
    import torch_xla.distributed.xla_multiprocessing as xmp
except ImportError:
    XLA_AVAILABLE = False
else:
2 changes: 1 addition & 1 deletion pytorch_lightning/callbacks/model_checkpoint.py
@@ -542,4 +542,4 @@ def to_yaml(self, filepath: Optional[Union[str, Path]] = None):
        if filepath is None:
            filepath = os.path.join(self.dirpath, "best_k_models.yaml")
        with open(filepath, "w") as fp:
            yaml.dump(best_k, fp)
            yaml.dump(best_k, fp)
16 changes: 6 additions & 10 deletions pytorch_lightning/core/lightning.py
@@ -23,6 +23,11 @@

import torch
import torch.distributed as torch_distrib
from torch import ScriptModule, Tensor
from torch.nn import Module
from torch.nn.parallel import DistributedDataParallel
from torch.optim.optimizer import Optimizer

from pytorch_lightning import _logger as log
from pytorch_lightning.core.grads import GradInformation
from pytorch_lightning.core.hooks import DataHooks, ModelHooks
@@ -32,16 +37,7 @@
from pytorch_lightning.overrides.data_parallel import LightningDistributedDataParallel
from pytorch_lightning.utilities import rank_zero_warn
from pytorch_lightning.utilities.device_dtype_mixin import DeviceDtypeModuleMixin
from pytorch_lightning.utilities.parsing import (
    AttributeDict,
    collect_init_args,
    get_init_args,
)
from torch import ScriptModule, Tensor
from torch.nn import Module
from torch.nn.parallel import DistributedDataParallel
from torch.optim.optimizer import Optimizer

from pytorch_lightning.utilities.parsing import AttributeDict, collect_init_args, get_init_args

try:
    import torch_xla.core.xla_model as xm
4 changes: 2 additions & 2 deletions pytorch_lightning/trainer/connectors/precision_connector.py
@@ -13,9 +13,9 @@
# limitations under the License.

from pytorch_lightning import _logger as log
from pytorch_lightning.utilities import APEX_AVAILABLE, NATIVE_AMP_AVALAIBLE, rank_zero_warn, AMPType
from pytorch_lightning.plugins.native_amp import NativeAMP
from pytorch_lightning.plugins.apex import ApexPlugin
from pytorch_lightning.plugins.native_amp import NativeAMP
from pytorch_lightning.utilities import APEX_AVAILABLE, NATIVE_AMP_AVALAIBLE, AMPType, rank_zero_warn


class PrecisionConnector:
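
To keep a tree in this state after such a commit, isort also exposes a check mode; a small, hedged sketch with its Python API (the directory path and settings are assumptions):

# Hedged sketch: list files whose imports isort would still re-sort.
# The directory and known_first_party value are illustrative assumptions.
import isort
from pathlib import Path

for path in Path("pytorch_lightning").rglob("*.py"):
    if not isort.check_file(str(path), known_first_party=["pytorch_lightning"]):
        print(f"would be re-sorted: {path}")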
