From 55dbbf68790acf50134cd15fa74bf5961667bf69 Mon Sep 17 00:00:00 2001
From: Jirka Borovec
Date: Sat, 20 Mar 2021 19:58:59 +0100
Subject: [PATCH] fixing examples (#6600)

* try Azure

* -e

* path

(cherry picked from commit cb590392880aefcb0830bf00ec08e4beef6d4f7e)
---
 azure-pipelines.yml                            | 10 +++++-----
 pl_examples/basic_examples/submit_ddp2_job.sh  |  2 +-
 pl_examples/basic_examples/submit_ddp_job.sh   |  2 +-
 tests/__init__.py                              |  4 ++--
 tests/base/model_template.py                   |  3 ++-
 tests/checkpointing/test_legacy_checkpoints.py |  4 ++--
 tests/helpers/advanced_models.py               |  4 +++-
 tests/helpers/datasets.py                      | 15 +++++----------
 tests/helpers/test_datasets.py                 | 11 ++++++++---
 9 files changed, 29 insertions(+), 26 deletions(-)

diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 6dfddda0295fe2..1447176c7ea707 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -95,12 +95,12 @@ jobs:
         python -m pytest benchmarks -v --maxfail=2 --durations=0
       displayName: 'Testing: benchmarks'
 
-    - bash: |
+    - script: |
+        set -e
         python -m pytest pl_examples -v --maxfail=2 --durations=0
         python setup.py install --user --quiet
         bash pl_examples/run_ddp-example.sh
-        cd pl_examples/basic_examples
-        bash submit_ddp_job.sh
-        bash submit_ddp2_job.sh
-        pip uninstall -y pytorch-lightning
+        # cd pl_examples/basic_examples
+        # bash submit_ddp_job.sh
+        # bash submit_ddp2_job.sh
       displayName: 'Examples'
diff --git a/pl_examples/basic_examples/submit_ddp2_job.sh b/pl_examples/basic_examples/submit_ddp2_job.sh
index 6fed6afef0d1c9..026589a604c362 100755
--- a/pl_examples/basic_examples/submit_ddp2_job.sh
+++ b/pl_examples/basic_examples/submit_ddp2_job.sh
@@ -24,4 +24,4 @@ source activate $1
 # -------------------------
 
 # run script from above
-srun python3 image_classifier.py --accelerator 'ddp2' --gpus 2 --num_nodes 2
+srun python3 simple_image_classifier.py --accelerator 'ddp2' --gpus 2 --num_nodes 2 --max_epochs 5
diff --git a/pl_examples/basic_examples/submit_ddp_job.sh b/pl_examples/basic_examples/submit_ddp_job.sh
index 383579c4346b6d..b4f5ff0a64d92f 100755
--- a/pl_examples/basic_examples/submit_ddp_job.sh
+++ b/pl_examples/basic_examples/submit_ddp_job.sh
@@ -24,4 +24,4 @@ source activate $1
 # -------------------------
 
 # run script from above
-srun python3 image_classifier.py --accelerator 'ddp' --gpus 2 --num_nodes 2
+srun python3 simple_image_classifier.py --accelerator 'ddp' --gpus 2 --num_nodes 2 --max_epochs 5
diff --git a/tests/__init__.py b/tests/__init__.py
index 7f88230f3296e3..e002e36518661a 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -24,8 +24,8 @@
 _TEST_ROOT = os.path.dirname(__file__)
 _PROJECT_ROOT = os.path.dirname(_TEST_ROOT)
 _TEMP_PATH = os.path.join(_PROJECT_ROOT, 'test_temp')
-DATASETS_PATH = os.path.join(_PROJECT_ROOT, 'Datasets')
-LEGACY_PATH = os.path.join(_PROJECT_ROOT, 'legacy')
+PATH_DATASETS = os.path.join(_PROJECT_ROOT, 'Datasets')
+PATH_LEGACY = os.path.join(_PROJECT_ROOT, 'legacy')
 
 # todo: this setting `PYTHONPATH` may not be used by other evns like Conda for import packages
 if _PROJECT_ROOT not in os.getenv('PYTHONPATH', ""):
diff --git a/tests/base/model_template.py b/tests/base/model_template.py
index 1d36df8f5ef501..991ed03a4b7a3f 100644
--- a/tests/base/model_template.py
+++ b/tests/base/model_template.py
@@ -18,6 +18,7 @@
 import torch.nn.functional as F
 
 from pytorch_lightning.core.lightning import LightningModule
+from tests import PATH_DATASETS
 from tests.base.model_optimizers import ConfigureOptimizersPool
 from tests.base.model_test_dataloaders import TestDataloaderVariations
 from tests.base.model_test_epoch_ends import TestEpochEndVariations
@@ -28,7 +29,7 @@
 from tests.base.model_valid_dataloaders import ValDataloaderVariations
 from tests.base.model_valid_epoch_ends import ValidationEpochEndVariations
 from tests.base.model_valid_steps import ValidationStepVariations
-from tests.helpers.datasets import PATH_DATASETS, TrialMNIST
+from tests.helpers.datasets import TrialMNIST
 
 
 class EvalModelTemplate(
diff --git a/tests/checkpointing/test_legacy_checkpoints.py b/tests/checkpointing/test_legacy_checkpoints.py
index b5d22372ff15f0..f40b849dd2b362 100644
--- a/tests/checkpointing/test_legacy_checkpoints.py
+++ b/tests/checkpointing/test_legacy_checkpoints.py
@@ -18,9 +18,9 @@
 import pytest
 
 from pytorch_lightning import Trainer
-from tests import LEGACY_PATH
+from tests import PATH_LEGACY
 
-LEGACY_CHECKPOINTS_PATH = os.path.join(LEGACY_PATH, 'checkpoints')
+LEGACY_CHECKPOINTS_PATH = os.path.join(PATH_LEGACY, 'checkpoints')
 CHECKPOINT_EXTENSION = ".ckpt"
 
 
diff --git a/tests/helpers/advanced_models.py b/tests/helpers/advanced_models.py
index 7ad678b3046fdb..2b0146e1ee0998 100644
--- a/tests/helpers/advanced_models.py
+++ b/tests/helpers/advanced_models.py
@@ -20,6 +20,7 @@
 from torch.utils.data import DataLoader
 
 from pytorch_lightning.core.lightning import LightningModule
+from tests import PATH_DATASETS
 from tests.helpers.datasets import AverageDataset, MNIST, TrialMNIST
 
 
@@ -165,7 +166,7 @@ def configure_optimizers(self):
         return [opt_g, opt_d], []
 
     def train_dataloader(self):
-        return DataLoader(TrialMNIST(train=True, download=True), batch_size=16)
+        return DataLoader(TrialMNIST(root=PATH_DATASETS, train=True, download=True), batch_size=16)
 
 
 class ParityModuleRNN(LightningModule):
@@ -223,6 +224,7 @@ def configure_optimizers(self):
 
     def train_dataloader(self):
         return DataLoader(MNIST(
+            root=PATH_DATASETS,
             train=True,
             download=True,
         ), batch_size=128, num_workers=1)
diff --git a/tests/helpers/datasets.py b/tests/helpers/datasets.py
index e7bdad0f1538c3..77035796ca3b18 100644
--- a/tests/helpers/datasets.py
+++ b/tests/helpers/datasets.py
@@ -22,11 +22,6 @@
 from torch import Tensor
 from torch.utils.data import Dataset
 
-from tests import _PROJECT_ROOT
-
-#: local path to test datasets
-PATH_DATASETS = os.path.join(_PROJECT_ROOT, 'Datasets')
-
 
 class MNIST(Dataset):
     """
@@ -47,7 +42,7 @@ class MNIST(Dataset):
             downloaded again.
 
     Examples:
-        >>> dataset = MNIST(download=True)
+        >>> dataset = MNIST(".", download=True)
        >>> len(dataset)
         60000
         >>> torch.bincount(dataset.targets)
@@ -65,7 +60,7 @@ class MNIST(Dataset):
 
     def __init__(
         self,
-        root: str = PATH_DATASETS,
+        root: str,
         train: bool = True,
         normalize: tuple = (0.1307, 0.3081),
         download: bool = True,
@@ -152,7 +147,7 @@ class TrialMNIST(MNIST):
         kwargs: Same as MNIST
 
     Examples:
-        >>> dataset = TrialMNIST(download=True)
+        >>> dataset = TrialMNIST(".", download=True)
         >>> len(dataset)
         300
         >>> sorted(set([d.item() for d in dataset.targets]))
@@ -161,7 +156,7 @@ class TrialMNIST(MNIST):
         tensor([100, 100, 100])
     """
 
-    def __init__(self, num_samples: int = 100, digits: Optional[Sequence] = (0, 1, 2), **kwargs):
+    def __init__(self, root: str, num_samples: int = 100, digits: Optional[Sequence] = (0, 1, 2), **kwargs):
         # number of examples per class
         self.num_samples = num_samples
         # take just a subset of MNIST dataset
@@ -169,7 +164,7 @@ def __init__(self, num_samples: int = 100, digits: Optional[Sequence] = (0, 1, 2
 
         self.cache_folder_name = f"digits-{'-'.join(str(d) for d in self.digits)}_nb-{self.num_samples}"
 
-        super().__init__(normalize=(0.5, 1.0), **kwargs)
+        super().__init__(root, normalize=(0.5, 1.0), **kwargs)
 
     @staticmethod
     def _prepare_subset(full_data: torch.Tensor, full_targets: torch.Tensor, num_samples: int, digits: Sequence):
diff --git a/tests/helpers/test_datasets.py b/tests/helpers/test_datasets.py
index 6319fdb5625041..42b5df0ff91a4c 100644
--- a/tests/helpers/test_datasets.py
+++ b/tests/helpers/test_datasets.py
@@ -16,12 +16,17 @@
 import cloudpickle
 import pytest
 
+from tests import PATH_DATASETS
 from tests.helpers.datasets import AverageDataset, MNIST, TrialMNIST
 
 
-@pytest.mark.parametrize('dataset_cls', [MNIST, TrialMNIST, AverageDataset])
-def test_pickling_dataset_mnist(tmpdir, dataset_cls):
-    mnist = dataset_cls()
+@pytest.mark.parametrize('dataset_cls,args', [
+    (MNIST, dict(root=PATH_DATASETS)),
+    (TrialMNIST, dict(root=PATH_DATASETS)),
+    (AverageDataset, dict()),
+])
+def test_pickling_dataset_mnist(tmpdir, dataset_cls, args):
+    mnist = dataset_cls(**args)
 
     mnist_pickled = pickle.dumps(mnist)
     pickle.loads(mnist_pickled)
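After this patch, the test datasets no longer default their `root` argument to a module-level `PATH_DATASETS`; the datasets directory is passed explicitly, and test code imports `PATH_DATASETS` (and `PATH_LEGACY`) from the `tests` package rather than from `tests.helpers.datasets`. A minimal usage sketch of the updated API, assuming the patched `tests` package is importable (i.e. the project root is on `PYTHONPATH`):

    from torch.utils.data import DataLoader

    from tests import PATH_DATASETS  # '<project root>/Datasets', defined in tests/__init__.py
    from tests.helpers.datasets import MNIST, TrialMNIST

    # Both datasets now take the cache/download directory as an explicit `root` argument.
    mnist = MNIST(root=PATH_DATASETS, train=True, download=True)
    trial = TrialMNIST(root=PATH_DATASETS, num_samples=100, digits=(0, 1, 2), download=True)

    # e.g. feed the reduced dataset to a DataLoader, as tests/helpers/advanced_models.py does
    loader = DataLoader(trial, batch_size=16)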