Commit: flake8
Borda committed Jan 4, 2021
1 parent 137cf11 commit 523d492
Showing 3 changed files with 3 additions and 4 deletions.
4 changes: 2 additions & 2 deletions pytorch_lightning/trainer/supporters.py
@@ -257,7 +257,7 @@ def __init__(self, datasets: Union[Sequence, Mapping], mode: str):
     Args:
         datasets: a sequence/mapping datasets. Can be a collections of torch.utils.Dataset,
             Iterable or even None.
-        mode: whether to use the minimum number of batches in all samples or the maximum
+        mode: whether to use the minimum number of batches in all samples or the maximum
             number of batches in all samples.
     """
@@ -307,7 +307,7 @@ def __len__(self) -> int:
 class CombinedLoader(object):
     """
     Combines different dataloaders and allows sampling in parallel.
     Supported modes are 'min_size', which raises StopIteration after the shortest loader
     (the one with the lowest number of batches) is done, and 'max_size_cycle' which raises
     StopIteration after the longest loader (the one with most batches) is done, while cycling
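For context (not part of this commit): a minimal sketch of the two modes described in the CombinedLoader docstring above. The constructor signature is assumed from that documentation (a mapping of dataloaders plus a mode string); the batch sizes and tensor shapes below are arbitrary illustration values.

import torch
from torch.utils.data import DataLoader, TensorDataset

from pytorch_lightning.trainer.supporters import CombinedLoader

loaders = {
    'a': DataLoader(TensorDataset(torch.randn(100, 1)), batch_size=10),  # 10 batches
    'b': DataLoader(TensorDataset(torch.randn(50, 1)), batch_size=10),   # 5 batches
}

# 'min_size': iteration stops once the shortest loader is exhausted (5 batches here).
combined = CombinedLoader(loaders, 'min_size')
print(len(combined))  # 5

# 'max_size_cycle': iteration runs until the longest loader is done (10 batches),
# cycling the shorter loader in the meantime.
combined = CombinedLoader(loaders, 'max_size_cycle')
print(len(combined))  # 10

for batch in combined:
    # each batch is a mapping with the same keys as `loaders`
    assert set(batch.keys()) == {'a', 'b'}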
2 changes: 1 addition & 1 deletion tests/base/model_train_dataloaders.py
@@ -42,7 +42,7 @@ def train_dataloader__multiple_mapping(self):
"""Return a mapping loaders with different lengths"""
return {'a': self.dataloader(train=True, num_samples=100),
'b': self.dataloader(train=True, num_samples=50)}

def train_dataloader__multiple_sequence(self):
return [self.dataloader(train=True, num_samples=100),
self.dataloader(train=True, num_samples=50)]
1 change: 0 additions & 1 deletion tests/base/model_train_steps.py
@@ -174,4 +174,3 @@ def training_step__multiple_dataloaders(self, batch, batch_idx, optimizer_idx=None):
             }
         )
         return output
-