Commit 01f26b4

tpu
awaelchli committed Feb 22, 2021
1 parent 0714933 commit 01f26b4
Showing 2 changed files with 34 additions and 2 deletions.
4 changes: 2 additions & 2 deletions pytorch_lightning/accelerators/tpu.py
@@ -19,8 +19,8 @@ class TPUAccelerator(Accelerator):
     def setup(self, trainer, model):
         if isinstance(self.precision_plugin, MixedPrecisionPlugin):
             raise MisconfigurationException(
-                "amp + tpu is not supported. "
-                "Only bfloats are supported on TPU. Consider using TPUHalfPrecisionPlugin"
+                "amp + tpu is not supported."
+                " Only bfloats are supported on TPU. Consider using TPUHalfPrecisionPlugin"
             )
 
         if not isinstance(self.training_type_plugin, (SingleTPUPlugin, TPUSpawnPlugin)):
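
Note that the two message variants concatenate to the identical runtime string: Python merges adjacent string literals at compile time, so this change only moves the separating space from one literal to the next without altering the error users see. A minimal standalone sketch (not part of the commit) verifying that:

old = ("amp + tpu is not supported. "
       "Only bfloats are supported on TPU. Consider using TPUHalfPrecisionPlugin")
new = ("amp + tpu is not supported."
       " Only bfloats are supported on TPU. Consider using TPUHalfPrecisionPlugin")
# Adjacent string literals are merged at compile time, so both spellings
# produce exactly the same exception message.
assert old == new
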
32 changes: 32 additions & 0 deletions tests/accelerators/test_tpu.py
@@ -0,0 +1,32 @@
+from unittest.mock import Mock
+
+import pytest
+
+from pytorch_lightning.accelerators import TPUAccelerator
+from pytorch_lightning.plugins import SingleTPUPlugin, DDPPlugin, PrecisionPlugin
+from pytorch_lightning.plugins.precision import MixedPrecisionPlugin
+from pytorch_lightning.utilities.exceptions import MisconfigurationException
+
+
+def test_unsupported_precision_plugins():
+    """ Test error messages are raised for unsupported precision plugins with TPU. """
+    trainer = Mock()
+    model = Mock()
+    accelerator = TPUAccelerator(
+        training_type_plugin=SingleTPUPlugin(device=Mock()),
+        precision_plugin=MixedPrecisionPlugin(),
+    )
+    with pytest.raises(MisconfigurationException, match=r"amp \+ tpu is not supported."):
+        accelerator.setup(trainer=trainer, model=model)
+
+
+def test_unsupported_training_type_plugins():
+    """ Test error messages are raised for unsupported training type with TPU. """
+    trainer = Mock()
+    model = Mock()
+    accelerator = TPUAccelerator(
+        training_type_plugin=DDPPlugin(),
+        precision_plugin=PrecisionPlugin(),
+    )
+    with pytest.raises(MisconfigurationException, match="TPUs only support a single tpu core or tpu spawn training"):
+        accelerator.setup(trainer=trainer, model=model)
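
Both tests pass their expected text through pytest.raises(match=...), which applies re.search to the exception message; that is why the literal "+" in "amp + tpu" is escaped in the first test's pattern. A standalone sketch of that check, with an abbreviated message string used purely for illustration:

import re

# pytest.raises(match=...) runs re.search over the exception text, so regex
# metacharacters such as "+" must be escaped in the expected message.
message = "amp + tpu is not supported. Only bfloats are supported on TPU."
assert re.search(r"amp \+ tpu is not supported.", message)
assert not re.search(r"amp + tpu", message)  # unescaped "+" acts as a quantifier
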
