
Commit

add tpu bfloat
Co-authored-by: Adrian Wälchli <aedu.waelchli@gmail.com>
justusschock and awaelchli committed Jan 30, 2021
1 parent 42a5838 commit 9d18d27
Showing 1 changed file with 28 additions and 0 deletions.
pytorch_lightning/plugins/precision/tpu_bfloat.py (new file, +28 -0)
@@ -0,0 +1,28 @@
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os

import torch

from pytorch_lightning.plugins.precision.precision_plugin import PrecisionPlugin


class TPUHalfPrecisionPlugin(PrecisionPlugin):
    """Plugin that enables bfloat16 computation on TPUs."""

    precision = 16

    def connect(self, model: torch.nn.Module, optimizers, lr_schedulers):
        # Ask torch_xla to store and compute float32 tensors as bfloat16.
        # The variable must be set before the XLA runtime is initialized.
        os.environ["XLA_USE_BF16"] = str(1)
        return super().connect(model=model, optimizers=optimizers, lr_schedulers=lr_schedulers)
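
A minimal sketch of the effect of this environment variable, assuming torch_xla is installed and a TPU (or other XLA device) is available. The plugin itself only sets the variable; the float32-to-bfloat16 mapping is performed by torch_xla:

import os

# Must be set before the XLA runtime initializes.
os.environ["XLA_USE_BF16"] = "1"

import torch
import torch_xla.core.xla_model as xm

device = xm.xla_device()
x = torch.randn(2, 2, device=device)
# Python still reports torch.float32, but XLA stores and computes
# the tensor in bfloat16 on the device.
print(x.dtype)  # torch.float32

Because the mapping is transparent at the Python level, the `precision = 16` attribute here is a label for Lightning's plugin selection rather than a change to the model's reported dtypes.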
