
Commit

initialize communication backend
hwchen2017 committed Dec 16, 2024
1 parent 87c6506 commit b0ba18e
Showing 1 changed file with 5 additions and 3 deletions.
deepspeed/runtime/domino/transformer.py (8 changes: 5 additions & 3 deletions)
@@ -6,13 +6,13 @@
 import torch
 import torch.nn.functional as F
 from torch.nn.parameter import Parameter
-import deepspeed
-from deepspeed import comm as dist
+import deepspeed.comm
+from deepspeed.comm.comm import init_distributed
 from deepspeed.accelerator import get_accelerator


 def is_rank_0():
-    if dist.get_rank() == 0:
+    if deepspeed.comm.get_rank() == 0:
         return True


@@ -249,6 +249,8 @@ def __init__(self,
                  output_bias=None):
         super(DominoTransformerLayer, self).__init__()

+        init_distributed()
+
         self.llama_model = config.llama_model
         self.layer_number = layer_number
         self.layer_type = layer_type
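For context, the substance of the change is: import init_distributed from deepspeed.comm.comm, call it in DominoTransformerLayer.__init__ so the communication backend is set up before any rank query or collective, and route the rank check in is_rank_0 through deepspeed.comm. A minimal standalone sketch of the same pattern follows; it assumes DeepSpeed is installed and the process is started by a distributed launcher (for example the deepspeed CLI or torchrun) that provides RANK, WORLD_SIZE, MASTER_ADDR, and MASTER_PORT. It illustrates the pattern and is not the commit's code.

# Sketch of the initialization pattern this commit relies on (illustrative only).
# Assumes DeepSpeed is installed and launcher-provided env vars are present.
import deepspeed.comm
from deepspeed.comm.comm import init_distributed


def is_rank_0():
    # Same shape as the helper in transformer.py: query the rank through
    # deepspeed.comm once the backend has been initialized.
    return deepspeed.comm.get_rank() == 0


if __name__ == "__main__":
    # Initialize the communication backend (NCCL by default on CUDA devices);
    # this is the call the commit adds to DominoTransformerLayer.__init__.
    init_distributed()
    if is_rank_0():
        print("communication backend initialized; this is rank 0")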
