Skip to content

Commit

Permalink
format
Browse files Browse the repository at this point in the history
  • Loading branch information
Borda committed Jul 23, 2020
1 parent f336507 commit b0860b0
Showing 1 changed file with 3 additions and 4 deletions.
7 changes: 3 additions & 4 deletions pytorch_lightning/trainer/distrib_data_parallel.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,9 +162,9 @@ def train_fx(trial_hparams, cluster_manager, _):
else:
XLA_AVAILABLE = True

pid = os.getpid()
rng1 = np.random.RandomState(pid)
RANDOM_PORTS = rng1.randint(10000, 19999, 100)
# Module-level constants, seeded once at import time.
# Seeding with the process PID gives each launching process its own
# deterministic-but-distinct sequence of candidate ports.
PID = os.getpid()
RNG1 = np.random.RandomState(PID)
# Pool of 100 candidate ports in [10000, 19999) used for DDP master-port
# selection; drawn from the PID-seeded RNG above.
RANDOM_PORTS = RNG1.randint(10000, 19999, 100)


class TrainerDDPMixin(ABC):
Expand Down Expand Up @@ -345,7 +345,6 @@ def configure_slurm_ddp(self, num_gpu_nodes):
def determine_local_rank(self):
    """Return this process's local rank as an ``int``.

    When the job is managed by SLURM the rank is taken from the
    ``SLURM_LOCALID`` environment variable (required in that mode, so a
    missing variable raises ``KeyError``). Otherwise the ``LOCAL_RANK``
    variable is honored, defaulting to rank 0 when unset.
    """
    # Guard clause: SLURM-managed runs always define SLURM_LOCALID.
    if self.is_slurm_managing_tasks:
        return int(os.environ['SLURM_LOCALID'])
    # Non-SLURM launchers (e.g. torch.distributed.launch) set LOCAL_RANK.
    return int(os.environ.get('LOCAL_RANK', 0))

Expand Down

0 comments on commit b0860b0

Please sign in to comment.