From 2acaf02a50863082db39dc7915067ec25bc2daba Mon Sep 17 00:00:00 2001 From: thorstenwagner Date: Thu, 19 Oct 2023 12:07:51 +0200 Subject: [PATCH] Increase the dataloader timeout to 180 seconds. On some clusters (like ours), long worker delays sometimes occur, causing spurious timeout errors at the previous 60-second limit. --- tomotwin/modules/training/torchtrainer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tomotwin/modules/training/torchtrainer.py b/tomotwin/modules/training/torchtrainer.py index 00aaa33..c2479cc 100644 --- a/tomotwin/modules/training/torchtrainer.py +++ b/tomotwin/modules/training/torchtrainer.py @@ -508,7 +508,7 @@ def get_train_test_dataloader(self) -> Tuple[DataLoader, DataLoader]: num_workers=self.workers, pin_memory=False, # prefetch_factor=5, - timeout=60, + timeout=180, ) test_loader = None