remove TQDMProgressBar callback (#899)
jmoralez authored and marcopeix committed Mar 8, 2024
1 parent 4cd19a1 commit 8fdfb99
Showing 8 changed files with 40 additions and 72 deletions.
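
Context for the change: the Lightning `Trainer` already creates a default `TQDMProgressBar` whenever `enable_progress_bar=True` (the default), so hard-coding the callback in every base class was redundant and got in the way of users who wanted to disable or replace the bar. Below is a minimal sketch of attaching a customized bar after this change; it assumes extra keyword arguments are forwarded to `pl.Trainer` (as these base classes do), and the model choice and hyperparameters are illustrative:

from pytorch_lightning.callbacks import TQDMProgressBar
from neuralforecast import NeuralForecast
from neuralforecast.models import NHITS

# Unrecognized kwargs such as `callbacks` are passed through to pl.Trainer,
# so a user-supplied progress bar takes effect instead of being duplicated.
model = NHITS(
    h=12,
    input_size=24,
    max_steps=100,
    callbacks=[TQDMProgressBar(refresh_rate=10)],  # illustrative custom refresh rate
)
nf = NeuralForecast(models=[model], freq="M")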
5 changes: 2 additions & 3 deletions nbs/common.base_auto.ipynb
@@ -68,7 +68,6 @@
 "import torch\n",
 "import pytorch_lightning as pl\n",
 "\n",
-"from pytorch_lightning.callbacks import TQDMProgressBar\n",
 "from ray import air, tune\n",
 "from ray.tune.integration.pytorch_lightning import TuneReportCallback\n",
 "from ray.tune.search.basic_variant import BasicVariantGenerator"
@@ -254,9 +253,9 @@
 "        `test_size`: int, test size for temporal cross-validation.<br>\n",
 "        \"\"\"\n",
 "        metrics = {\"loss\": \"ptl/val_loss\", \"train_loss\": \"train_loss\"}\n",
-"        callbacks = [TQDMProgressBar(), TuneReportCallback(metrics, on=\"validation_end\")]\n",
+"        callbacks = [TuneReportCallback(metrics, on=\"validation_end\")]\n",
 "        if 'callbacks' in config_step.keys():\n",
-"            callbacks += config_step['callbacks']\n",
+"            callbacks.extend(config_step['callbacks'])\n",
 "        config_step = {**config_step, **{'callbacks': callbacks}}\n",
 "\n",
 "        # Protect dtypes from tune samplers\n",
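
Two things happen in the `_train_tune` hunk above: the hard-coded progress bar is dropped from the callback list, and `callbacks += ...` becomes `callbacks.extend(...)` (equivalent for lists; both mutate in place). User callbacks supplied through the tune config are still appended after the `TuneReportCallback`. A sketch of passing such a callback via an Auto model's `config`, under the assumption that this dict reaches `_train_tune` as `config_step` (the search-space values are illustrative):

from ray import tune
from pytorch_lightning.callbacks.early_stopping import EarlyStopping
from neuralforecast.auto import AutoNHITS

# Hyperparameter search space plus a plain 'callbacks' entry; _train_tune
# extends its own callback list with whatever this entry contains.
config = {
    "input_size": tune.choice([24, 48]),
    "learning_rate": tune.loguniform(1e-4, 1e-1),
    "max_steps": 100,
    "callbacks": [EarlyStopping(monitor="ptl/val_loss", patience=3)],
}
model = AutoNHITS(h=12, config=config, num_samples=4)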
15 changes: 6 additions & 9 deletions nbs/common.base_multivariate.ipynb
@@ -61,7 +61,6 @@
 "import torch\n",
 "import torch.nn as nn\n",
 "import pytorch_lightning as pl\n",
-"from pytorch_lightning.callbacks import TQDMProgressBar\n",
 "from pytorch_lightning.callbacks.early_stopping import EarlyStopping\n",
 "\n",
 "from neuralforecast.common._scalers import TemporalNorm\n",
@@ -166,14 +165,12 @@
 "            raise Exception('max_epochs is deprecated, use max_steps instead.')\n",
 "\n",
 "        # Callbacks\n",
-"        if trainer_kwargs.get('callbacks', None) is None:\n",
-"            callbacks = [TQDMProgressBar()]\n",
-"            # Early stopping\n",
-"            if self.early_stop_patience_steps > 0:\n",
-"                callbacks += [EarlyStopping(monitor='ptl/val_loss',\n",
-"                                            patience=self.early_stop_patience_steps)]\n",
-"\n",
-"            trainer_kwargs['callbacks'] = callbacks\n",
+"        if 'callbacks' not in trainer_kwargs and self.early_stop_patience_steps > 0:\n",
+"            trainer_kwargs['callbacks'] = [\n",
+"                EarlyStopping(\n",
+"                    monitor='ptl/val_loss', patience=self.early_stop_patience_steps\n",
+"                )\n",
+"            ]\n",
 "\n",
 "        # Add GPU accelerator if available\n",
 "        if trainer_kwargs.get('accelerator', None) is None:\n",
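
The rewritten block above (repeated verbatim in the recurrent and windows base classes below) keeps the old precedence rule: `EarlyStopping` is attached automatically only when the user passed no `callbacks` at all. Anyone supplying their own callbacks must therefore request early stopping explicitly. A sketch of that explicit form (model and values illustrative; validation must still run, e.g. via `val_check_steps` and a validation split at fit time, so that `ptl/val_loss` exists to monitor):

from pytorch_lightning.callbacks import TQDMProgressBar
from pytorch_lightning.callbacks.early_stopping import EarlyStopping
from neuralforecast.models import NHITS

# User-supplied callbacks suppress the automatic EarlyStopping,
# so it is listed explicitly alongside a custom progress bar.
model = NHITS(
    h=12,
    input_size=24,
    max_steps=500,
    val_check_steps=50,  # validation logs 'ptl/val_loss' for the monitor
    callbacks=[
        TQDMProgressBar(refresh_rate=5),
        EarlyStopping(monitor="ptl/val_loss", patience=5),
    ],
)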
15 changes: 6 additions & 9 deletions nbs/common.base_recurrent.ipynb
@@ -67,7 +67,6 @@
 "import torch\n",
 "import torch.nn as nn\n",
 "import pytorch_lightning as pl\n",
-"from pytorch_lightning.callbacks import TQDMProgressBar\n",
 "from pytorch_lightning.callbacks.early_stopping import EarlyStopping\n",
 "\n",
 "from neuralforecast.common._scalers import TemporalNorm\n",
@@ -178,14 +177,12 @@
 "            raise Exception('max_epochs is deprecated, use max_steps instead.')\n",
 "\n",
 "        # Callbacks\n",
-"        if trainer_kwargs.get('callbacks', None) is None:\n",
-"            callbacks = [TQDMProgressBar()]\n",
-"            # Early stopping\n",
-"            if self.early_stop_patience_steps > 0:\n",
-"                callbacks += [EarlyStopping(monitor='ptl/val_loss',\n",
-"                                            patience=self.early_stop_patience_steps)]\n",
-"\n",
-"            trainer_kwargs['callbacks'] = callbacks\n",
+"        if 'callbacks' not in trainer_kwargs and self.early_stop_patience_steps > 0:\n",
+"            trainer_kwargs['callbacks'] = [\n",
+"                EarlyStopping(\n",
+"                    monitor='ptl/val_loss', patience=self.early_stop_patience_steps\n",
+"                )\n",
+"            ]\n",
 "\n",
 "        # Add GPU accelerator if available\n",
 "        if trainer_kwargs.get('accelerator', None) is None:\n",
15 changes: 6 additions & 9 deletions nbs/common.base_windows.ipynb
@@ -67,7 +67,6 @@
 "import torch\n",
 "import torch.nn as nn\n",
 "import pytorch_lightning as pl\n",
-"from pytorch_lightning.callbacks import TQDMProgressBar\n",
 "from pytorch_lightning.callbacks.early_stopping import EarlyStopping\n",
 "\n",
 "from neuralforecast.common._scalers import TemporalNorm\n",
@@ -192,14 +191,12 @@
 "            raise Exception('max_epochs is deprecated, use max_steps instead.')\n",
 "\n",
 "        # Callbacks\n",
-"        if trainer_kwargs.get('callbacks', None) is None:\n",
-"            callbacks = [TQDMProgressBar()]\n",
-"            # Early stopping\n",
-"            if self.early_stop_patience_steps > 0:\n",
-"                callbacks += [EarlyStopping(monitor='ptl/val_loss',\n",
-"                                            patience=self.early_stop_patience_steps)]\n",
-"\n",
-"            trainer_kwargs['callbacks'] = callbacks\n",
+"        if 'callbacks' not in trainer_kwargs and self.early_stop_patience_steps > 0:\n",
+"            trainer_kwargs['callbacks'] = [\n",
+"                EarlyStopping(\n",
+"                    monitor='ptl/val_loss', patience=self.early_stop_patience_steps\n",
+"                )\n",
+"            ]\n",
 "\n",
 "        # Add GPU accelerator if available\n",
 "        if trainer_kwargs.get('accelerator', None) is None:\n",
8 changes: 2 additions & 6 deletions neuralforecast/common/_base_auto.py
@@ -10,7 +10,6 @@
 import torch
 import pytorch_lightning as pl
 
-from pytorch_lightning.callbacks import TQDMProgressBar
 from ray import air, tune
 from ray.tune.integration.pytorch_lightning import TuneReportCallback
 from ray.tune.search.basic_variant import BasicVariantGenerator
@@ -194,12 +193,9 @@ def _train_tune(self, config_step, cls_model, dataset, val_size, test_size):
         `test_size`: int, test size for temporal cross-validation.<br>
         """
         metrics = {"loss": "ptl/val_loss", "train_loss": "train_loss"}
-        callbacks = [
-            TQDMProgressBar(),
-            TuneReportCallback(metrics, on="validation_end"),
-        ]
+        callbacks = [TuneReportCallback(metrics, on="validation_end")]
         if "callbacks" in config_step.keys():
-            callbacks += config_step["callbacks"]
+            callbacks.extend(config_step["callbacks"])
         config_step = {**config_step, **{"callbacks": callbacks}}
 
         # Protect dtypes from tune samplers
18 changes: 6 additions & 12 deletions neuralforecast/common/_base_multivariate.py
@@ -11,7 +11,6 @@
 import torch
 import torch.nn as nn
 import pytorch_lightning as pl
-from pytorch_lightning.callbacks import TQDMProgressBar
 from pytorch_lightning.callbacks.early_stopping import EarlyStopping
 
 from ._scalers import TemporalNorm
@@ -115,17 +114,12 @@ def __init__(
             raise Exception("max_epochs is deprecated, use max_steps instead.")
 
         # Callbacks
-        if trainer_kwargs.get("callbacks", None) is None:
-            callbacks = [TQDMProgressBar()]
-            # Early stopping
-            if self.early_stop_patience_steps > 0:
-                callbacks += [
-                    EarlyStopping(
-                        monitor="ptl/val_loss", patience=self.early_stop_patience_steps
-                    )
-                ]
-
-            trainer_kwargs["callbacks"] = callbacks
+        if "callbacks" not in trainer_kwargs and self.early_stop_patience_steps > 0:
+            trainer_kwargs["callbacks"] = [
+                EarlyStopping(
+                    monitor="ptl/val_loss", patience=self.early_stop_patience_steps
+                )
+            ]
 
         # Add GPU accelerator if available
         if trainer_kwargs.get("accelerator", None) is None:
18 changes: 6 additions & 12 deletions neuralforecast/common/_base_recurrent.py
@@ -11,7 +11,6 @@
 import torch
 import torch.nn as nn
 import pytorch_lightning as pl
-from pytorch_lightning.callbacks import TQDMProgressBar
 from pytorch_lightning.callbacks.early_stopping import EarlyStopping
 
 from ._scalers import TemporalNorm
@@ -127,17 +126,12 @@ def __init__(
             raise Exception("max_epochs is deprecated, use max_steps instead.")
 
         # Callbacks
-        if trainer_kwargs.get("callbacks", None) is None:
-            callbacks = [TQDMProgressBar()]
-            # Early stopping
-            if self.early_stop_patience_steps > 0:
-                callbacks += [
-                    EarlyStopping(
-                        monitor="ptl/val_loss", patience=self.early_stop_patience_steps
-                    )
-                ]
-
-            trainer_kwargs["callbacks"] = callbacks
+        if "callbacks" not in trainer_kwargs and self.early_stop_patience_steps > 0:
+            trainer_kwargs["callbacks"] = [
+                EarlyStopping(
+                    monitor="ptl/val_loss", patience=self.early_stop_patience_steps
+                )
+            ]
 
         # Add GPU accelerator if available
         if trainer_kwargs.get("accelerator", None) is None:
18 changes: 6 additions & 12 deletions neuralforecast/common/_base_windows.py
@@ -11,7 +11,6 @@
 import torch
 import torch.nn as nn
 import pytorch_lightning as pl
-from pytorch_lightning.callbacks import TQDMProgressBar
 from pytorch_lightning.callbacks.early_stopping import EarlyStopping
 
 from ._scalers import TemporalNorm
@@ -137,17 +136,12 @@ def __init__(
             raise Exception("max_epochs is deprecated, use max_steps instead.")
 
         # Callbacks
-        if trainer_kwargs.get("callbacks", None) is None:
-            callbacks = [TQDMProgressBar()]
-            # Early stopping
-            if self.early_stop_patience_steps > 0:
-                callbacks += [
-                    EarlyStopping(
-                        monitor="ptl/val_loss", patience=self.early_stop_patience_steps
-                    )
-                ]
-
-            trainer_kwargs["callbacks"] = callbacks
+        if "callbacks" not in trainer_kwargs and self.early_stop_patience_steps > 0:
+            trainer_kwargs["callbacks"] = [
+                EarlyStopping(
+                    monitor="ptl/val_loss", patience=self.early_stop_patience_steps
+                )
+            ]
 
         # Add GPU accelerator if available
         if trainer_kwargs.get("accelerator", None) is None:
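
One practical upshot across all of these base classes: Lightning raises a configuration error when a progress-bar callback is present while `enable_progress_bar=False`, so with the bar hard-coded, silencing training output was not possible. After this change the standard flag, forwarded like any other trainer kwarg, should simply work. A sketch (model and values illustrative):

from neuralforecast.models import NHITS

# With no TQDMProgressBar injected by the library, the usual Lightning flag
# can disable the bar entirely, which is handy in logs and CI output.
model = NHITS(h=12, input_size=24, max_steps=100, enable_progress_bar=False)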
