Commit c7f3abc

introduce logger.warning_once and use it for grad checkpointing code (#21804)

* logger.warning_once

* style
stas00 authored Feb 27, 2023
1 parent f95f60c commit c7f3abc
Showing 58 changed files with 74 additions and 57 deletions.
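The helper itself presumably lands in src/transformers/utils/logging.py, whose hunk is not shown in this capture. As a rough sketch of the idea, assuming an lru_cache-based deduplication (everything here other than the warning_once name is illustrative):

import functools
import logging


@functools.lru_cache(None)
def warning_once(self, *args, **kwargs):
    """
    Same as logger.warning(), but a given (logger, message) pair is emitted
    only once: lru_cache memoizes calls by their arguments, so any repeat
    call is a cache hit and the wrapped self.warning() never runs again.
    """
    self.warning(*args, **kwargs)


# Attach the method so every logging.Logger instance gains warning_once().
logging.Logger.warning_once = warning_once

Because the cache key is the message itself, deduplication only helps when the message is a fixed literal; a warning built from changing values (an f-string with a step counter, say) would still fire every time. The fixed strings in the hunks below fit that constraint.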
src/transformers/models/altclip/modeling_altclip.py (1 addition & 1 deletion)
@@ -638,7 +638,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/bart/modeling_bart.py (1 addition & 1 deletion)
@@ -1085,7 +1085,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/bert/modeling_bert.py (1 addition & 1 deletion)
@@ -585,7 +585,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

(file path not captured)
@@ -395,7 +395,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/big_bird/modeling_big_bird.py (1 addition & 1 deletion)
@@ -1606,7 +1606,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

(file path not captured)
@@ -2265,7 +2265,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/biogpt/modeling_biogpt.py (1 addition & 1 deletion)
@@ -557,7 +557,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/blenderbot/modeling_blenderbot.py (1 addition & 1 deletion)
@@ -1016,7 +1016,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

(file path not captured)
@@ -1012,7 +1012,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/bloom/modeling_bloom.py (1 addition & 1 deletion)
@@ -757,7 +757,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

(file path not captured)
@@ -769,7 +769,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/camembert/modeling_camembert.py (1 addition & 1 deletion)
@@ -516,7 +516,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

(file path not captured)
@@ -901,7 +901,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/clap/modeling_clap.py (1 addition & 1 deletion)
@@ -1588,7 +1588,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/codegen/modeling_codegen.py (1 addition & 1 deletion)
@@ -548,7 +548,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                     "`use_cache=False`..."
                 )

src/transformers/models/data2vec/modeling_data2vec_text.py (1 addition & 1 deletion)
@@ -502,7 +502,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

(file path not captured)
@@ -609,7 +609,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/electra/modeling_electra.py (1 addition & 1 deletion)
@@ -563,7 +563,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/ernie/modeling_ernie.py (1 addition & 1 deletion)
@@ -498,7 +498,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/esm/modeling_esm.py (1 addition & 1 deletion)
@@ -597,7 +597,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                     "`use_cache=False`..."
                 )

src/transformers/models/git/modeling_git.py (1 addition & 1 deletion)
@@ -444,7 +444,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/gpt2/modeling_gpt2.py (1 addition & 1 deletion)
@@ -853,7 +853,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/gpt_neo/modeling_gpt_neo.py (1 addition & 1 deletion)
@@ -589,7 +589,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/gptj/modeling_gptj.py (1 addition & 1 deletion)
@@ -653,7 +653,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/imagegpt/modeling_imagegpt.py (1 addition & 1 deletion)
@@ -812,7 +812,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/layoutlm/modeling_layoutlm.py (1 addition & 1 deletion)
@@ -479,7 +479,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/led/modeling_led.py (1 addition & 1 deletion)
@@ -2136,7 +2136,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/m2m_100/modeling_m2m_100.py (1 addition & 1 deletion)
@@ -1055,7 +1055,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting"
                     " `use_cache=False`..."
                 )

src/transformers/models/marian/modeling_marian.py (1 addition & 1 deletion)
@@ -1020,7 +1020,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/markuplm/modeling_markuplm.py (1 addition & 1 deletion)
@@ -641,7 +641,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/mbart/modeling_mbart.py (1 addition & 1 deletion)
@@ -1069,7 +1069,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing`. Setting `use_cache=False`..."
                 )
                 use_cache = False

(file path not captured)
@@ -544,7 +544,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/mt5/modeling_mt5.py (1 addition & 1 deletion)
@@ -1008,7 +1008,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/mvp/modeling_mvp.py (1 addition & 1 deletion)
@@ -1212,7 +1212,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/nezha/modeling_nezha.py (1 addition & 1 deletion)
@@ -571,7 +571,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/opt/modeling_opt.py (1 addition & 1 deletion)
@@ -671,7 +671,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/pegasus/modeling_pegasus.py (1 addition & 1 deletion)
@@ -1070,7 +1070,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/pegasus_x/modeling_pegasus_x.py (1 addition & 1 deletion)
@@ -1311,7 +1311,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/plbart/modeling_plbart.py (1 addition & 1 deletion)
@@ -1048,7 +1048,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/prophetnet/modeling_prophetnet.py (1 addition & 1 deletion)
@@ -1572,7 +1572,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/qdqbert/modeling_qdqbert.py (1 addition & 1 deletion)
@@ -575,7 +575,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/realm/modeling_realm.py (1 addition & 1 deletion)
@@ -578,7 +578,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/rembert/modeling_rembert.py (1 addition & 1 deletion)
@@ -536,7 +536,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

src/transformers/models/roberta/modeling_roberta.py (1 addition & 1 deletion)
@@ -502,7 +502,7 @@ def forward(

         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warning(
+                logger.warning_once(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False

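The net effect on a training run: each incompatibility warning above now fires once per process instead of once per forward() call. A hypothetical smoke test, assuming the warning_once sketch from the top of this page has been applied (the logger name is made up):

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("demo")  # hypothetical logger name

for step in range(3):
    # With plain logger.warning() this would print three times; with
    # warning_once it prints exactly once, on the first iteration.
    logger.warning_once(
        "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
    )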