diff --git a/src/transformers/models/swin/modeling_swin.py b/src/transformers/models/swin/modeling_swin.py
index 2cf1d33a51139d..45a7aa718cf026 100644
--- a/src/transformers/models/swin/modeling_swin.py
+++ b/src/transformers/models/swin/modeling_swin.py
@@ -942,6 +942,12 @@ def _set_gradient_checkpointing(self, module, value=False):
 @add_start_docstrings(
     "The bare Swin Model transformer outputting raw hidden-states without any specific head on top.",
     SWIN_START_DOCSTRING,
+    """
+        add_pooling_layer (`bool`, *optional*, defaults to `True`):
+            Whether or not to apply pooling layer.
+        use_mask_token (`bool`, *optional*, defaults to `False`):
+            Whether or not to create and apply mask tokens in the embedding layer.
+    """,
 )
 class SwinModel(SwinPreTrainedModel):
     def __init__(self, config, add_pooling_layer=True, use_mask_token=False):
diff --git a/utils/check_docstrings.py b/utils/check_docstrings.py
index e140be28037d59..f142c5dbccd1df 100644
--- a/utils/check_docstrings.py
+++ b/utils/check_docstrings.py
@@ -499,7 +499,6 @@
     "SqueezeBertTokenizerFast",
     "SummarizationPipeline",
     "Swin2SRImageProcessor",
-    "SwinModel",
     "Swinv2Model",
     "SwitchTransformersConfig",
     "T5Config",
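
For reviewers, a minimal usage sketch of the two constructor flags the new docstring documents. This is not part of the patch; it assumes `transformers` and `torch` are installed and uses the default `SwinConfig` (image_size=224, num_channels=3):

```python
import torch
from transformers import SwinConfig, SwinModel

config = SwinConfig()  # defaults: image_size=224, num_channels=3

# add_pooling_layer=True (the default) attaches an adaptive average-pooling
# step over the final hidden states, so `outputs.pooler_output` is populated.
# use_mask_token=True would additionally create a learnable mask token in the
# embedding layer for masked-image-modeling style inputs (applied wherever
# `bool_masked_pos` marks a patch as masked).
model = SwinModel(config, add_pooling_layer=True, use_mask_token=False)

pixel_values = torch.randn(1, 3, 224, 224)  # dummy batch at the default resolution
outputs = model(pixel_values)
print(outputs.pooler_output.shape)  # pooled hidden state, e.g. torch.Size([1, 768])
```

With `add_pooling_layer=False`, `outputs.pooler_output` is `None` and only the sequence of hidden states is returned.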