From e844dd3d02122c16f1fab52d0f12b9a932e771ae Mon Sep 17 00:00:00 2001
From: Matt
Date: Mon, 13 May 2024 16:53:01 +0100
Subject: [PATCH] stash commit

---
 src/transformers/models/idefics2/processing_idefics2.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/transformers/models/idefics2/processing_idefics2.py b/src/transformers/models/idefics2/processing_idefics2.py
index 447068607ec3..c87e3f119436 100644
--- a/src/transformers/models/idefics2/processing_idefics2.py
+++ b/src/transformers/models/idefics2/processing_idefics2.py
@@ -62,7 +62,7 @@ class Idefics2Processor(ProcessorMixin):
     image_processor_class = "Idefics2ImageProcessor"
     tokenizer_class = "AutoTokenizer"
 
-    def __init__(self, image_processor, tokenizer=None, image_seq_len: int = 64, **kwargs):
+    def __init__(self, image_processor, tokenizer=None, image_seq_len: int = 64, chat_template: str = None, **kwargs):
         if image_processor is None:
             raise ValueError("You need to specify an `image_processor`.")
         if tokenizer is None:
@@ -72,6 +72,7 @@ def __init__(self, image_processor, tokenizer=None, image_seq_len: int = 64, **k
         self.image_token = AddedToken("<image>", normalized=False, special=True)
         self.end_of_utterance_token = AddedToken("<end_of_utterance>", normalized=False, special=True)
         self.image_seq_len = image_seq_len
+        self.chat_template = chat_template
 
         tokens_to_add = {
             "additional_special_tokens": [self.fake_image_token, self.image_token, self.end_of_utterance_token]
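Not part of the patch itself, but a minimal sketch of how the new `chat_template` argument could be exercised once applied. The checkpoint name, the default-constructed image processor, and the template string are illustrative assumptions, not taken from this commit.

```python
from transformers import AutoTokenizer, Idefics2ImageProcessor, Idefics2Processor

# Illustrative Jinja template; any valid chat template string would work here.
CHAT_TEMPLATE = (
    "{% for message in messages %}"
    "{{ message['role'] }}: {{ message['content'] }}\n"
    "{% endfor %}"
)

# Assumed checkpoint for the tokenizer; a default-constructed image processor
# is used only to keep the sketch self-contained.
tokenizer = AutoTokenizer.from_pretrained("HuggingFaceM4/idefics2-8b")
image_processor = Idefics2ImageProcessor()

# The patched signature accepts `chat_template` and stores it on the instance.
processor = Idefics2Processor(
    image_processor=image_processor,
    tokenizer=tokenizer,
    image_seq_len=64,
    chat_template=CHAT_TEMPLATE,
)
assert processor.chat_template == CHAT_TEMPLATE
```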