Fixed inconsistency in several fast tokenizers (huggingface#26561)
Towdo authored and blbadger committed Nov 8, 2023
1 parent a080e09 commit 0f8dbc0
Showing 13 changed files with 31 additions and 23 deletions.
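
The change is identical in every affected tokenizer: `build_inputs_with_special_tokens` previously tested the truthiness of `token_ids_1`, so an empty second sequence (for example, the empty string encoded with `add_special_tokens=False`) was treated like `None` and the closing `[SEP]` was dropped, whereas the slow tokenizers check `is None` and keep it. The sketch below is a minimal, self-contained illustration of that difference; the `CLS`/`SEP` and word ids are placeholders, not real vocabulary values.

# Minimal sketch of the inconsistency this commit fixes.
# CLS/SEP are illustrative placeholder ids, not actual vocabulary values.
CLS, SEP = 101, 102


def build_old_fast(token_ids_0, token_ids_1=None):
    # Old fast-tokenizer logic: a truthiness check silently drops an empty second sequence.
    output = [CLS] + token_ids_0 + [SEP]
    if token_ids_1:
        output += token_ids_1 + [SEP]
    return output


def build_slow(token_ids_0, token_ids_1=None):
    # Slow-tokenizer logic (and the fast logic after this fix): only None is skipped.
    output = [CLS] + token_ids_0 + [SEP]
    if token_ids_1 is not None:
        output += token_ids_1 + [SEP]
    return output


ids_0 = [7592]  # e.g. encode("hello", add_special_tokens=False)
ids_1 = []      # e.g. encode("", add_special_tokens=False) -> empty list

print(build_old_fast(ids_0, ids_1))  # [101, 7592, 102]        second [SEP] dropped
print(build_slow(ids_0, ids_1))      # [101, 7592, 102, 102]   second [SEP] kept
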
2 changes: 1 addition & 1 deletion src/transformers/models/bert/tokenization_bert_fast.py
@@ -265,7 +265,7 @@ def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
         """
         output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
 
-        if token_ids_1:
+        if token_ids_1 is not None:
             output += token_ids_1 + [self.sep_token_id]
 
         return output

@@ -159,7 +159,7 @@ def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
         """
         output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
 
-        if token_ids_1:
+        if token_ids_1 is not None:
             output += token_ids_1 + [self.sep_token_id]
 
         return output

@@ -164,7 +164,7 @@ def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
         """
         output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
 
-        if token_ids_1:
+        if token_ids_1 is not None:
             output += token_ids_1 + [self.sep_token_id]
 
         return output

@@ -190,7 +190,7 @@ def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
         """
         output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
 
-        if token_ids_1:
+        if token_ids_1 is not None:
             output += token_ids_1 + [self.sep_token_id]
 
         return output

@@ -192,7 +192,7 @@ def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
         """
         output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
 
-        if token_ids_1:
+        if token_ids_1 is not None:
             output += token_ids_1 + [self.sep_token_id]
 
         return output

2 changes: 1 addition & 1 deletion src/transformers/models/funnel/tokenization_funnel_fast.py
@@ -212,7 +212,7 @@ def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
         """
         output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
 
-        if token_ids_1:
+        if token_ids_1 is not None:
             output += token_ids_1 + [self.sep_token_id]
 
         return output

@@ -166,7 +166,7 @@ def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
         """
         output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
 
-        if token_ids_1:
+        if token_ids_1 is not None:
             output += token_ids_1 + [self.sep_token_id]
 
         return output

2 changes: 1 addition & 1 deletion src/transformers/models/lxmert/tokenization_lxmert_fast.py
@@ -152,7 +152,7 @@ def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
         """
         output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
 
-        if token_ids_1:
+        if token_ids_1 is not None:
             output += token_ids_1 + [self.sep_token_id]
 
         return output

@@ -150,7 +150,7 @@ def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
         """
         output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
 
-        if token_ids_1:
+        if token_ids_1 is not None:
             output += token_ids_1 + [self.sep_token_id]
 
         return output

2 changes: 1 addition & 1 deletion src/transformers/models/realm/tokenization_realm_fast.py
@@ -282,7 +282,7 @@ def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
         """
         output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
 
-        if token_ids_1:
+        if token_ids_1 is not None:
             output += token_ids_1 + [self.sep_token_id]
 
         return output

@@ -163,7 +163,7 @@ def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
         """
         output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
 
-        if token_ids_1:
+        if token_ids_1 is not None:
             output += token_ids_1 + [self.sep_token_id]
 
         return output

@@ -173,7 +173,7 @@ def build_inputs_with_special_tokens(self, token_ids_0, token_ids_1=None):
         """
         output = [self.cls_token_id] + token_ids_0 + [self.sep_token_id]
 
-        if token_ids_1:
+        if token_ids_1 is not None:
             output += token_ids_1 + [self.sep_token_id]
 
         return output

30 changes: 19 additions & 11 deletions tests/test_tokenization_common.py
@@ -3209,19 +3209,27 @@ def test_build_inputs_with_special_tokens(self):
                 # output_p = tokenizer_p.build_inputs_with_special_tokens(input_simple, input_pair)
                 # self.assertEqual(output_p, output_r)
 
-                # Input tokens id
-                input_simple = tokenizer_p.encode("This is a sample input", add_special_tokens=False)
-                input_pair = tokenizer_p.encode("This is a sample pair", add_special_tokens=False)
+                input_pairs = [
+                    ("", ""),
+                    ("", "This is a sample pair"),
+                    ("This is a sample input", ""),
+                    ("This is a sample input", "This is a sample pair"),
+                ]
 
-                # Generate output
-                output_r = tokenizer_r.build_inputs_with_special_tokens(input_simple)
-                output_p = tokenizer_p.build_inputs_with_special_tokens(input_simple)
-                self.assertEqual(output_p, output_r)
+                for sample_input, sample_pair in input_pairs:
+                    # Input tokens id
+                    input_simple = tokenizer_p.encode(sample_input, add_special_tokens=False)
+                    input_pair = tokenizer_p.encode(sample_pair, add_special_tokens=False)
 
-                # Generate pair output
-                output_r = tokenizer_r.build_inputs_with_special_tokens(input_simple, input_pair)
-                output_p = tokenizer_p.build_inputs_with_special_tokens(input_simple, input_pair)
-                self.assertEqual(output_p, output_r)
+                    # Generate output
+                    output_r = tokenizer_r.build_inputs_with_special_tokens(input_simple)
+                    output_p = tokenizer_p.build_inputs_with_special_tokens(input_simple)
+                    self.assertEqual(output_p, output_r)
+
+                    # Generate pair output
+                    output_r = tokenizer_r.build_inputs_with_special_tokens(input_simple, input_pair)
+                    output_p = tokenizer_p.build_inputs_with_special_tokens(input_simple, input_pair)
+                    self.assertEqual(output_p, output_r)
 
     def test_padding(self, max_length=50):
         if not self.test_slow_tokenizer:
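
For context, the new test cases exercise exactly the inputs that used to diverge: empty strings encode to empty id lists, and only the empty-list case distinguishes a truthiness check from an `is None` check. A rough reproduction against the public API might look like the following sketch; `bert-base-uncased` is just an example checkpoint, and the printed ids depend on its vocabulary.

from transformers import BertTokenizer, BertTokenizerFast

# Example checkpoint; any BERT-style checkpoint with both implementations should work.
slow = BertTokenizer.from_pretrained("bert-base-uncased")
fast = BertTokenizerFast.from_pretrained("bert-base-uncased")

ids_0 = slow.encode("This is a sample input", add_special_tokens=False)
ids_1 = slow.encode("", add_special_tokens=False)  # empty string -> empty id list

# Before this commit the fast result was missing the trailing [SEP];
# after it, both implementations agree.
print(slow.build_inputs_with_special_tokens(ids_0, ids_1))
print(fast.build_inputs_with_special_tokens(ids_0, ids_1))
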
