Commit

Fix bug with added tokens (#197)
brandenchan authored and tholor committed Jan 17, 2020
1 parent a380d99 commit 1f6e082
Showing 1 changed file with 7 additions and 2 deletions.
9 changes: 7 additions & 2 deletions farm/data_handler/processor.py
@@ -722,11 +722,16 @@ def __init__(
         )

         self.next_sent_pred = next_sent_pred
-
-        self.add_task("lm", "acc", list(self.tokenizer.vocab))
+        added_tokens = self.get_added_tokens()
+        self.add_task("lm", "acc", list(self.tokenizer.vocab) + added_tokens)
         if self.next_sent_pred:
             self.add_task("nextsentence", "acc", ["False", "True"])

+    def get_added_tokens(self):
+        dictionary = self.tokenizer.added_tokens_encoder
+        sorted_tuples = sorted(dictionary.items(), key=lambda x: x[1])
+        return [x[0] for x in sorted_tuples]
+
     def file_to_dicts(self, file: str) -> list:
         dicts = read_docs_from_txt(filename=file, delimiter=self.delimiter, max_docs=self.max_docs, proxies=self.proxies)
         return dicts
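For context, a minimal sketch (not part of the commit) of why the "lm" task's label list needs the added tokens, assuming a Hugging Face BertTokenizer: added_tokens_encoder maps each added token string to its id, and those ids continue after the base vocabulary, so the labels passed to add_task must be extended with the added tokens in id order for label index and token id to stay aligned. The model name and tokens below are illustrative, and the exact contents of added_tokens_encoder vary across transformers versions.

# Illustrative sketch only, not part of the commit.
from transformers import BertTokenizer

tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
tokenizer.add_tokens(["[DOMAIN_B]", "[DOMAIN_A]"])  # hypothetical custom tokens

# Added tokens get ids after the base vocab,
# e.g. {'[DOMAIN_B]': 30522, '[DOMAIN_A]': 30523}.
print(tokenizer.added_tokens_encoder)

# Mirrors get_added_tokens() above: sort by id, keep the token strings,
# and append them so every predictable id has a label.
sorted_tuples = sorted(tokenizer.added_tokens_encoder.items(), key=lambda x: x[1])
added_tokens = [token for token, _ in sorted_tuples]
labels = list(tokenizer.vocab) + added_tokens

Presumably, without the appended entries, a masked position whose target is an added token would fall outside the label list, which is the bug this commit addresses.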

0 comments on commit 1f6e082