diff --git a/spacy/tokenizer.pyx b/spacy/tokenizer.pyx
index 8485a57c8aa..2d6c879e360 100644
--- a/spacy/tokenizer.pyx
+++ b/spacy/tokenizer.pyx
@@ -9,11 +9,17 @@ from preshed.maps cimport PreshMap
 
 import re
 
+from .tokens.doc cimport Doc
+from .strings cimport hash_string
 from .lexeme cimport EMPTY_LEXEME
 from .strings cimport hash_string
 from .tokens.doc cimport Doc
 
+from .attrs import intify_attrs
+from .symbols import ORTH, NORM
+from .errors import Errors
 from . import util
+from .util import get_words_and_spaces
 from .attrs import intify_attrs
 from .errors import Errors
 from .scorer import Scorer
@@ -142,10 +148,8 @@ cdef class Tokenizer:
     property faster_heuristics:
         def __get__(self):
            return self._faster_heuristics
-            return self._faster_heuristics
 
        def __set__(self, faster_heuristics):
-            self._faster_heuristics = faster_heuristics
            self._faster_heuristics = faster_heuristics
            self._reload_special_cases()
 