Skip to content

Commit

Permalink
deprecate the iter parameter to the FastText constructor
Browse files Browse the repository at this point in the history
  • Loading branch information
mpenkov committed Jan 26, 2019
1 parent 09ab630 commit 2e728cb
Showing 1 changed file with 9 additions and 3 deletions.
12 changes: 9 additions & 3 deletions gensim/models/fasttext.py
Original file line number Diff line number Diff line change
Expand Up @@ -353,7 +353,7 @@ class FastText(BaseWordEmbeddingsModel):
"""
def __init__(self, sentences=None, corpus_file=None, sg=0, hs=0, size=100, alpha=0.025, window=5, min_count=5,
max_vocab_size=None, word_ngrams=1, sample=1e-3, seed=1, workers=3, min_alpha=0.0001,
negative=5, ns_exponent=0.75, cbow_mean=1, hashfxn=hash, iter=5, null_word=0, min_n=3, max_n=6,
negative=5, ns_exponent=0.75, cbow_mean=1, hashfxn=hash, iter=None, null_word=0, min_n=3, max_n=6,
sorted_vocab=1, bucket=2000000, trim_rule=None, batch_words=MAX_WORDS_IN_BATCH, callbacks=(),
compatible_hash=True):
"""
Expand Down Expand Up @@ -416,7 +416,7 @@ def __init__(self, sentences=None, corpus_file=None, sg=0, hs=0, size=100, alpha
hashfxn : function, optional
Hash function to use to randomly initialize weights, for increased training reproducibility.
iter : int, optional
Number of iterations (epochs) over the corpus.
Deprecated. Use the ``epochs`` parameter of the ``train`` method instead.
trim_rule : function, optional
Vocabulary trimming rule, specifies whether certain words should remain in the vocabulary,
be trimmed away, or handled using the default (discard if word count < min_count).
Expand Down Expand Up @@ -471,6 +471,12 @@ def __init__(self, sentences=None, corpus_file=None, sg=0, hs=0, size=100, alpha
>>> of_vector = model.wv['of'] # get vector for out-of-vocab word
"""
if iter is not None:
logging.warning(
'The iter parameter is deprecated. Pass the epochs keyword '
'parameter to the train method instead.'
)

self.load = call_on_class_only
self.load_fasttext_format = call_on_class_only
self.callbacks = callbacks
Expand All @@ -487,7 +493,7 @@ def __init__(self, sentences=None, corpus_file=None, sg=0, hs=0, size=100, alpha
self.wv.bucket = self.trainables.bucket

super(FastText, self).__init__(
sentences=sentences, corpus_file=corpus_file, workers=workers, vector_size=size, epochs=iter,
sentences=sentences, corpus_file=corpus_file, workers=workers, vector_size=size,
callbacks=callbacks, batch_words=batch_words, trim_rule=trim_rule, sg=sg, alpha=alpha, window=window,
seed=seed, hs=hs, negative=negative, cbow_mean=cbow_mean, min_alpha=min_alpha, fast_version=FAST_VERSION)

Expand Down

0 comments on commit 2e728cb

Please sign in to comment.