Merge pull request #3129 from RaRe-Technologies/sponsor_techtarget
[MRG] Add bronze sponsor: TechTarget
piskvorky committed Apr 29, 2021
2 parents a9b91ba + a8069df commit b0b2174
Showing 4 changed files with 18 additions and 8 deletions.
Binary file added docs/src/_static/images/techtarget-logo.png
5 changes: 5 additions & 0 deletions docs/src/people.rst
@@ -52,3 +52,8 @@ Bronze Sponsors
   :target: https://eaccidents.com/
   :width: 50%
   :alt: EAccidents
+
+.. figure:: _static/images/techtarget-logo.png
+   :target: https://www.techtarget.com/
+   :width: 50%
+   :alt: TechTarget
19 changes: 12 additions & 7 deletions gensim/models/doc2vec.py
@@ -265,6 +265,7 @@ def __init__(self, documents=None, corpus_file=None, vector_size=100, dm_mean=No
        .. sourcecode:: pycon

            >>> model.dv['doc003']
        """
        corpus_iterable = documents
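For context on the `model.dv['doc003']` docstring example: in Gensim 4.x the trained per-document vectors live in `model.dv`, keyed by the tags given in `TaggedDocument`. A minimal sketch (the toy corpus and tag names are invented for illustration):

    from gensim.models.doc2vec import Doc2Vec, TaggedDocument

    # Toy corpus: each document carries a unique string tag.
    corpus = [
        TaggedDocument(words=['human', 'interface', 'computer'], tags=['doc001']),
        TaggedDocument(words=['survey', 'user', 'computer', 'system'], tags=['doc002']),
        TaggedDocument(words=['graph', 'minors', 'trees'], tags=['doc003']),
    ]

    model = Doc2Vec(corpus, vector_size=5, window=2, min_count=1, epochs=10)

    # Look up the learned vector for a document by its tag.
    print(model.dv['doc003'])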

@@ -357,8 +358,10 @@ def reset_from(self, other_model):
        self.dv.expandos = other_model.dv.expandos
        self.init_weights()

-    def _do_train_epoch(self, corpus_file, thread_id, offset, cython_vocab, thread_private_mem, cur_epoch,
-                        total_examples=None, total_words=None, offsets=None, start_doctags=None, **kwargs):
+    def _do_train_epoch(
+        self, corpus_file, thread_id, offset, cython_vocab, thread_private_mem, cur_epoch,
+        total_examples=None, total_words=None, offsets=None, start_doctags=None, **kwargs
+    ):
        work, neu1 = thread_private_mem
        doctag_vectors = self.dv.vectors
        doctags_lockf = self.dv.vectors_lockf
@@ -425,10 +428,12 @@ def _do_train_job(self, job, alpha, inits):
        )
        return tally, self._raw_word_count(job)

-    def train(self, corpus_iterable=None, corpus_file=None, total_examples=None, total_words=None,
-              epochs=None, start_alpha=None, end_alpha=None,
-              word_count=0, queue_factor=2, report_delay=1.0, callbacks=(),
-              **kwargs):
+    def train(
+        self, corpus_iterable=None, corpus_file=None, total_examples=None, total_words=None,
+        epochs=None, start_alpha=None, end_alpha=None,
+        word_count=0, queue_factor=2, report_delay=1.0, callbacks=(),
+        **kwargs,
+    ):
        """Update the model's neural weights.

        To support linear learning-rate decay from (initial) `alpha` to `min_alpha`, and accurate
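When `train()` is called explicitly (rather than passing the corpus to the constructor), the documented pattern is to build the vocabulary first and then supply `total_examples` and `epochs`, so the linear learning-rate decay mentioned in the docstring can be scheduled over the whole run. A minimal sketch under those assumptions:

    from gensim.models.doc2vec import Doc2Vec, TaggedDocument

    corpus = [TaggedDocument(['graph', 'minors', 'trees'], ['doc003'])]  # toy corpus

    model = Doc2Vec(vector_size=5, window=2, min_count=1)
    model.build_vocab(corpus)

    # total_examples and epochs let train() schedule the decay from the
    # initial alpha down to min_alpha across all epochs.
    model.train(corpus, total_examples=model.corpus_count, epochs=model.epochs)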
@@ -795,7 +800,7 @@ def load(cls, *args, **kwargs):
            return super(Doc2Vec, cls).load(*args, rethrow=True, **kwargs)
        except AttributeError as ae:
            logger.error(
-                "Model load error. Was model saved using code from an older Gensim Version? "
+                "Model load error. Was model saved using code from an older Gensim version? "
                "Try loading older model using gensim-3.8.3, then re-saving, to restore "
                "compatibility with current code.")
            raise ae
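The reworded error message points at Gensim's migration path for models saved with pre-4.0 code: load them once under gensim 3.8.3, re-save, then load normally. A sketch of that workflow (file names are hypothetical):

    # Step 1: in a separate environment with gensim==3.8.3 installed,
    # load the old model and re-save it.
    from gensim.models import Doc2Vec

    old = Doc2Vec.load('doc2vec_gensim38.model')  # hypothetical file name
    old.save('doc2vec_resaved.model')

    # Step 2: back in the current environment, load the re-saved file:
    # model = Doc2Vec.load('doc2vec_resaved.model')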
2 changes: 1 addition & 1 deletion gensim/models/word2vec.py
@@ -553,7 +553,7 @@ def _scan_vocab(self, sentences, progress_per, trim_rule):
            if sentence_no % progress_per == 0:
                logger.info(
                    "PROGRESS: at sentence #%i, processed %i words, keeping %i word types",
-                    sentence_no, total_words, len(vocab)
+                    sentence_no, total_words, len(vocab),
                )
            for word in sentence:
                vocab[word] += 1
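The surrounding `_scan_vocab` loop is a standard count-and-log pass over the corpus. A self-contained sketch of the same pattern (the function and variable names here are illustrative, not Gensim internals):

    import logging
    from collections import defaultdict

    logger = logging.getLogger(__name__)

    def scan_vocab(sentences, progress_per=10000):
        """Count raw word frequencies, logging every `progress_per` sentences."""
        vocab = defaultdict(int)
        total_words = 0
        for sentence_no, sentence in enumerate(sentences):
            if sentence_no % progress_per == 0:
                logger.info(
                    "PROGRESS: at sentence #%i, processed %i words, keeping %i word types",
                    sentence_no, total_words, len(vocab),
                )
            for word in sentence:
                vocab[word] += 1
            total_words += len(sentence)
        return vocab, total_words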
