Merge branch 'master' of https://github.com/explosion/spaCy
honnibal committed Jul 10, 2019
2 parents c6cb782 + c4c21cb commit a388888
Showing 3 changed files with 9 additions and 1 deletion.
requirements.txt (2 changes: 1 addition & 1 deletion)
@@ -1,7 +1,7 @@
 # Our libraries
 cymem>=2.0.2,<2.1.0
 preshed>=2.0.1,<2.1.0
-thinc>=7.0.2,<7.1.0
+thinc>=7.0.5,<7.1.0
 blis>=0.2.2,<0.3.0
 murmurhash>=0.28.0,<1.1.0
 wasabi>=0.2.0,<1.1.0
spacy/errors.py (1 change: 1 addition & 0 deletions)
@@ -403,6 +403,7 @@ class Errors(object):
     E140 = ("The list of entities, prior probabilities and entity vectors should be of equal length.")
     E141 = ("Entity vectors should be of length {required} instead of the provided {found}.")
     E142 = ("Unsupported loss_function '{loss_func}'. Use either 'L2' or 'cosine'")
+    E143 = ("Labels for component '{name}' not initialized. Did you forget to call add_label()?")
 
 
 @add_codes
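
For context on how these entries are used: each attribute on Errors is a plain format string, and call sites fill in the placeholders with str.format before raising an ordinary exception. A minimal standalone sketch of that pattern (the @add_codes decorator, which roughly prefixes each message with its code, is left out, and the free function below is only a stand-in for the method added in pipes.pyx; the E143 text itself is copied from the diff above):

class Errors(object):
    # Message text copied from the addition above; {name} is filled at raise time.
    E143 = ("Labels for component '{name}' not initialized. "
            "Did you forget to call add_label()?")


def require_labels(labels, name):
    # Mimics the check added to TextCategorizer in pipes.pyx below:
    # fail fast when no labels have been registered.
    if not labels:
        raise ValueError(Errors.E143.format(name=name))


require_labels(labels=[], name="textcat")  # raises ValueError carrying the E143 text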
spacy/pipeline/pipes.pyx (7 changes: 7 additions & 0 deletions)
@@ -902,6 +902,11 @@ class TextCategorizer(Pipe):
     def labels(self):
         return tuple(self.cfg.setdefault("labels", []))
 
+    def require_labels(self):
+        """Raise an error if the component's model has no labels defined."""
+        if not self.labels:
+            raise ValueError(Errors.E143.format(name=self.name))
+
     @labels.setter
     def labels(self, value):
         self.cfg["labels"] = tuple(value)
@@ -931,6 +936,7 @@ class TextCategorizer(Pipe):
                 doc.cats[label] = float(scores[i, j])
 
     def update(self, docs, golds, state=None, drop=0., sgd=None, losses=None):
+        self.require_model()
         scores, bp_scores = self.model.begin_update(docs, drop=drop)
         loss, d_scores = self.get_loss(docs, golds, scores)
         bp_scores(d_scores, sgd=sgd)
@@ -985,6 +991,7 @@ class TextCategorizer(Pipe):
     def begin_training(self, get_gold_tuples=lambda: [], pipeline=None, sgd=None, **kwargs):
         if self.model is True:
             self.cfg["pretrained_vectors"] = kwargs.get("pretrained_vectors")
+            self.require_labels()
             self.model = self.Model(len(self.labels), **self.cfg)
             link_vectors_to_models(self.vocab)
         if sgd is None:
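
Taken together, the pipes.pyx changes make TextCategorizer fail fast: update() now checks via require_model() that a model has been initialized, and begin_training() checks via the new require_labels() that at least one label has been added before the model is built. A minimal sketch of how the new E143 check surfaces to a user on spaCy 2.1 (the blank English pipeline and the label names are illustrative, not part of this commit):

import spacy

nlp = spacy.blank("en")
textcat = nlp.create_pipe("textcat")
nlp.add_pipe(textcat)

# No add_label() calls yet, so the component's label set is empty and
# begin_training() now raises E143 instead of building a zero-class model.
try:
    nlp.begin_training()
except ValueError as err:
    print(err)  # ... Labels for component 'textcat' not initialized. ...

# Registering labels first lets training start normally.
textcat.add_label("POSITIVE")
textcat.add_label("NEGATIVE")
optimizer = nlp.begin_training()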
