
Fix index error in dict embedding when exported to Caffe2 #1182

Closed · wants to merge 1 commit
10 changes: 7 additions & 3 deletions pytext/models/embeddings/dict_embedding.py
@@ -10,6 +10,7 @@
 from pytext.data.tensorizers import Tensorizer
 from pytext.data.utils import PAD_INDEX, UNK_INDEX, Vocabulary
 from pytext.fields import FieldMeta
+from pytext.utils import cuda
 
 from .embedding_base import EmbeddingBase

@@ -132,9 +133,12 @@ def forward(
         batch_size = torch.onnx.operators.shape_as_tensor(feats)[0]
         max_toks = torch.onnx.operators.shape_as_tensor(lengths)[1]
 
-        # convert all unk indicec to pad indices so
-        # there vector was the 0 vector
-        feats[feats == self.unk_index] = self.pad_index
+        # convert all unk indices to pad indices
+        feats = torch.where(
+            feats == self.unk_index,
+            cuda.GetTensor(torch.full_like(feats, self.pad_index)),
+            feats,
+        )
 
         dict_emb = super().forward(feats)
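For context on the hunk above: the old code mutated feats with an in-place masked assignment, which the PR title indicates led to an index error once the model was exported to Caffe2; the new code builds a pad-filled tensor and selects elementwise with torch.where, an out-of-place op that also leaves the caller's tensor untouched. The sketch below is illustrative only and not part of the PR; it simply checks that the two formulations agree in eager mode. The concrete UNK_INDEX/PAD_INDEX values are made up for the example, and the diff's cuda.GetTensor wrapper (presumably PyText's helper for moving the filled tensor onto the GPU when CUDA is enabled) is omitted.

# Illustrative sketch only, not part of the PR: compare the removed in-place
# masked assignment with the added out-of-place torch.where formulation.
import torch

UNK_INDEX = 0  # placeholder values for this example; PyText imports the real
PAD_INDEX = 1  # constants from pytext.data.utils

feats = torch.tensor([[2, 0, 3], [0, 4, 1]])

# Old code path: in-place masked assignment on the input tensor.
old = feats.clone()
old[old == UNK_INDEX] = PAD_INDEX

# New code path: build a pad-filled tensor and select elementwise.
# (The PR additionally wraps the filled tensor in cuda.GetTensor so it is
# placed on the GPU when CUDA is enabled.)
new = torch.where(
    feats == UNK_INDEX,
    torch.full_like(feats, PAD_INDEX),
    feats,
)

assert torch.equal(old, new)  # identical results in eager mode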