From 6521dfeaddd3ab171e8cbf946a4aab13e1985127 Mon Sep 17 00:00:00 2001
From: Pengjia Zhu
Date: Tue, 23 Oct 2018 15:01:30 +1300
Subject: [PATCH] fixed a bug in position.py

---
 bert_pytorch/model/embedding/position.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bert_pytorch/model/embedding/position.py b/bert_pytorch/model/embedding/position.py
index d1d7e81..d55c224 100644
--- a/bert_pytorch/model/embedding/position.py
+++ b/bert_pytorch/model/embedding/position.py
@@ -13,7 +13,7 @@ def __init__(self, d_model, max_len=512):
         pe.require_grad = False
 
         position = torch.arange(0, max_len).float().unsqueeze(1)
-        div_term = (torch.arange(0, d_model, 2) * -(math.log(10000.0) / d_model)).float().exp()
+        div_term = (torch.arange(0, d_model, 2).float() * -(math.log(10000.0) / d_model)).exp()
 
         pe[:, 0::2] = torch.sin(position * div_term)
         pe[:, 1::2] = torch.cos(position * div_term)
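
Note on the fix (editorial context, not part of the patch): torch.arange(0, d_model, 2)
yields an integer (Long) tensor. Under the scalar type rules of PyTorch releases current
in 2018, Python scalars were cast to the tensor's dtype before the operation, so
multiplying that tensor by the float -(math.log(10000.0) / d_model) truncated the scalar
toward zero (to 0 for any realistic d_model). div_term then degenerated to all ones after
exp(), and every sinusoid in the positional encoding came out with the same frequency.
The .float() in the buggy line was applied only after the multiply, too late to help;
moving the cast before the multiplication keeps the arithmetic in floating point, and
the trailing .float() becomes redundant and is dropped.

Below is a minimal, self-contained sketch of the corrected module for reference. The
constructor body follows the hunk above; the register_buffer call, forward(), and the
usage lines are reconstructed from the surrounding repository code and are assumptions,
not part of this patch.

    import math

    import torch
    import torch.nn as nn

    class PositionalEmbedding(nn.Module):
        """Sinusoidal positional encoding, precomputed once in __init__."""

        def __init__(self, d_model, max_len=512):
            super().__init__()

            pe = torch.zeros(max_len, d_model).float()
            # (The original file also sets pe.require_grad = False; that
            # attribute name is a typo for requires_grad and is a no-op.
            # Registering pe as a buffer below keeps it untrained anyway.)

            position = torch.arange(0, max_len).float().unsqueeze(1)

            # The fixed line: cast the even indices 0, 2, 4, ... to float
            # *before* scaling, so the products stay floating point.
            div_term = (torch.arange(0, d_model, 2).float()
                        * -(math.log(10000.0) / d_model)).exp()

            pe[:, 0::2] = torch.sin(position * div_term)  # even dimensions
            pe[:, 1::2] = torch.cos(position * div_term)  # odd dimensions

            self.register_buffer('pe', pe.unsqueeze(0))   # assumption: as in repo context

        def forward(self, x):                             # assumption: as in repo context
            # Return encodings for the first seq_len positions of x,
            # shaped (1, seq_len, d_model) to broadcast over the batch.
            return self.pe[:, :x.size(1)]

    # Quick check (hypothetical usage):
    emb = PositionalEmbedding(d_model=768)
    x = torch.zeros(2, 16, 768)   # (batch, seq_len, d_model) dummy input
    print(emb(x).shape)           # torch.Size([1, 16, 768])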