
Commit

.
Sasha committed Nov 25, 2019
1 parent 6a55459 commit 2b47f13
Showing 2 changed files with 25 additions and 12 deletions.
torch_struct/linearchain.py: 14 additions & 8 deletions
@@ -35,10 +35,11 @@ def _check_potentials(self, edge, lengths=None):
         N = N_1 + 1
 
         if lengths is None:
-            lengths = torch.LongTensor([N] * batch)
-
-        assert max(lengths) <= N, "Length longer than edge scores"
-        assert max(lengths) == N, "One length must be at least N"
+            # lengths = torch.LongTensor([N] * batch)
+            pass
+        else:
+            assert max(lengths) <= N, "Length longer than edge scores"
+            assert max(lengths) == N, "One length must be at least N"
         assert C == C2, "Transition shape doesn't match"
         return edge, batch, N, C, lengths
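This hunk changes the contract of `lengths`: `None` is no longer expanded into a `torch.LongTensor([N] * batch)`; it now means "every sequence has full length N", and the two sanity asserts apply only when explicit lengths are given. A toy restatement of the guarded checks (hypothetical `validate_lengths` helper, plain Python, not part of torch_struct):

    def validate_lengths(lengths, N):
        # Mirrors the else-branch above; a no-op when lengths is None.
        if lengths is None:
            return
        assert max(lengths) <= N, "Length longer than edge scores"
        assert max(lengths) == N, "One length must be at least N"

    validate_lengths(None, 8)       # fine: None now means all-full-length
    validate_lengths([8, 5, 3], 8)  # fine: max length equals N
    # validate_lengths([9, 5], 8)   # AssertionError: length exceeds N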

@@ -56,10 +57,15 @@ def _dp_scan(self, log_potentials, lengths=None, force_grad=False):
         chart = self._chart((batch, bin_N, C, C), log_potentials, force_grad)
 
         # Init
-        for b in range(lengths.shape[0]):
-            end = lengths[b] - 1
-            semiring.one_(chart[:, b, end:].diagonal(0, 2, 3))
-            chart[:, b, :end] = log_potentials[:, b, :end]
+        if lengths is None:
+            end = N
+            semiring.one_(chart[:, :, end:].diagonal(0, 2, 3))
+            chart[:, :, :end] = log_potentials[:, :, :end]
+        else:
+            for b in range(lengths.shape[0]):
+                end = lengths[b] - 1
+                semiring.one_(chart[:, b, end:].diagonal(0, 2, 3))
+                chart[:, b, :end] = log_potentials[:, b, :end]
 
         # Scan
         for n in range(1, log_N + 1):
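This hunk exploits the new contract: with `lengths=None` there is nothing ragged about the batch, so the chart init collapses to one vectorized slice assignment over all batch elements instead of a per-element Python loop. A self-contained sketch of the two paths (toy shapes, hypothetical `init_chart` helper; the real chart also carries a leading semiring dimension):

    import torch

    def init_chart(log_potentials, lengths=None):
        # log_potentials: (batch, N, C, C), one score per edge position.
        batch, N, C, _ = log_potentials.shape
        chart = torch.zeros_like(log_potentials)
        if lengths is None:
            # Fast path: one batched assignment, no lengths tensor allocated.
            chart[:, :N] = log_potentials[:, :N]
        else:
            # Ragged path: fill each sequence only up to its own length.
            for b in range(lengths.shape[0]):
                end = int(lengths[b]) - 1
                chart[b, :end] = log_potentials[b, :end]
        return chart

    lp = torch.randn(4, 8, 3, 3)
    full = init_chart(lp)                                # lengths=None fast path
    ragged = init_chart(lp, torch.tensor([8, 5, 8, 3]))  # per-sequence lengths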
torch_struct/semirings/semirings.py: 11 additions & 4 deletions
@@ -1,5 +1,12 @@
 import torch
-import genbmm
+
+has_genbmm = False
+try:
+    import genbmm
+    has_genbmm = True
+except ImportError:
+    pass
+
 
 
 def matmul(cls, a, b):
@@ -143,7 +150,7 @@ def matmul(cls, a, b, dims=1):
         (Faster than calling sum and times.)
         """
 
-        if isinstance(a, genbmm.BandedMatrix):
+        if has_genbmm and isinstance(a, genbmm.BandedMatrix):
             return b.multiply(a.transpose())
         else:
             return torch.matmul(a, b)
@@ -158,7 +165,7 @@ class LogSemiring(_BaseLog):
 
     @classmethod
     def matmul(cls, a, b):
-        if isinstance(a, genbmm.BandedMatrix):
+        if has_genbmm and isinstance(a, genbmm.BandedMatrix):
             return b.multiply_log(a.transpose())
         else:
             return _BaseLog.matmul(a, b)
@@ -173,7 +180,7 @@ class MaxSemiring(_BaseLog):
 
     @classmethod
     def matmul(cls, a, b):
-        if isinstance(a, genbmm.BandedMatrix):
+        if has_genbmm and isinstance(a, genbmm.BandedMatrix):
             return b.multiply_max(a.transpose())
         else:
             return matmul(cls, a, b)
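Why each `isinstance` test gained a `has_genbmm and` prefix: if `import genbmm` failed, the name `genbmm` was never bound, so evaluating `genbmm.BandedMatrix` would raise a `NameError` instead of falling through to the `torch.matmul` branch. The `and` short-circuits before the attribute lookup. The pattern in isolation (hypothetical `fastmul` package standing in for genbmm):

    import torch

    # Optional-dependency guard: record whether the accelerated library
    # imported, and never touch its names unless it did.
    has_fastmul = False
    try:
        import fastmul  # hypothetical accelerated-matmul package
        has_fastmul = True
    except ImportError:
        pass

    def matmul(a, b):
        # Short-circuit: when has_fastmul is False, fastmul.Matrix is never
        # evaluated, so the missing module cannot raise a NameError here.
        if has_fastmul and isinstance(a, fastmul.Matrix):
            return fastmul.mm(a, b)
        return torch.matmul(a, b)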
