Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions pytensor/xtensor/math.py
Original file line number Diff line number Diff line change
Expand Up @@ -512,6 +512,11 @@ def softmax(x, dim=None):
return exp_x / exp_x.sum(dim=dim)


def logsumexp(x, dim=None):
    """Compute log(sum(exp(x))) of an XTensorVariable along a specified dimension.

    Uses the max-shift identity ``logsumexp(x) == m + log(sum(exp(x - m)))``
    (for any finite ``m``) with ``m = x.max(dim)`` so that ``exp`` is only
    applied to non-positive values. This prevents overflow to ``inf`` for
    large inputs, matching the behavior of ``scipy.special.logsumexp``.

    Parameters
    ----------
    x : XTensorVariable
        Input tensor.
    dim : str, sequence of str, or None, optional
        Dimension(s) to reduce over. If None, reduce over all dimensions.

    Returns
    -------
    XTensorVariable
        The log-sum-exp of ``x`` along ``dim``.
    """
    # NOTE(review): relies on xarray-style automatic re-alignment of the
    # reduced dim(s) when subtracting the max from x — confirm
    # XTensorVariable broadcasting handles this for multi-dim reductions.
    max_x = x.max(dim=dim)
    return log(exp(x - max_x).sum(dim=dim)) + max_x


class Dot(XOp):
"""Matrix multiplication between two XTensorVariables.

Expand Down
25 changes: 24 additions & 1 deletion tests/xtensor/test_math.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,15 @@
import inspect

import numpy as np
from scipy.special import logsumexp as scipy_logsumexp
from xarray import DataArray

import pytensor.scalar as ps
import pytensor.xtensor.math as pxm
from pytensor import function
from pytensor.scalar import ScalarOp
from pytensor.xtensor.basic import rename
from pytensor.xtensor.math import add, exp
from pytensor.xtensor.math import add, exp, logsumexp
from pytensor.xtensor.type import xtensor
from tests.xtensor.util import xr_arange_like, xr_assert_allclose, xr_function

Expand Down Expand Up @@ -152,6 +153,28 @@ def test_cast():
yc64.astype("float64")


@pytest.mark.parametrize(
    ["shape", "dims", "axis"],
    [
        ((3, 4), ("a", "b"), None),
        ((3, 4), "a", 0),
        ((3, 4), "b", 1),
    ],
)
def test_logsumexp(shape, dims, axis):
    """Check xtensor logsumexp against scipy's reference implementation.

    ``dims`` (xtensor dimension names) and ``axis`` (positional axes)
    describe the same reduction, so the two results must agree.
    """
    # Use distinct, non-symmetric values so that a wrong axis, a missing
    # sum, or a value error actually changes the result — with the previous
    # all-zeros input every exp(x) was 1 and only reduction *counts* were
    # being tested.
    scipy_inp = np.arange(np.prod(shape), dtype="float64").reshape(shape)
    scipy_out = scipy_logsumexp(scipy_inp, axis=axis)

    pytensor_inp = DataArray(scipy_inp, dims=("a", "b"))
    f = function([], logsumexp(pytensor_inp, dim=dims))
    pytensor_out = f()

    # assert_allclose uses a relative tolerance and gives clearer failure
    # output than the legacy decimal-based assert_array_almost_equal.
    np.testing.assert_allclose(pytensor_out, scipy_out)


def test_dot():
"""Test basic dot product operations."""
# Test matrix-vector dot product (with multiple-letter dim names)
Expand Down