Skip to content

Commit

Permalink
More rearrangement. Make symeig common across backends
Browse files Browse the repository at this point in the history
  • Loading branch information
aarmey committed Feb 13, 2021
1 parent d1a8a50 commit 4cba332
Show file tree
Hide file tree
Showing 6 changed files with 81 additions and 151 deletions.
67 changes: 64 additions & 3 deletions tensorly/backend/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -772,10 +772,8 @@ def partial_svd(self, matrix, n_eigenvecs=None, random_state=None, **kwargs):
dim_1, dim_2 = self.shape(matrix)
if dim_1 <= dim_2:
min_dim = dim_1
max_dim = dim_2
else:
min_dim = dim_2
max_dim = dim_1

# Default on standard SVD
if (n_eigenvecs is None) or (min_dim <= n_eigenvecs):
Expand Down Expand Up @@ -862,12 +860,75 @@ def truncated_svd(self, matrix, n_eigenvecs=None):
U, S, V = U[:, :n_eigenvecs], S[:n_eigenvecs], V[:n_eigenvecs, :]
return U, S, V

def symeig_svd(self, matrix, n_eigenvecs=None, **kwargs):
    """Computes a truncated SVD on `matrix` using symeig.

    Uses a symmetric eigen-decomposition (``self.eigh``) of
    ``matrix.T.dot(matrix)`` or its transpose, whichever Gram matrix is
    larger, and converts the eigen-factors into singular factors.

    Parameters
    ----------
    matrix : 2D-array
    n_eigenvecs : int, optional, default is None
        if specified, number of eigen[vectors-values] to return
    **kwargs : optional
        kwargs are used to absorb the difference of parameters among the
        other SVD functions

    Returns
    -------
    U : 2D-array
        of shape (matrix.shape[0], n_eigenvecs)
        contains the left singular vectors
    S : 1D-array
        of shape (n_eigenvecs, )
        contains the singular values of `matrix`
    V : 2D-array
        of shape (n_eigenvecs, matrix.shape[1])
        contains the right singular vectors
    """
    # Check that matrix is... a matrix!
    if self.ndim(matrix) != 2:
        # Fixed wording of the error message (was "matrix be a matrix").
        raise ValueError('matrix should be a matrix. matrix.ndim is %d != 2'
                         % self.ndim(matrix))

    dim_1, dim_2 = self.shape(matrix)
    if dim_1 <= dim_2:
        min_dim = dim_1
        max_dim = dim_2
    else:
        min_dim = dim_2
        max_dim = dim_1

    if n_eigenvecs is None:
        n_eigenvecs = max_dim

    if min_dim <= n_eigenvecs:
        if n_eigenvecs > max_dim:
            warnings.warn('Trying to compute SVD with n_eigenvecs={0}, which '
                          'is larger than max(matrix.shape)={1}. Setting '
                          'n_eigenvecs to {1}'.format(n_eigenvecs, max_dim))
            n_eigenvecs = max_dim
        # we compute decomposition on the largest of the two to keep more eigenvecs
        dim_1, dim_2 = dim_2, dim_1

    if dim_1 < dim_2:
        # Smaller Gram matrix is (dim_1, dim_1): its eigenvectors give U
        # directly; V is recovered from matrix^T U / S.
        S, U = self.eigh(self.dot(matrix, self.transpose(matrix)))
        S = self.sqrt(S)  # eigenvalues of A A^T are squared singular values
        # NOTE(review): zero (or numerically negative) eigenvalues make this
        # division produce inf/nan columns beyond the matrix rank -- confirm
        # callers only request n_eigenvecs within the rank.
        V = self.dot(self.transpose(matrix), U / self.reshape(S, (1, -1)))
    else:
        # Symmetric case: eigenvectors of A^T A give V; U = A V / S.
        S, V = self.eigh(self.dot(self.transpose(matrix), matrix))
        S = self.sqrt(S)
        U = self.dot(matrix, V) / self.reshape(S, (1, -1))

    # eigh returns eigenvalues in ascending order; flip everything to
    # descending and lay V out with singular vectors as rows, matching the
    # np.linalg.svd convention used by the other SVD_FUNS.
    U, S, V = U[:, ::-1], S[::-1], self.transpose(V)[::-1, :]
    return U[:, :n_eigenvecs], S[:n_eigenvecs], V[:n_eigenvecs, :]

index = Index()

@property
def SVD_FUNS(self):
    """Dispatch table mapping SVD method names to bound implementations."""
    funs = dict(numpy_svd=self.partial_svd,
                truncated_svd=self.truncated_svd,
                symeig_svd=self.symeig_svd)
    return funs

@staticmethod
def index_update(tensor, indices, values):
Expand Down
2 changes: 1 addition & 1 deletion tensorly/backend/cupy_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,5 +82,5 @@ def solve(self, matrix1, matrix2):
'conj', 'diag', 'einsum']:
CupyBackend.register_method(name, getattr(cp, name))

for name in ['svd', 'qr']:
for name in ['svd', 'qr', 'eigh']:
CupyBackend.register_method(name, getattr(cp.linalg, name))
8 changes: 1 addition & 7 deletions tensorly/backend/jax_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,12 +85,6 @@ def kr(self, matrices, weights=None, mask=None):
m = mask.reshape((-1, 1)) if mask is not None else 1
return np.einsum(operation, *matrices).reshape((-1, n_columns))*m

@property
def SVD_FUNS(self):
    """Available SVD routines for this backend, keyed by public name."""
    return dict(numpy_svd=self.partial_svd,
                truncated_svd=self.truncated_svd)


@staticmethod
def sort(tensor, axis, descending = False):
if descending:
Expand All @@ -105,7 +99,7 @@ def sort(tensor, axis, descending = False):
'argmax', 'stack', 'conj', 'diag', 'clip', 'einsum']:
JaxBackend.register_method(name, getattr(np, name))

for name in ['solve', 'qr', 'svd']:
for name in ['solve', 'qr', 'svd', 'eigh']:
JaxBackend.register_method(name, getattr(np.linalg, name))

for name in ['index', 'index_update']:
Expand Down
82 changes: 14 additions & 68 deletions tensorly/backend/mxnet_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,73 +79,19 @@ def conj(x, *args, **kwargs):
"""
return x

def symeig_svd(self, matrix, n_eigenvecs=None, **kwargs):
    """Computes a truncated SVD on `matrix` using symeig.

    Uses a symmetric eigen-decomposition of ``matrix.T.dot(matrix)`` or its
    transpose (whichever Gram matrix is larger).

    Parameters
    ----------
    matrix : 2D-array
    n_eigenvecs : int, optional, default is None
        if specified, number of eigen[vectors-values] to return
    **kwargs : optional
        kwargs are used to absorb the difference of parameters among the
        other SVD functions

    Returns
    -------
    U : 2D-array
        of shape (matrix.shape[0], n_eigenvecs)
        contains the left singular vectors
    S : 1D-array
        of shape (n_eigenvecs, )
        contains the singular values of `matrix`
    V : 2D-array
        of shape (n_eigenvecs, matrix.shape[1])
        contains the right singular vectors
    """
    # Check that matrix is... a matrix!
    if self.ndim(matrix) != 2:
        raise ValueError('matrix be a matrix. matrix.ndim is %d != 2'
                         % self.ndim(matrix))

    dim_1, dim_2 = self.shape(matrix)
    if dim_1 <= dim_2:
        min_dim = dim_1
        max_dim = dim_2
    else:
        min_dim = dim_2
        max_dim = dim_1

    # By default return as many components as the *larger* dimension allows.
    if n_eigenvecs is None:
        n_eigenvecs = max_dim

    if min_dim <= n_eigenvecs:
        if n_eigenvecs > max_dim:
            warnings.warn('Trying to compute SVD with n_eigenvecs={0}, which '
                          'is larger than max(matrix.shape)={1}. Setting '
                          'n_eigenvecs to {1}'.format(n_eigenvecs, max_dim))
            n_eigenvecs = max_dim
        # we compute decomposition on the largest of the two to keep more eigenvecs
        dim_1, dim_2 = dim_2, dim_1

    if dim_1 < dim_2:
        # Eigenvectors of A A^T are the left singular vectors U.
        S, U = np.linalg.eigh(np.dot(matrix, np.transpose(matrix)))
        S = self.sqrt(S)  # eigenvalues of the Gram matrix = squared singular values
        V = np.dot(np.transpose(matrix), U / np.reshape(S, (1, -1)))
    else:
        # Eigenvectors of A^T A are the right singular vectors V.
        S, V = np.linalg.eigh(np.dot(np.transpose(matrix), matrix))
        S = self.sqrt(S)
        U = np.dot(matrix, V) / np.reshape(S, (1, -1))

    # eigh returns ascending eigenvalues; flip to descending and put the
    # singular vectors of V in rows, matching np.linalg.svd's layout.
    U, S, V = U[:, ::-1], S[::-1], np.transpose(V)[::-1, :]
    return U[:, :n_eigenvecs], S[:n_eigenvecs], V[:n_eigenvecs, :]

@property
def SVD_FUNS(self):
    """Available SVD routines for this backend, keyed by public name."""
    table = {}
    table['numpy_svd'] = self.partial_svd
    table['truncated_svd'] = self.truncated_svd
    table['symeig_svd'] = self.symeig_svd
    return table
@staticmethod
def svd(X, full_matrices=True):
    """Compute the SVD of `X` with the np.linalg.svd convention.

    Parameters
    ----------
    X : 2D-array
    full_matrices : bool, default is True
        If False, return the reduced factors (U truncated to min(X.shape)
        columns, V to min(X.shape) rows), as in ``numpy.linalg.svd``.

    Returns
    -------
    U : 2D-array of left singular vectors (columns)
    S : 1D-array of singular values, in descending order
    V : 2D-array of right singular vectors (rows)
    """
    if X.shape[0] > X.shape[1]:
        # Tall input: factor the (wide) transpose and swap/transpose the
        # factors back -- presumably a workaround for a shape restriction
        # in this backend's linalg.svd; TODO confirm.
        U, S, V = np.linalg.svd(X.T)
        U, V = V.T, U.T
    else:
        U, S, V = np.linalg.svd(X)

    if not full_matrices:
        # Bug fix: the original only truncated in the wide branch, so a
        # tall input returned full matrices even when full_matrices=False.
        k = min(X.shape)
        U = U[:, :k]
        V = V[:k, :]

    return U, S, V

@staticmethod
def sort(tensor, axis, descending = False):
Expand All @@ -162,5 +108,5 @@ def sort(tensor, axis, descending = False):
'argmax', 'stack', 'diag', 'einsum']:
MxnetBackend.register_method(name, getattr(np, name))

for name in ['solve', 'qr', 'svd']:
for name in ['solve', 'qr', 'eigh']:
MxnetBackend.register_method(name, getattr(np.linalg, name))
2 changes: 1 addition & 1 deletion tensorly/backend/numpy_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,5 +82,5 @@ def sort(tensor, axis, descending = False):
'argmax', 'stack', 'conj', 'diag', 'einsum']:
NumpyBackend.register_method(name, getattr(np, name))

for name in ['solve', 'qr', 'svd']:
for name in ['solve', 'qr', 'svd', 'eigh']:
NumpyBackend.register_method(name, getattr(np.linalg, name))
71 changes: 0 additions & 71 deletions tensorly/backend/pytorch_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -196,77 +196,6 @@ def _reverse(tensor, axis=0):
def svd(matrix, full_matrices=True):
    """Computes the standard SVD.

    Parameters
    ----------
    matrix : 2D tensor
    full_matrices : bool, default is True
        If True, return the full (square) U, as in ``numpy.linalg.svd``;
        if False, return the reduced factorization.

    Returns
    -------
    U, S, V : tensors
        NOTE(review): ``torch.svd`` returns V itself (A = U diag(S) V^T),
        not V^H as ``numpy.linalg.svd`` does -- confirm callers expect this
        convention before changing it.
    """
    # Bug fix: torch.svd's `some=True` requests the *reduced* decomposition,
    # which is the opposite of NumPy's `full_matrices=True`, so the flag
    # must be negated when forwarded.
    return torch.svd(matrix, some=not full_matrices)

def symeig_svd(self, matrix, n_eigenvecs=None, **kwargs):
    """Computes a truncated SVD on `matrix` using symeig.

    Uses ``torch.symeig`` on ``matrix.T.dot(matrix)`` or its transpose
    (whichever Gram matrix is larger).

    Parameters
    ----------
    matrix : 2D-array
    n_eigenvecs : int, optional, default is None
        if specified, number of eigen[vectors-values] to return
    **kwargs : optional
        kwargs are used to absorb the difference of parameters among the
        other SVD functions

    Returns
    -------
    U : 2D-array
        of shape (matrix.shape[0], n_eigenvecs)
        contains the left singular vectors
    S : 1D-array
        of shape (n_eigenvecs, )
        contains the singular values of `matrix`
    V : 2D-array
        of shape (n_eigenvecs, matrix.shape[1])
        contains the right singular vectors
    """
    # Check that matrix is... a matrix!
    if self.ndim(matrix) != 2:
        raise ValueError('matrix be a matrix. matrix.ndim is %d != 2'
                         % self.ndim(matrix))
    dim_1, dim_2 = self.shape(matrix)
    if dim_1 <= dim_2:
        min_dim = dim_1
        max_dim = dim_2
    else:
        min_dim = dim_2
        max_dim = dim_1

    # By default return as many components as the *larger* dimension allows.
    if n_eigenvecs is None:
        n_eigenvecs = max_dim

    if min_dim <= n_eigenvecs:
        if n_eigenvecs > max_dim:
            warnings.warn('Trying to compute SVD with n_eigenvecs={0}, which '
                          'is larger than max(matrix.shape)={1}. Setting '
                          'n_eigenvecs to {1}'.format(n_eigenvecs, max_dim))
            n_eigenvecs = max_dim
        # we compute decomposition on the largest of the two to keep more eigenvecs
        dim_1, dim_2 = dim_2, dim_1

    if dim_1 < dim_2:
        # Eigenvectors of A A^T are the left singular vectors U.
        S, U = torch.symeig(self.dot(matrix, self.transpose(matrix)),
                            eigenvectors=True)
        S = torch.sqrt(S)  # Gram eigenvalues are squared singular values
        V = self.dot(self.transpose(matrix), U / self.reshape(S, (1, -1)))
    else:
        # Eigenvectors of A^T A are the right singular vectors V.
        S, V = torch.symeig(self.dot(self.transpose(matrix), matrix),
                            eigenvectors=True)
        S = torch.sqrt(S)
        U = self.dot(matrix, V) / self.reshape(S, (1, -1))

    # symeig returns ascending eigenvalues; reverse to descending order and
    # lay V out with singular vectors in rows, matching np.linalg.svd.
    U = self._reverse(U, 1)
    S = self._reverse(S)
    V = self._reverse(self.transpose(V), 0)
    return U[:, :n_eigenvecs], S[:n_eigenvecs], V[:n_eigenvecs, :]

@property
def SVD_FUNS(self):
    """Available SVD routines for this backend, keyed by public name."""
    names = ('numpy_svd', 'truncated_svd', 'symeig_svd')
    impls = (self.partial_svd, self.truncated_svd, self.symeig_svd)
    return dict(zip(names, impls))

@staticmethod
def sort(tensor, axis, descending = False):
Expand Down

0 comments on commit 4cba332

Please sign in to comment.