Skip to content

Commit

Permalink
Merge pull request #22 from jtamir/fix_gradmethod_maxeig
Browse files Browse the repository at this point in the history
for LinearLeastSquares App, don't compute max eig if alpha is specified
  • Loading branch information
frankong committed Jul 2, 2019
2 parents 5743d41 + 9e21875 commit d7f865c
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 9 deletions.
16 changes: 8 additions & 8 deletions sigpy/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -290,14 +290,14 @@ def gradf(x):
if self.mu != 0:
AHA += self.mu * I

max_eig = MaxEig(AHA, dtype=self.x.dtype,
device=self.x_device, max_iter=self.max_power_iter,
show_pbar=self.show_pbar).run()

if max_eig == 0:
self.alpha = 1
else:
self.alpha = 1 / max_eig
if self.alpha is None:
max_eig = MaxEig(AHA, dtype=self.x.dtype, device=self.x_device,
max_iter=self.max_power_iter,
show_pbar=self.show_pbar).run()
if max_eig == 0:
self.alpha = 1
else:
self.alpha = 1 / max_eig

self.alg = GradientMethod(
gradf,
Expand Down
2 changes: 1 addition & 1 deletion tests/test_app.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ def test_precond_LinearLeastSquares(self):
x_rec = app.LinearLeastSquares(A, y, show_pbar=False).run()
npt.assert_allclose(x_rec, x_lstsq, atol=1e-3)

alpha = p / app.MaxEig(P * A.H * A, show_pbar=False).run()
alpha = 1 / app.MaxEig(P * A.H * A, show_pbar=False).run()
x_rec = app.LinearLeastSquares(
A,
y,
Expand Down

0 comments on commit d7f865c

Please sign in to comment.