From 6ccb435482c3e6d509122695b9d536df297fa8a1 Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Wed, 3 Apr 2024 09:30:10 -0400 Subject: [PATCH 01/22] patch project/back to be adjoints [skip ci] --- src/aspire/image/image.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aspire/image/image.py b/src/aspire/image/image.py index 9d2911ad7a..ff2353d333 100644 --- a/src/aspire/image/image.py +++ b/src/aspire/image/image.py @@ -537,7 +537,7 @@ def backproject(self, rot_matrices, symmetry_group=None): ) pts_rot = pts_rot.reshape((3, -1)) - vol += anufft(im_f, pts_rot[::-1], (L, L, L), real=True) + vol += anufft(im_f, pts_rot, (L, L, L), real=True) vol /= L From dadc94d5b9b2a8a1f74019fbe79ddbebb0f09dfc Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Tue, 30 Apr 2024 15:14:25 -0400 Subject: [PATCH 02/22] another pts_rot reversal --- src/aspire/reconstruction/mean.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aspire/reconstruction/mean.py b/src/aspire/reconstruction/mean.py index f7f5bf1a2c..22856d9196 100644 --- a/src/aspire/reconstruction/mean.py +++ b/src/aspire/reconstruction/mean.py @@ -126,7 +126,7 @@ def _compute_kernel(self): batch_kernel += ( 1 / (self.r * self.src.L**4) - * anufft(weights, pts_rot[::-1], (_2L, _2L, _2L), real=True) + * anufft(weights, pts_rot, (_2L, _2L, _2L), real=True) ) kernel[k, j] += batch_kernel From 7414c7c3a32ffe1b4f220dd829b12e644b21d955 Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Tue, 30 Apr 2024 08:56:01 -0400 Subject: [PATCH 03/22] simplify mean est and back tests [skip ci] --- src/aspire/reconstruction/estimator.py | 2 +- tests/test_mean_estimator.py | 227 +++--------------- tests/test_weighted_mean_estimator.py | 320 +++---------------------- 3 files changed, 63 insertions(+), 486 deletions(-) diff --git a/src/aspire/reconstruction/estimator.py b/src/aspire/reconstruction/estimator.py index 679b3e1234..3e154800bb 100644 --- a/src/aspire/reconstruction/estimator.py +++ b/src/aspire/reconstruction/estimator.py @@ -133,7 +133,7 @@ def estimate(self, b_coef=None, tol=1e-5, regularizer=0): if b_coef is None: b_coef = self.src_backward() est_coef = self.conj_grad(b_coef, tol=tol, regularizer=regularizer) - est = Coef(self.basis, est_coef).evaluate().T + est = Coef(self.basis, est_coef).evaluate() return est diff --git a/tests/test_mean_estimator.py b/tests/test_mean_estimator.py index 4bbd64e6f3..1095f76d27 100644 --- a/tests/test_mean_estimator.py +++ b/tests/test_mean_estimator.py @@ -5,11 +5,12 @@ import numpy as np from pytest import raises -from aspire.basis import FBBasis3D +from aspire.basis import Coef, FBBasis3D from aspire.operators import RadialCTFFilter from aspire.reconstruction import MeanEstimator from aspire.source.simulation import _LegacySimulation -from aspire.volume import LegacyVolume +from aspire.utils import grid_3d +from aspire.volume import LegacyVolume, Volume DATA_DIR = os.path.join(os.path.dirname(__file__), "saved_test_data") @@ -17,17 +18,17 @@ class MeanEstimatorTestCase(TestCase): def setUp(self): self.dtype = np.float32 - self.resolution = 8 - self.vols = LegacyVolume(L=self.resolution, dtype=self.dtype).generate() + self.L = 8 + self.vol = LegacyVolume(L=self.L, C=1, dtype=self.dtype).generate() self.sim = _LegacySimulation( n=1024, - vols=self.vols, + vols=self.vol, unique_filters=[ RadialCTFFilter(defocus=d) for d in np.linspace(1.5e4, 2.5e4, 7) ], dtype=self.dtype, ) - self.basis = FBBasis3D((self.resolution,) * 3, dtype=self.dtype) + self.basis = FBBasis3D((self.L,) 
* 3, dtype=self.dtype) self.estimator = MeanEstimator( self.sim, basis=self.basis, preconditioner="none" @@ -36,6 +37,7 @@ def setUp(self): self.estimator_with_preconditioner = MeanEstimator( self.sim, basis=self.basis, preconditioner="circulant" ) + self.mask = grid_3d(self.L)["r"] < 1 def tearDown(self): pass @@ -47,208 +49,33 @@ def testEstimateResolutionError(self): with raises(ValueError, match=r".*resolution.*"): # This basis is intentionally the wrong resolution. - incorrect_basis = FBBasis3D((2 * self.resolution,) * 3, dtype=self.dtype) + incorrect_basis = FBBasis3D((2 * self.L,) * 3, dtype=self.dtype) _ = MeanEstimator(self.sim, basis=incorrect_basis, preconditioner="none") def testEstimate(self): estimate = self.estimator.estimate() - self.assertTrue( - np.allclose( - estimate.asnumpy()[0][:, :, 4], - [ - [ - +0.00000000, - +0.00000000, - +0.00000000, - +0.00000000, - -0.00000000, - +0.00000000, - +0.00000000, - +0.00000000, - ], - [ - +0.00000000, - +0.00000000, - +0.02446793, - +0.05363505, - +0.21988572, - +0.19513786, - +0.01174418, - +0.00000000, - ], - [ - +0.00000000, - -0.06168774, - +0.13178457, - +0.36011154, - +0.88632372, - +0.92307694, - +0.45524491, - +0.15142541, - ], - [ - +0.00000000, - -0.09108749, - +0.19564009, - +0.78325885, - +2.34527692, - +2.44817345, - +1.41268619, - +0.53634876, - ], - [ - +0.00000000, - +0.07150180, - +0.38347393, - +1.70868980, - +3.78134981, - +3.03582139, - +1.49942724, - +0.52104809, - ], - [ - +0.00000000, - +0.00736866, - +0.19239950, - +1.71596036, - +3.59823119, - +2.64081679, - +1.08514933, - +0.24995637, - ], - [ - +0.00000000, - +0.11075829, - +0.43197553, - +0.82667320, - +1.51163241, - +1.25342639, - +0.36478594, - -0.00464912, - ], - [ - +0.00000000, - +0.00000000, - +0.43422818, - +0.64440739, - +0.44137408, - +0.25311494, - +0.00011242, - +0.00000000, - ], - ], - atol=1e-5, - ) + + est = estimate.asnumpy() * self.mask + vol = self.vol.asnumpy() * self.mask + + np.testing.assert_allclose( + est / np.linalg.norm(est), vol / np.linalg.norm(vol), atol=0.05 ) def testAdjoint(self): - mean_b_coef = self.estimator.src_backward().squeeze() - self.assertTrue( - np.allclose( - mean_b_coef, - [ - 1.07338590e-01, - 1.23690941e-01, - 6.44482039e-03, - -5.40484306e-02, - -4.85304586e-02, - 1.09852144e-02, - 3.87838396e-02, - 3.43796455e-02, - -6.43284705e-03, - -2.86677145e-02, - -1.42313328e-02, - -2.25684091e-03, - -3.31840727e-02, - -2.59706174e-03, - -5.91919887e-04, - -9.97433028e-03, - 9.19123928e-04, - 1.19891589e-03, - 7.49154982e-03, - 6.18865229e-03, - -8.13265715e-04, - -1.30715655e-02, - -1.44160603e-02, - 2.90379956e-03, - 2.37066082e-02, - 4.88805735e-03, - 1.47870707e-03, - 7.63376018e-03, - -5.60619559e-03, - 1.05165081e-02, - 3.30510143e-03, - -3.48652120e-03, - -4.23228797e-04, - 1.40484061e-02, - 1.42914291e-03, - -1.28129504e-02, - 2.19868825e-03, - -6.30835037e-03, - 1.18524223e-03, - -2.97855052e-02, - 1.15491057e-03, - -8.27947006e-03, - 3.45442781e-03, - -4.72868856e-03, - 2.66615329e-03, - -7.87929790e-03, - 8.84126590e-04, - 1.59402808e-03, - -9.06854048e-05, - -8.79119004e-03, - 1.76449039e-03, - -1.36414673e-02, - 1.56793855e-03, - 1.44708445e-02, - -2.55974802e-03, - 5.38506357e-03, - -3.24188673e-03, - 4.81582945e-04, - 7.74260101e-05, - 5.48772082e-03, - 1.92058500e-03, - -4.63538896e-03, - -2.02735133e-03, - 3.67592386e-03, - 7.23486969e-04, - 1.81838422e-03, - 1.78793284e-03, - -8.01474060e-03, - -8.54007528e-03, - 1.96353845e-03, - -2.16254252e-03, - -3.64243996e-05, - -2.27329863e-03, - 
1.11424393e-03, - -1.39389189e-03, - 2.57787159e-04, - 3.66918811e-03, - 1.31477774e-03, - 6.82220128e-04, - 1.41822851e-03, - -1.89476924e-03, - -6.43966255e-05, - -7.87888465e-04, - -6.99459279e-04, - 1.08918981e-03, - 2.25264584e-03, - -1.43651015e-04, - 7.68377620e-04, - 5.05955256e-04, - 2.66936132e-06, - 2.24934884e-03, - 6.70529439e-04, - 4.81121742e-04, - -6.40789745e-05, - -3.35915672e-04, - -7.98651783e-04, - -9.82705453e-04, - 6.46337066e-05, - ], - atol=1e-6, - ) + # Mean coefs formed by backprojections + mean_b_coef = self.estimator.src_backward() + + # Evaluate mean coefs into a volume + est = Coef(self.basis, mean_b_coef).evaluate() + + # Mask off corners of volume + vol = self.vol.asnumpy() * self.mask + + # Assert the mean volume is close to original volume + np.testing.assert_allclose( + est / np.linalg.norm(est), vol / np.linalg.norm(vol), atol=0.1 ) def testOptimize1(self): diff --git a/tests/test_weighted_mean_estimator.py b/tests/test_weighted_mean_estimator.py index 23496428c1..7940ad0333 100644 --- a/tests/test_weighted_mean_estimator.py +++ b/tests/test_weighted_mean_estimator.py @@ -4,10 +4,11 @@ import numpy as np -from aspire.basis import FBBasis3D +from aspire.basis import Coef, FBBasis3D from aspire.operators import RadialCTFFilter from aspire.reconstruction import WeightedVolumesEstimator from aspire.source import _LegacySimulation +from aspire.utils import grid_3d from aspire.volume import LegacyVolume logger = logging.getLogger(__name__) @@ -22,7 +23,7 @@ def setUp(self): self.r = 2 self.L = L = 8 self.sim = _LegacySimulation( - vols=LegacyVolume(L, dtype=self.dtype).generate(), + vols=LegacyVolume(L, C=1, dtype=self.dtype).generate(), n=self.n, unique_filters=[ RadialCTFFilter(defocus=d) for d in np.linspace(1.5e4, 2.5e4, 7) @@ -37,6 +38,7 @@ def setUp(self): self.estimator_with_preconditioner = WeightedVolumesEstimator( self.weights, self.sim, basis=self.basis, preconditioner="circulant" ) + self.mask = grid_3d(self.L)["r"] < 1 def tearDown(self): pass @@ -44,204 +46,31 @@ def tearDown(self): def testPositiveWeightedEstimates(self): estimate = self.estimator.estimate() - a = (estimate.asnumpy()[0][:, :, 4],) + est = estimate * self.mask + vol = self.sim.vols.asnumpy() * self.mask + vol /= np.linalg.norm(vol) - b = np.array( - [ - [ - +0.00000000, - +0.00000000, - +0.00000000, - +0.00000000, - -0.00000000, - +0.00000000, - +0.00000000, - +0.00000000, - ], - [ - +0.00000000, - +0.00000000, - +0.02446793, - +0.05363505, - +0.21988572, - +0.19513786, - +0.01174418, - +0.00000000, - ], - [ - +0.00000000, - -0.06168774, - +0.13178457, - +0.36011154, - +0.88632372, - +0.92307694, - +0.45524491, - +0.15142541, - ], - [ - +0.00000000, - -0.09108749, - +0.19564009, - +0.78325885, - +2.34527692, - +2.44817345, - +1.41268619, - +0.53634876, - ], - [ - +0.00000000, - +0.07150180, - +0.38347393, - +1.70868980, - +3.78134981, - +3.03582139, - +1.49942724, - +0.52104809, - ], - [ - +0.00000000, - +0.00736866, - +0.19239950, - +1.71596036, - +3.59823119, - +2.64081679, - +1.08514933, - +0.24995637, - ], - [ - +0.00000000, - +0.11075829, - +0.43197553, - +0.82667320, - +1.51163241, - +1.25342639, - +0.36478594, - -0.00464912, - ], - [ - +0.00000000, - +0.00000000, - +0.43422818, - +0.64440739, - +0.44137408, - +0.25311494, - +0.00011242, - +0.00000000, - ], - ] - ) - - logger.info(f"max abs diff: {np.max(np.abs(a - b))}") - self.assertTrue(np.allclose(a, b, atol=1e-5)) + # Compare each output volume + for _est in est: + np.testing.assert_allclose( + _est / 
np.linalg.norm(_est), vol / np.linalg.norm(vol), atol=0.05 + ) def testAdjoint(self): - mean_b_coef = self.estimator.src_backward().squeeze() - self.assertTrue( - np.allclose( - mean_b_coef, - [ - 1.07338590e-01, - 1.23690941e-01, - 6.44482039e-03, - -5.40484306e-02, - -4.85304586e-02, - 1.09852144e-02, - 3.87838396e-02, - 3.43796455e-02, - -6.43284705e-03, - -2.86677145e-02, - -1.42313328e-02, - -2.25684091e-03, - -3.31840727e-02, - -2.59706174e-03, - -5.91919887e-04, - -9.97433028e-03, - 9.19123928e-04, - 1.19891589e-03, - 7.49154982e-03, - 6.18865229e-03, - -8.13265715e-04, - -1.30715655e-02, - -1.44160603e-02, - 2.90379956e-03, - 2.37066082e-02, - 4.88805735e-03, - 1.47870707e-03, - 7.63376018e-03, - -5.60619559e-03, - 1.05165081e-02, - 3.30510143e-03, - -3.48652120e-03, - -4.23228797e-04, - 1.40484061e-02, - 1.42914291e-03, - -1.28129504e-02, - 2.19868825e-03, - -6.30835037e-03, - 1.18524223e-03, - -2.97855052e-02, - 1.15491057e-03, - -8.27947006e-03, - 3.45442781e-03, - -4.72868856e-03, - 2.66615329e-03, - -7.87929790e-03, - 8.84126590e-04, - 1.59402808e-03, - -9.06854048e-05, - -8.79119004e-03, - 1.76449039e-03, - -1.36414673e-02, - 1.56793855e-03, - 1.44708445e-02, - -2.55974802e-03, - 5.38506357e-03, - -3.24188673e-03, - 4.81582945e-04, - 7.74260101e-05, - 5.48772082e-03, - 1.92058500e-03, - -4.63538896e-03, - -2.02735133e-03, - 3.67592386e-03, - 7.23486969e-04, - 1.81838422e-03, - 1.78793284e-03, - -8.01474060e-03, - -8.54007528e-03, - 1.96353845e-03, - -2.16254252e-03, - -3.64243996e-05, - -2.27329863e-03, - 1.11424393e-03, - -1.39389189e-03, - 2.57787159e-04, - 3.66918811e-03, - 1.31477774e-03, - 6.82220128e-04, - 1.41822851e-03, - -1.89476924e-03, - -6.43966255e-05, - -7.87888465e-04, - -6.99459279e-04, - 1.08918981e-03, - 2.25264584e-03, - -1.43651015e-04, - 7.68377620e-04, - 5.05955256e-04, - 2.66936132e-06, - 2.24934884e-03, - 6.70529439e-04, - 4.81121742e-04, - -6.40789745e-05, - -3.35915672e-04, - -7.98651783e-04, - -9.82705453e-04, - 6.46337066e-05, - ], - atol=1e-6, + # Mean coefs formed by backprojections + mean_b_coef = self.estimator.src_backward() + + # Evaluate mean coefs into a volume + est = Coef(self.basis, mean_b_coef).evaluate() + + # Mask off corners of volume + vol = self.sim.vols.asnumpy() * self.mask + + # Assert the mean volumes are close to original volume + for _est in est: + np.testing.assert_allclose( + _est / np.linalg.norm(est), vol / np.linalg.norm(vol), atol=0.1 ) - ) def testOptimize1(self): mean_b_coef = np.array( @@ -693,95 +522,16 @@ def testNegativeWeightedEstimates(self): estimate = estimator.estimate() - a0 = estimate.asnumpy()[0][:, :, 4] - a1 = estimate.asnumpy()[1][:, :, 4] + est = estimate * self.mask + vol = self.sim.vols.asnumpy() * self.mask + vol /= np.linalg.norm(vol) - b = np.array( - [ - [ - +0.00000000, - +0.00000000, - +0.00000000, - +0.00000000, - -0.00000000, - +0.00000000, - +0.00000000, - +0.00000000, - ], - [ - +0.00000000, - +0.00000000, - +0.02446793, - +0.05363505, - +0.21988572, - +0.19513786, - +0.01174418, - +0.00000000, - ], - [ - +0.00000000, - -0.06168774, - +0.13178457, - +0.36011154, - +0.88632372, - +0.92307694, - +0.45524491, - +0.15142541, - ], - [ - +0.00000000, - -0.09108749, - +0.19564009, - +0.78325885, - +2.34527692, - +2.44817345, - +1.41268619, - +0.53634876, - ], - [ - +0.00000000, - +0.07150180, - +0.38347393, - +1.70868980, - +3.78134981, - +3.03582139, - +1.49942724, - +0.52104809, - ], - [ - +0.00000000, - +0.00736866, - +0.19239950, - +1.71596036, - +3.59823119, - +2.64081679, - +1.08514933, - 
+0.24995637, - ], - [ - +0.00000000, - +0.11075829, - +0.43197553, - +0.82667320, - +1.51163241, - +1.25342639, - +0.36478594, - -0.00464912, - ], - [ - +0.00000000, - +0.00000000, - +0.43422818, - +0.64440739, - +0.44137408, - +0.25311494, - +0.00011242, - +0.00000000, - ], - ] + # Compare positive weighted output volume + np.testing.assert_allclose( + est[0] / np.linalg.norm(est[0]), vol / np.linalg.norm(vol), atol=0.05 ) - logger.info(f"max abs diff: {np.max(np.abs(a0 - b))}") - self.assertTrue(np.allclose(a0, b, atol=1e-5)) - logger.info(f"max abs diff: {np.max(np.abs(-a1 - b))}") - self.assertTrue(np.allclose(-a1, b, atol=1e-5)) # negative weights + # Compare negative weighted output volume + np.testing.assert_allclose( + -1 * est[1] / np.linalg.norm(est[1]), vol / np.linalg.norm(vol), atol=0.05 + ) From 2733d812bc3200ce08295ba86f81c0f9f686fd84 Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Tue, 30 Apr 2024 15:55:05 -0400 Subject: [PATCH 04/22] tmp rm hardcoded 'optimize{1,2}' mean est tests --- tests/test_mean_estimator.py | 427 +------------------------ tests/test_weighted_mean_estimator.py | 434 +------------------------- 2 files changed, 8 insertions(+), 853 deletions(-) diff --git a/tests/test_mean_estimator.py b/tests/test_mean_estimator.py index 1095f76d27..00678ef5d1 100644 --- a/tests/test_mean_estimator.py +++ b/tests/test_mean_estimator.py @@ -79,433 +79,14 @@ def testAdjoint(self): ) def testOptimize1(self): - mean_b_coef = np.array( - [ - [ - 1.07338590e-01, - 1.23690941e-01, - 6.44482039e-03, - -5.40484306e-02, - -4.85304586e-02, - 1.09852144e-02, - 3.87838396e-02, - 3.43796455e-02, - -6.43284705e-03, - -2.86677145e-02, - -1.42313328e-02, - -2.25684091e-03, - -3.31840727e-02, - -2.59706174e-03, - -5.91919887e-04, - -9.97433028e-03, - 9.19123928e-04, - 1.19891589e-03, - 7.49154982e-03, - 6.18865229e-03, - -8.13265715e-04, - -1.30715655e-02, - -1.44160603e-02, - 2.90379956e-03, - 2.37066082e-02, - 4.88805735e-03, - 1.47870707e-03, - 7.63376018e-03, - -5.60619559e-03, - 1.05165081e-02, - 3.30510143e-03, - -3.48652120e-03, - -4.23228797e-04, - 1.40484061e-02, - 1.42914291e-03, - -1.28129504e-02, - 2.19868825e-03, - -6.30835037e-03, - 1.18524223e-03, - -2.97855052e-02, - 1.15491057e-03, - -8.27947006e-03, - 3.45442781e-03, - -4.72868856e-03, - 2.66615329e-03, - -7.87929790e-03, - 8.84126590e-04, - 1.59402808e-03, - -9.06854048e-05, - -8.79119004e-03, - 1.76449039e-03, - -1.36414673e-02, - 1.56793855e-03, - 1.44708445e-02, - -2.55974802e-03, - 5.38506357e-03, - -3.24188673e-03, - 4.81582945e-04, - 7.74260101e-05, - 5.48772082e-03, - 1.92058500e-03, - -4.63538896e-03, - -2.02735133e-03, - 3.67592386e-03, - 7.23486969e-04, - 1.81838422e-03, - 1.78793284e-03, - -8.01474060e-03, - -8.54007528e-03, - 1.96353845e-03, - -2.16254252e-03, - -3.64243996e-05, - -2.27329863e-03, - 1.11424393e-03, - -1.39389189e-03, - 2.57787159e-04, - 3.66918811e-03, - 1.31477774e-03, - 6.82220128e-04, - 1.41822851e-03, - -1.89476924e-03, - -6.43966255e-05, - -7.87888465e-04, - -6.99459279e-04, - 1.08918981e-03, - 2.25264584e-03, - -1.43651015e-04, - 7.68377620e-04, - 5.05955256e-04, - 2.66936132e-06, - 2.24934884e-03, - 6.70529439e-04, - 4.81121742e-04, - -6.40789745e-05, - -3.35915672e-04, - -7.98651783e-04, - -9.82705453e-04, - 6.46337066e-05, - ] - ], - dtype=self.dtype, - ) - + """ x = self.estimator.conj_grad(mean_b_coef) - self.assertTrue( - np.allclose( - x, - [ - 1.24325149e01, - 4.06481998e00, - 1.19149607e00, - -3.31414200e00, - -1.23897783e00, - 1.53987018e-01, - 2.50221093e00, - 
9.18131863e-01, - 4.09624945e-02, - -1.81129255e00, - -2.58832135e-01, - -7.21149988e-01, - -1.00909836e00, - 5.72232366e-02, - -3.90701966e-01, - -3.65655187e-01, - 2.33601017e-01, - 1.75039197e-01, - 2.52945224e-01, - 3.29783105e-01, - 7.85601834e-02, - -3.96439884e-01, - -8.56255814e-01, - 7.35131473e-03, - 1.10704423e00, - 7.35615877e-02, - 5.61772211e-01, - 2.60428522e-01, - -5.41932165e-01, - 4.29851425e-01, - 3.86300956e-01, - -8.90168838e-02, - -1.02959264e-01, - 6.03104058e-01, - 1.85286462e-01, - -4.16434930e-01, - 2.11092135e-01, - -1.85514653e-01, - 9.80712710e-02, - -8.98429489e-01, - -9.54759574e-02, - -1.17952459e-01, - 1.41721715e-01, - -1.36184702e-01, - 3.23733962e-01, - -2.68721792e-01, - -1.42064052e-01, - 1.41909797e-01, - -2.24251300e-03, - -4.27538724e-01, - 1.28441757e-01, - -5.57623000e-01, - -1.54801935e-01, - 6.51729903e-01, - -2.15567768e-01, - 1.95041528e-01, - -4.18334680e-01, - 3.26735913e-02, - 6.35474331e-02, - 3.06828631e-01, - 1.43149180e-01, - -2.34377520e-01, - -1.54299735e-01, - 2.82627560e-01, - 9.60630473e-02, - 1.47687304e-01, - 1.38157247e-01, - -4.25581692e-01, - -5.62236939e-01, - 2.09287213e-01, - -1.14280315e-01, - 2.70617650e-02, - -1.19705716e-01, - 1.68350236e-02, - -1.20459065e-01, - 6.03971532e-02, - 3.21465643e-01, - 1.82032297e-01, - -2.95991444e-02, - 1.53711400e-01, - -1.30594319e-01, - -4.71412485e-02, - -1.35301477e-01, - -2.36292616e-01, - 1.95728111e-01, - 2.54618329e-01, - -1.81663289e-03, - 2.77960420e-02, - 3.58816749e-02, - -2.50138365e-02, - 2.54103161e-01, - 9.82534014e-02, - 9.00807559e-02, - 3.71458771e-02, - -7.86838200e-02, - -1.03837231e-01, - -1.26116949e-01, - 9.82006976e-02, - ], - atol=1e-4, - ) - ) + """ def testOptimize2(self): - mean_b_coef = np.array( - [ - [ - 1.07338590e-01, - 1.23690941e-01, - 6.44482039e-03, - -5.40484306e-02, - -4.85304586e-02, - 1.09852144e-02, - 3.87838396e-02, - 3.43796455e-02, - -6.43284705e-03, - -2.86677145e-02, - -1.42313328e-02, - -2.25684091e-03, - -3.31840727e-02, - -2.59706174e-03, - -5.91919887e-04, - -9.97433028e-03, - 9.19123928e-04, - 1.19891589e-03, - 7.49154982e-03, - 6.18865229e-03, - -8.13265715e-04, - -1.30715655e-02, - -1.44160603e-02, - 2.90379956e-03, - 2.37066082e-02, - 4.88805735e-03, - 1.47870707e-03, - 7.63376018e-03, - -5.60619559e-03, - 1.05165081e-02, - 3.30510143e-03, - -3.48652120e-03, - -4.23228797e-04, - 1.40484061e-02, - 1.42914291e-03, - -1.28129504e-02, - 2.19868825e-03, - -6.30835037e-03, - 1.18524223e-03, - -2.97855052e-02, - 1.15491057e-03, - -8.27947006e-03, - 3.45442781e-03, - -4.72868856e-03, - 2.66615329e-03, - -7.87929790e-03, - 8.84126590e-04, - 1.59402808e-03, - -9.06854048e-05, - -8.79119004e-03, - 1.76449039e-03, - -1.36414673e-02, - 1.56793855e-03, - 1.44708445e-02, - -2.55974802e-03, - 5.38506357e-03, - -3.24188673e-03, - 4.81582945e-04, - 7.74260101e-05, - 5.48772082e-03, - 1.92058500e-03, - -4.63538896e-03, - -2.02735133e-03, - 3.67592386e-03, - 7.23486969e-04, - 1.81838422e-03, - 1.78793284e-03, - -8.01474060e-03, - -8.54007528e-03, - 1.96353845e-03, - -2.16254252e-03, - -3.64243996e-05, - -2.27329863e-03, - 1.11424393e-03, - -1.39389189e-03, - 2.57787159e-04, - 3.66918811e-03, - 1.31477774e-03, - 6.82220128e-04, - 1.41822851e-03, - -1.89476924e-03, - -6.43966255e-05, - -7.87888465e-04, - -6.99459279e-04, - 1.08918981e-03, - 2.25264584e-03, - -1.43651015e-04, - 7.68377620e-04, - 5.05955256e-04, - 2.66936132e-06, - 2.24934884e-03, - 6.70529439e-04, - 4.81121742e-04, - -6.40789745e-05, - -3.35915672e-04, - -7.98651783e-04, - 
-9.82705453e-04, - 6.46337066e-05, - ] - ] - ) - + """ x = self.estimator_with_preconditioner.conj_grad(mean_b_coef) - self.assertTrue( - np.allclose( - x, - [ - 1.24325149e01, - 4.06481998e00, - 1.19149607e00, - -3.31414200e00, - -1.23897783e00, - 1.53987018e-01, - 2.50221093e00, - 9.18131863e-01, - 4.09624945e-02, - -1.81129255e00, - -2.58832135e-01, - -7.21149988e-01, - -1.00909836e00, - 5.72232366e-02, - -3.90701966e-01, - -3.65655187e-01, - 2.33601017e-01, - 1.75039197e-01, - 2.52945224e-01, - 3.29783105e-01, - 7.85601834e-02, - -3.96439884e-01, - -8.56255814e-01, - 7.35131473e-03, - 1.10704423e00, - 7.35615877e-02, - 5.61772211e-01, - 2.60428522e-01, - -5.41932165e-01, - 4.29851425e-01, - 3.86300956e-01, - -8.90168838e-02, - -1.02959264e-01, - 6.03104058e-01, - 1.85286462e-01, - -4.16434930e-01, - 2.11092135e-01, - -1.85514653e-01, - 9.80712710e-02, - -8.98429489e-01, - -9.54759574e-02, - -1.17952459e-01, - 1.41721715e-01, - -1.36184702e-01, - 3.23733962e-01, - -2.68721792e-01, - -1.42064052e-01, - 1.41909797e-01, - -2.24251300e-03, - -4.27538724e-01, - 1.28441757e-01, - -5.57623000e-01, - -1.54801935e-01, - 6.51729903e-01, - -2.15567768e-01, - 1.95041528e-01, - -4.18334680e-01, - 3.26735913e-02, - 6.35474331e-02, - 3.06828631e-01, - 1.43149180e-01, - -2.34377520e-01, - -1.54299735e-01, - 2.82627560e-01, - 9.60630473e-02, - 1.47687304e-01, - 1.38157247e-01, - -4.25581692e-01, - -5.62236939e-01, - 2.09287213e-01, - -1.14280315e-01, - 2.70617650e-02, - -1.19705716e-01, - 1.68350236e-02, - -1.20459065e-01, - 6.03971532e-02, - 3.21465643e-01, - 1.82032297e-01, - -2.95991444e-02, - 1.53711400e-01, - -1.30594319e-01, - -4.71412485e-02, - -1.35301477e-01, - -2.36292616e-01, - 1.95728111e-01, - 2.54618329e-01, - -1.81663289e-03, - 2.77960420e-02, - 3.58816749e-02, - -2.50138365e-02, - 2.54103161e-01, - 9.82534014e-02, - 9.00807559e-02, - 3.71458771e-02, - -7.86838200e-02, - -1.03837231e-01, - -1.26116949e-01, - 9.82006976e-02, - ], - atol=1e-4, - ) - ) + """ def testCheckpoint(self): """Exercise the checkpointing and max iterations branches.""" diff --git a/tests/test_weighted_mean_estimator.py b/tests/test_weighted_mean_estimator.py index 7940ad0333..b6637fbbd5 100644 --- a/tests/test_weighted_mean_estimator.py +++ b/tests/test_weighted_mean_estimator.py @@ -73,440 +73,14 @@ def testAdjoint(self): ) def testOptimize1(self): - mean_b_coef = np.array( - [ - [ - 1.07338590e-01, - 1.23690941e-01, - 6.44482039e-03, - -5.40484306e-02, - -4.85304586e-02, - 1.09852144e-02, - 3.87838396e-02, - 3.43796455e-02, - -6.43284705e-03, - -2.86677145e-02, - -1.42313328e-02, - -2.25684091e-03, - -3.31840727e-02, - -2.59706174e-03, - -5.91919887e-04, - -9.97433028e-03, - 9.19123928e-04, - 1.19891589e-03, - 7.49154982e-03, - 6.18865229e-03, - -8.13265715e-04, - -1.30715655e-02, - -1.44160603e-02, - 2.90379956e-03, - 2.37066082e-02, - 4.88805735e-03, - 1.47870707e-03, - 7.63376018e-03, - -5.60619559e-03, - 1.05165081e-02, - 3.30510143e-03, - -3.48652120e-03, - -4.23228797e-04, - 1.40484061e-02, - 1.42914291e-03, - -1.28129504e-02, - 2.19868825e-03, - -6.30835037e-03, - 1.18524223e-03, - -2.97855052e-02, - 1.15491057e-03, - -8.27947006e-03, - 3.45442781e-03, - -4.72868856e-03, - 2.66615329e-03, - -7.87929790e-03, - 8.84126590e-04, - 1.59402808e-03, - -9.06854048e-05, - -8.79119004e-03, - 1.76449039e-03, - -1.36414673e-02, - 1.56793855e-03, - 1.44708445e-02, - -2.55974802e-03, - 5.38506357e-03, - -3.24188673e-03, - 4.81582945e-04, - 7.74260101e-05, - 5.48772082e-03, - 1.92058500e-03, - -4.63538896e-03, - 
-2.02735133e-03, - 3.67592386e-03, - 7.23486969e-04, - 1.81838422e-03, - 1.78793284e-03, - -8.01474060e-03, - -8.54007528e-03, - 1.96353845e-03, - -2.16254252e-03, - -3.64243996e-05, - -2.27329863e-03, - 1.11424393e-03, - -1.39389189e-03, - 2.57787159e-04, - 3.66918811e-03, - 1.31477774e-03, - 6.82220128e-04, - 1.41822851e-03, - -1.89476924e-03, - -6.43966255e-05, - -7.87888465e-04, - -6.99459279e-04, - 1.08918981e-03, - 2.25264584e-03, - -1.43651015e-04, - 7.68377620e-04, - 5.05955256e-04, - 2.66936132e-06, - 2.24934884e-03, - 6.70529439e-04, - 4.81121742e-04, - -6.40789745e-05, - -3.35915672e-04, - -7.98651783e-04, - -9.82705453e-04, - 6.46337066e-05, - ] - ] - * self.r, - dtype=self.dtype, - ) - - # Given equal weighting we should get the same result for all self.r volumes. + """ x = self.estimator.conj_grad(mean_b_coef) - - ref = np.array( - [ - 1.24325149e01, - 4.06481998e00, - 1.19149607e00, - -3.31414200e00, - -1.23897783e00, - 1.53987018e-01, - 2.50221093e00, - 9.18131863e-01, - 4.09624945e-02, - -1.81129255e00, - -2.58832135e-01, - -7.21149988e-01, - -1.00909836e00, - 5.72232366e-02, - -3.90701966e-01, - -3.65655187e-01, - 2.33601017e-01, - 1.75039197e-01, - 2.52945224e-01, - 3.29783105e-01, - 7.85601834e-02, - -3.96439884e-01, - -8.56255814e-01, - 7.35131473e-03, - 1.10704423e00, - 7.35615877e-02, - 5.61772211e-01, - 2.60428522e-01, - -5.41932165e-01, - 4.29851425e-01, - 3.86300956e-01, - -8.90168838e-02, - -1.02959264e-01, - 6.03104058e-01, - 1.85286462e-01, - -4.16434930e-01, - 2.11092135e-01, - -1.85514653e-01, - 9.80712710e-02, - -8.98429489e-01, - -9.54759574e-02, - -1.17952459e-01, - 1.41721715e-01, - -1.36184702e-01, - 3.23733962e-01, - -2.68721792e-01, - -1.42064052e-01, - 1.41909797e-01, - -2.24251300e-03, - -4.27538724e-01, - 1.28441757e-01, - -5.57623000e-01, - -1.54801935e-01, - 6.51729903e-01, - -2.15567768e-01, - 1.95041528e-01, - -4.18334680e-01, - 3.26735913e-02, - 6.35474331e-02, - 3.06828631e-01, - 1.43149180e-01, - -2.34377520e-01, - -1.54299735e-01, - 2.82627560e-01, - 9.60630473e-02, - 1.47687304e-01, - 1.38157247e-01, - -4.25581692e-01, - -5.62236939e-01, - 2.09287213e-01, - -1.14280315e-01, - 2.70617650e-02, - -1.19705716e-01, - 1.68350236e-02, - -1.20459065e-01, - 6.03971532e-02, - 3.21465643e-01, - 1.82032297e-01, - -2.95991444e-02, - 1.53711400e-01, - -1.30594319e-01, - -4.71412485e-02, - -1.35301477e-01, - -2.36292616e-01, - 1.95728111e-01, - 2.54618329e-01, - -1.81663289e-03, - 2.77960420e-02, - 3.58816749e-02, - -2.50138365e-02, - 2.54103161e-01, - 9.82534014e-02, - 9.00807559e-02, - 3.71458771e-02, - -7.86838200e-02, - -1.03837231e-01, - -1.26116949e-01, - 9.82006976e-02, - ] - * self.r - ) - - logger.info(f"max abs diff: {np.max(np.abs(x.flatten() - ref))}") - self.assertTrue(np.allclose(x.flatten(), ref, atol=1e-4)) + """ def testOptimize2(self): - mean_b_coef = np.array( - [ - [ - 1.07338590e-01, - 1.23690941e-01, - 6.44482039e-03, - -5.40484306e-02, - -4.85304586e-02, - 1.09852144e-02, - 3.87838396e-02, - 3.43796455e-02, - -6.43284705e-03, - -2.86677145e-02, - -1.42313328e-02, - -2.25684091e-03, - -3.31840727e-02, - -2.59706174e-03, - -5.91919887e-04, - -9.97433028e-03, - 9.19123928e-04, - 1.19891589e-03, - 7.49154982e-03, - 6.18865229e-03, - -8.13265715e-04, - -1.30715655e-02, - -1.44160603e-02, - 2.90379956e-03, - 2.37066082e-02, - 4.88805735e-03, - 1.47870707e-03, - 7.63376018e-03, - -5.60619559e-03, - 1.05165081e-02, - 3.30510143e-03, - -3.48652120e-03, - -4.23228797e-04, - 1.40484061e-02, - 1.42914291e-03, - -1.28129504e-02, - 
2.19868825e-03, - -6.30835037e-03, - 1.18524223e-03, - -2.97855052e-02, - 1.15491057e-03, - -8.27947006e-03, - 3.45442781e-03, - -4.72868856e-03, - 2.66615329e-03, - -7.87929790e-03, - 8.84126590e-04, - 1.59402808e-03, - -9.06854048e-05, - -8.79119004e-03, - 1.76449039e-03, - -1.36414673e-02, - 1.56793855e-03, - 1.44708445e-02, - -2.55974802e-03, - 5.38506357e-03, - -3.24188673e-03, - 4.81582945e-04, - 7.74260101e-05, - 5.48772082e-03, - 1.92058500e-03, - -4.63538896e-03, - -2.02735133e-03, - 3.67592386e-03, - 7.23486969e-04, - 1.81838422e-03, - 1.78793284e-03, - -8.01474060e-03, - -8.54007528e-03, - 1.96353845e-03, - -2.16254252e-03, - -3.64243996e-05, - -2.27329863e-03, - 1.11424393e-03, - -1.39389189e-03, - 2.57787159e-04, - 3.66918811e-03, - 1.31477774e-03, - 6.82220128e-04, - 1.41822851e-03, - -1.89476924e-03, - -6.43966255e-05, - -7.87888465e-04, - -6.99459279e-04, - 1.08918981e-03, - 2.25264584e-03, - -1.43651015e-04, - 7.68377620e-04, - 5.05955256e-04, - 2.66936132e-06, - 2.24934884e-03, - 6.70529439e-04, - 4.81121742e-04, - -6.40789745e-05, - -3.35915672e-04, - -7.98651783e-04, - -9.82705453e-04, - 6.46337066e-05, - ] - ] - * self.r - ) - + """ x = self.estimator_with_preconditioner.conj_grad(mean_b_coef) - self.assertTrue( - np.allclose( - x, - [ - [ - 1.24325149e01, - 4.06481998e00, - 1.19149607e00, - -3.31414200e00, - -1.23897783e00, - 1.53987018e-01, - 2.50221093e00, - 9.18131863e-01, - 4.09624945e-02, - -1.81129255e00, - -2.58832135e-01, - -7.21149988e-01, - -1.00909836e00, - 5.72232366e-02, - -3.90701966e-01, - -3.65655187e-01, - 2.33601017e-01, - 1.75039197e-01, - 2.52945224e-01, - 3.29783105e-01, - 7.85601834e-02, - -3.96439884e-01, - -8.56255814e-01, - 7.35131473e-03, - 1.10704423e00, - 7.35615877e-02, - 5.61772211e-01, - 2.60428522e-01, - -5.41932165e-01, - 4.29851425e-01, - 3.86300956e-01, - -8.90168838e-02, - -1.02959264e-01, - 6.03104058e-01, - 1.85286462e-01, - -4.16434930e-01, - 2.11092135e-01, - -1.85514653e-01, - 9.80712710e-02, - -8.98429489e-01, - -9.54759574e-02, - -1.17952459e-01, - 1.41721715e-01, - -1.36184702e-01, - 3.23733962e-01, - -2.68721792e-01, - -1.42064052e-01, - 1.41909797e-01, - -2.24251300e-03, - -4.27538724e-01, - 1.28441757e-01, - -5.57623000e-01, - -1.54801935e-01, - 6.51729903e-01, - -2.15567768e-01, - 1.95041528e-01, - -4.18334680e-01, - 3.26735913e-02, - 6.35474331e-02, - 3.06828631e-01, - 1.43149180e-01, - -2.34377520e-01, - -1.54299735e-01, - 2.82627560e-01, - 9.60630473e-02, - 1.47687304e-01, - 1.38157247e-01, - -4.25581692e-01, - -5.62236939e-01, - 2.09287213e-01, - -1.14280315e-01, - 2.70617650e-02, - -1.19705716e-01, - 1.68350236e-02, - -1.20459065e-01, - 6.03971532e-02, - 3.21465643e-01, - 1.82032297e-01, - -2.95991444e-02, - 1.53711400e-01, - -1.30594319e-01, - -4.71412485e-02, - -1.35301477e-01, - -2.36292616e-01, - 1.95728111e-01, - 2.54618329e-01, - -1.81663289e-03, - 2.77960420e-02, - 3.58816749e-02, - -2.50138365e-02, - 2.54103161e-01, - 9.82534014e-02, - 9.00807559e-02, - 3.71458771e-02, - -7.86838200e-02, - -1.03837231e-01, - -1.26116949e-01, - 9.82006976e-02, - ] - ] - * self.r, - atol=1e-4, - ) - ) + """ def testNegativeWeightedEstimates(self): """ From 4574422783c50ff113cd862bdeb7413237944990 Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Tue, 30 Apr 2024 16:10:22 -0400 Subject: [PATCH 05/22] Remove legacy volume/sim call from mean est tests --- tests/test_mean_estimator.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/test_mean_estimator.py b/tests/test_mean_estimator.py 
index 00678ef5d1..2366d97e6e 100644 --- a/tests/test_mean_estimator.py +++ b/tests/test_mean_estimator.py @@ -8,9 +8,8 @@ from aspire.basis import Coef, FBBasis3D from aspire.operators import RadialCTFFilter from aspire.reconstruction import MeanEstimator -from aspire.source.simulation import _LegacySimulation +from aspire.source.simulation import Simulation from aspire.utils import grid_3d -from aspire.volume import LegacyVolume, Volume DATA_DIR = os.path.join(os.path.dirname(__file__), "saved_test_data") @@ -19,15 +18,16 @@ class MeanEstimatorTestCase(TestCase): def setUp(self): self.dtype = np.float32 self.L = 8 - self.vol = LegacyVolume(L=self.L, C=1, dtype=self.dtype).generate() - self.sim = _LegacySimulation( - n=1024, - vols=self.vol, + self.sim = Simulation( + n=512, + C=1, # single volume unique_filters=[ RadialCTFFilter(defocus=d) for d in np.linspace(1.5e4, 2.5e4, 7) ], dtype=self.dtype, + seed=1616, ) + # Todo, swap for default FFB self.basis = FBBasis3D((self.L,) * 3, dtype=self.dtype) self.estimator = MeanEstimator( @@ -57,10 +57,10 @@ def testEstimate(self): estimate = self.estimator.estimate() est = estimate.asnumpy() * self.mask - vol = self.vol.asnumpy() * self.mask + vol = self.sim.vols.asnumpy() * self.mask np.testing.assert_allclose( - est / np.linalg.norm(est), vol / np.linalg.norm(vol), atol=0.05 + est / np.linalg.norm(est), vol / np.linalg.norm(vol), atol=0.1 ) def testAdjoint(self): @@ -71,7 +71,7 @@ def testAdjoint(self): est = Coef(self.basis, mean_b_coef).evaluate() # Mask off corners of volume - vol = self.vol.asnumpy() * self.mask + vol = self.sim.vols.asnumpy() * self.mask # Assert the mean volume is close to original volume np.testing.assert_allclose( From 70eb82b473b4aa3ecf1492e92cf7210ccadae265 Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Tue, 30 Apr 2024 16:14:58 -0400 Subject: [PATCH 06/22] Remove legacy volume/sim call from wt mean est tests --- tests/test_weighted_mean_estimator.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/tests/test_weighted_mean_estimator.py b/tests/test_weighted_mean_estimator.py index b6637fbbd5..e8c6dd3caf 100644 --- a/tests/test_weighted_mean_estimator.py +++ b/tests/test_weighted_mean_estimator.py @@ -7,9 +7,8 @@ from aspire.basis import Coef, FBBasis3D from aspire.operators import RadialCTFFilter from aspire.reconstruction import WeightedVolumesEstimator -from aspire.source import _LegacySimulation +from aspire.source import Simulation from aspire.utils import grid_3d -from aspire.volume import LegacyVolume logger = logging.getLogger(__name__) @@ -19,17 +18,19 @@ class WeightedVolumesEstimatorTestCase(TestCase): def setUp(self): self.dtype = np.float32 - self.n = 1024 + self.n = 512 self.r = 2 self.L = L = 8 - self.sim = _LegacySimulation( - vols=LegacyVolume(L, C=1, dtype=self.dtype).generate(), + self.sim = Simulation( n=self.n, + C=1, # single volume unique_filters=[ RadialCTFFilter(defocus=d) for d in np.linspace(1.5e4, 2.5e4, 7) ], dtype=self.dtype, + seed=1617, ) + # Todo, swap for default FFB3D self.basis = FBBasis3D((L, L, L), dtype=self.dtype) self.weights = np.ones((self.n, self.r)) / np.sqrt(self.n) self.estimator = WeightedVolumesEstimator( @@ -53,7 +54,7 @@ def testPositiveWeightedEstimates(self): # Compare each output volume for _est in est: np.testing.assert_allclose( - _est / np.linalg.norm(_est), vol / np.linalg.norm(vol), atol=0.05 + _est / np.linalg.norm(_est), vol / np.linalg.norm(vol), atol=0.1 ) def testAdjoint(self): @@ -69,7 +70,7 @@ def 
testAdjoint(self): # Assert the mean volumes are close to original volume for _est in est: np.testing.assert_allclose( - _est / np.linalg.norm(est), vol / np.linalg.norm(vol), atol=0.1 + _est / np.linalg.norm(_est), vol / np.linalg.norm(vol), atol=0.1 ) def testOptimize1(self): @@ -102,10 +103,10 @@ def testNegativeWeightedEstimates(self): # Compare positive weighted output volume np.testing.assert_allclose( - est[0] / np.linalg.norm(est[0]), vol / np.linalg.norm(vol), atol=0.05 + est[0] / np.linalg.norm(est[0]), vol / np.linalg.norm(vol), atol=0.1 ) # Compare negative weighted output volume np.testing.assert_allclose( - -1 * est[1] / np.linalg.norm(est[1]), vol / np.linalg.norm(vol), atol=0.05 + -1 * est[1] / np.linalg.norm(est[1]), vol / np.linalg.norm(vol), atol=0.1 ) From b473c9ea2019fd226026d0fa119295ba2f1ef591 Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Wed, 1 May 2024 10:18:33 -0400 Subject: [PATCH 07/22] refactor mean_est towards pytest --- tests/test_mean_estimator.py | 243 ++++++++++++++------------ tests/test_weighted_mean_estimator.py | 10 -- 2 files changed, 134 insertions(+), 119 deletions(-) diff --git a/tests/test_mean_estimator.py b/tests/test_mean_estimator.py index 2366d97e6e..521d13e020 100644 --- a/tests/test_mean_estimator.py +++ b/tests/test_mean_estimator.py @@ -1,8 +1,8 @@ import os.path import tempfile -from unittest import TestCase import numpy as np +import pytest from pytest import raises from aspire.basis import Coef, FBBasis3D @@ -13,129 +13,154 @@ DATA_DIR = os.path.join(os.path.dirname(__file__), "saved_test_data") +# Params -class MeanEstimatorTestCase(TestCase): - def setUp(self): - self.dtype = np.float32 - self.L = 8 - self.sim = Simulation( - n=512, - C=1, # single volume - unique_filters=[ - RadialCTFFilter(defocus=d) for d in np.linspace(1.5e4, 2.5e4, 7) - ], - dtype=self.dtype, - seed=1616, - ) - # Todo, swap for default FFB - self.basis = FBBasis3D((self.L,) * 3, dtype=self.dtype) +SEED = 1616 - self.estimator = MeanEstimator( - self.sim, basis=self.basis, preconditioner="none" - ) +DTYPE = [np.float32, np.float64] +L = [ + 8, + 9, +] - self.estimator_with_preconditioner = MeanEstimator( - self.sim, basis=self.basis, preconditioner="circulant" - ) - self.mask = grid_3d(self.L)["r"] < 1 +PRECONDITIONERS = ["none", None] # default, circulant - def tearDown(self): - pass +# Fixtures. - def testEstimateResolutionError(self): - """ - Test mismatched resolutions yields a relevant error message. - """ - with raises(ValueError, match=r".*resolution.*"): - # This basis is intentionally the wrong resolution. 
- incorrect_basis = FBBasis3D((2 * self.L,) * 3, dtype=self.dtype) +@pytest.fixture(params=L, ids=lambda x: f"L={x}", scope="module") +def L(request): + return request.param - _ = MeanEstimator(self.sim, basis=incorrect_basis, preconditioner="none") - def testEstimate(self): - estimate = self.estimator.estimate() +@pytest.fixture(params=DTYPE, ids=lambda x: f"dtype={x}", scope="module") +def dtype(request): + return request.param - est = estimate.asnumpy() * self.mask - vol = self.sim.vols.asnumpy() * self.mask - np.testing.assert_allclose( - est / np.linalg.norm(est), vol / np.linalg.norm(vol), atol=0.1 - ) +@pytest.fixture(scope="module") +def sim(L, dtype): + sim = Simulation( + L=L, + n=256, + C=1, # single volume + unique_filters=[ + RadialCTFFilter(defocus=d) for d in np.linspace(1.5e4, 2.5e4, 7) + ], + dtype=dtype, + seed=SEED, + ) + + sim = sim.cache() # precompute images + + return sim + + +@pytest.fixture(scope="module") +def basis(L, dtype): + return FBBasis3D(L, dtype=dtype) + + +@pytest.fixture( + params=PRECONDITIONERS, ids=lambda x: f"preconditioner={x}", scope="module" +) +def estimator(request, sim, basis): + preconditioner = request.param + return MeanEstimator(sim, basis=basis, preconditioner=preconditioner) + + +@pytest.fixture(scope="module") +def mask(L): + return grid_3d(L)["r"] < 1 - def testAdjoint(self): - # Mean coefs formed by backprojections - mean_b_coef = self.estimator.src_backward() - # Evaluate mean coefs into a volume - est = Coef(self.basis, mean_b_coef).evaluate() +# Tests +def test_estimate_resolution_error(sim, basis): + """ + Test mismatched resolutions yields a relevant error message. + """ - # Mask off corners of volume - vol = self.sim.vols.asnumpy() * self.mask + with raises(ValueError, match=r".*resolution.*"): + # This basis is intentionally the wrong resolution. 
+ incorrect_basis = FBBasis3D(sim.L + 1, dtype=sim.dtype) - # Assert the mean volume is close to original volume - np.testing.assert_allclose( - est / np.linalg.norm(est), vol / np.linalg.norm(vol), atol=0.1 + _ = MeanEstimator(sim, basis=incorrect_basis, preconditioner="none") + + +def test_estimate(sim, estimator, mask): + estimate = estimator.estimate() + + est = estimate.asnumpy() * mask + vol = sim.vols.asnumpy() * mask + + np.testing.assert_allclose( + est / np.linalg.norm(est), vol / np.linalg.norm(vol), atol=0.1 + ) + + +def test_adjoint(sim, basis, estimator, mask): + # Mean coefs formed by backprojections + mean_b_coef = estimator.src_backward() + + # Evaluate mean coefs into a volume + est = Coef(basis, mean_b_coef).evaluate() * mask + + # Mask off corners of volume + vol = sim.vols.asnumpy() * mask + + # Assert the mean volume is close to original volume + np.testing.assert_allclose( + est / np.linalg.norm(est), vol / np.linalg.norm(vol), atol=0.11 + ) + + +def test_checkpoint(sim, basis, estimator): + """Exercise the checkpointing and max iterations branches.""" + test_iter = 2 + with tempfile.TemporaryDirectory() as tmp_input_dir: + prefix = os.path.join(tmp_input_dir, "new", "dirs", "chk") + _estimator = MeanEstimator( + sim, + basis=basis, + preconditioner="none", + checkpoint_iterations=test_iter, + maxiter=test_iter + 1, + checkpoint_prefix=prefix, ) - def testOptimize1(self): - """ - x = self.estimator.conj_grad(mean_b_coef) - """ - - def testOptimize2(self): - """ - x = self.estimator_with_preconditioner.conj_grad(mean_b_coef) - """ - - def testCheckpoint(self): - """Exercise the checkpointing and max iterations branches.""" - test_iter = 2 - with tempfile.TemporaryDirectory() as tmp_input_dir: - prefix = os.path.join(tmp_input_dir, "new", "dirs", "chk") - estimator = MeanEstimator( - self.sim, - basis=self.basis, - preconditioner="none", - checkpoint_iterations=test_iter, - maxiter=test_iter + 1, - checkpoint_prefix=prefix, - ) - - # Assert we raise when reading `maxiter`. - with raises(RuntimeError, match="Unable to converge!"): - _ = estimator.estimate() - - # Load the checkpoint coefficients while tmp_input_dir exists. - b_chk = np.load(f"{prefix}_iter{test_iter:04d}.npy") + # Assert we raise when reading `maxiter`. + with raises(RuntimeError, match="Unable to converge!"): + _ = _estimator.estimate() + + # Load the checkpoint coefficients while tmp_input_dir exists. 
+ b_chk = np.load(f"{prefix}_iter{test_iter:04d}.npy") # Restart estimate from checkpoint - _ = self.estimator.estimate(b_coef=b_chk) - - def testCheckpointArgs(self): - with tempfile.TemporaryDirectory() as tmp_input_dir: - prefix = os.path.join(tmp_input_dir, "chk") - - for junk in [-1, 0, "abc"]: - # Junk `checkpoint_iterations` values - with raises( - ValueError, match=r".*iterations.*should be a positive integer.*" - ): - _ = MeanEstimator( - self.sim, - basis=self.basis, - preconditioner="none", - checkpoint_iterations=junk, - checkpoint_prefix=prefix, - ) - # Junk `maxiter` values - with raises( - ValueError, match=r".*maxiter.*should be a positive integer.*" - ): - _ = MeanEstimator( - self.sim, - basis=self.basis, - preconditioner="none", - maxiter=junk, - checkpoint_prefix=prefix, - ) + _ = estimator.estimate(b_coef=b_chk) + + +def test_checkpoint_args(sim, basis): + with tempfile.TemporaryDirectory() as tmp_input_dir: + prefix = os.path.join(tmp_input_dir, "chk") + + for junk in [-1, 0, "abc"]: + # Junk `checkpoint_iterations` values + with raises( + ValueError, match=r".*iterations.*should be a positive integer.*" + ): + _ = MeanEstimator( + sim, + basis=basis, + preconditioner="none", + checkpoint_iterations=junk, + checkpoint_prefix=prefix, + ) + # Junk `maxiter` values + with raises(ValueError, match=r".*maxiter.*should be a positive integer.*"): + _ = MeanEstimator( + sim, + basis=basis, + preconditioner="none", + maxiter=junk, + checkpoint_prefix=prefix, + ) diff --git a/tests/test_weighted_mean_estimator.py b/tests/test_weighted_mean_estimator.py index e8c6dd3caf..399b7ff47c 100644 --- a/tests/test_weighted_mean_estimator.py +++ b/tests/test_weighted_mean_estimator.py @@ -73,16 +73,6 @@ def testAdjoint(self): _est / np.linalg.norm(_est), vol / np.linalg.norm(vol), atol=0.1 ) - def testOptimize1(self): - """ - x = self.estimator.conj_grad(mean_b_coef) - """ - - def testOptimize2(self): - """ - x = self.estimator_with_preconditioner.conj_grad(mean_b_coef) - """ - def testNegativeWeightedEstimates(self): """ Here we'll test createing two volumes. From 3ecac64f349d3eed113de135a36a27662bc9d605 Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Thu, 2 May 2024 08:53:29 -0400 Subject: [PATCH 08/22] fix small precon init bug --- src/aspire/reconstruction/mean.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/aspire/reconstruction/mean.py b/src/aspire/reconstruction/mean.py index 22856d9196..bebdb54b78 100644 --- a/src/aspire/reconstruction/mean.py +++ b/src/aspire/reconstruction/mean.py @@ -64,7 +64,9 @@ def __getattr__(self, name): 1.0 / self.kernel.circularize() ) else: - if self.preconditioner.lower() not in (None, "none"): + if self.preconditioner and ( + self.preconditioner.lower() not in (None, "none") + ): logger.warning( f"Preconditioner {self.preconditioner} is not implemented, resetting to default of None." 
) From 9ec354a2bcde3cbc75f113aa3f9123920f58dbe0 Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Thu, 2 May 2024 08:54:47 -0400 Subject: [PATCH 09/22] cleanup unneeded asnumpy calls in mean estimate --- tests/test_mean_estimator.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/test_mean_estimator.py b/tests/test_mean_estimator.py index 521d13e020..a4412e59d7 100644 --- a/tests/test_mean_estimator.py +++ b/tests/test_mean_estimator.py @@ -75,7 +75,7 @@ def mask(L): # Tests -def test_estimate_resolution_error(sim, basis): +def test_resolution_error(sim, basis): """ Test mismatched resolutions yields a relevant error message. """ @@ -90,8 +90,8 @@ def test_estimate_resolution_error(sim, basis): def test_estimate(sim, estimator, mask): estimate = estimator.estimate() - est = estimate.asnumpy() * mask - vol = sim.vols.asnumpy() * mask + est = estimate * mask + vol = sim.vols * mask np.testing.assert_allclose( est / np.linalg.norm(est), vol / np.linalg.norm(vol), atol=0.1 @@ -106,7 +106,7 @@ def test_adjoint(sim, basis, estimator, mask): est = Coef(basis, mean_b_coef).evaluate() * mask # Mask off corners of volume - vol = sim.vols.asnumpy() * mask + vol = sim.vols * mask # Assert the mean volume is close to original volume np.testing.assert_allclose( From c18fbe0c66c991486b863f6c205b745160e8023e Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Thu, 2 May 2024 09:03:40 -0400 Subject: [PATCH 10/22] refactor weigthed mean est test --- tests/test_weighted_mean_estimator.py | 247 +++++++++++++++++--------- 1 file changed, 166 insertions(+), 81 deletions(-) diff --git a/tests/test_weighted_mean_estimator.py b/tests/test_weighted_mean_estimator.py index 399b7ff47c..dccd570e14 100644 --- a/tests/test_weighted_mean_estimator.py +++ b/tests/test_weighted_mean_estimator.py @@ -1,102 +1,187 @@ -import logging import os.path -from unittest import TestCase +import tempfile import numpy as np +import pytest +from pytest import raises from aspire.basis import Coef, FBBasis3D from aspire.operators import RadialCTFFilter from aspire.reconstruction import WeightedVolumesEstimator -from aspire.source import Simulation +from aspire.source.simulation import Simulation from aspire.utils import grid_3d -logger = logging.getLogger(__name__) - DATA_DIR = os.path.join(os.path.dirname(__file__), "saved_test_data") +# Params -class WeightedVolumesEstimatorTestCase(TestCase): - def setUp(self): - self.dtype = np.float32 - self.n = 512 - self.r = 2 - self.L = L = 8 - self.sim = Simulation( - n=self.n, - C=1, # single volume - unique_filters=[ - RadialCTFFilter(defocus=d) for d in np.linspace(1.5e4, 2.5e4, 7) - ], - dtype=self.dtype, - seed=1617, - ) - # Todo, swap for default FFB3D - self.basis = FBBasis3D((L, L, L), dtype=self.dtype) - self.weights = np.ones((self.n, self.r)) / np.sqrt(self.n) - self.estimator = WeightedVolumesEstimator( - self.weights, self.sim, basis=self.basis, preconditioner="none" - ) - self.estimator_with_preconditioner = WeightedVolumesEstimator( - self.weights, self.sim, basis=self.basis, preconditioner="circulant" - ) - self.mask = grid_3d(self.L)["r"] < 1 - - def tearDown(self): - pass - - def testPositiveWeightedEstimates(self): - estimate = self.estimator.estimate() - - est = estimate * self.mask - vol = self.sim.vols.asnumpy() * self.mask - vol /= np.linalg.norm(vol) - - # Compare each output volume - for _est in est: - np.testing.assert_allclose( - _est / np.linalg.norm(_est), vol / np.linalg.norm(vol), atol=0.1 - ) - - def testAdjoint(self): - # Mean 
coefs formed by backprojections - mean_b_coef = self.estimator.src_backward() - - # Evaluate mean coefs into a volume - est = Coef(self.basis, mean_b_coef).evaluate() - - # Mask off corners of volume - vol = self.sim.vols.asnumpy() * self.mask - - # Assert the mean volumes are close to original volume - for _est in est: - np.testing.assert_allclose( - _est / np.linalg.norm(_est), vol / np.linalg.norm(vol), atol=0.1 - ) - - def testNegativeWeightedEstimates(self): - """ - Here we'll test createing two volumes. - One with positive and another with negative weights. - """ - weights = np.ones((self.n, self.r)) / np.sqrt(self.n) - weights[:, 1] *= -1 # negate second set of weights - - estimator = WeightedVolumesEstimator( - weights, self.sim, basis=self.basis, preconditioner="none" +SEED = 1617 + +DTYPE = [np.float32, np.float64] +L = [ + 8, + 9, +] + +PRECONDITIONERS = ["none", None] + +# Fixtures. + + +@pytest.fixture(params=L, ids=lambda x: f"L={x}", scope="module") +def L(request): + return request.param + + +@pytest.fixture(params=DTYPE, ids=lambda x: f"dtype={x}", scope="module") +def dtype(request): + return request.param + + +@pytest.fixture(scope="module") +def sim(L, dtype): + sim = Simulation( + L=L, + n=256, + C=1, # single volume + unique_filters=[ + RadialCTFFilter(defocus=d) for d in np.linspace(1.5e4, 2.5e4, 7) + ], + dtype=dtype, + seed=SEED, + ) + + sim = sim.cache() # precompute images + + return sim + + +@pytest.fixture(scope="module") +def basis(L, dtype): + return FBBasis3D(L, dtype=dtype) + + +@pytest.fixture(scope="module") +def weights(sim): + # Construct simple test weights; + # one uniform positive and negative weighted volume respectively. + r = 2 # Number of weighted volumes + weights = np.ones((sim.n, r)) / np.sqrt(sim.n) + weights[:, 1] *= -1 # negate second weight vector + + return weights + + +@pytest.fixture( + params=PRECONDITIONERS, ids=lambda x: f"preconditioner={x}", scope="module" +) +def estimator(request, sim, basis, weights): + preconditioner = request.param + + return WeightedVolumesEstimator( + weights, sim, basis=basis, preconditioner=preconditioner + ) + + +@pytest.fixture(scope="module") +def mask(L): + return grid_3d(L)["r"] < 1 + + +# Tests +def test_resolution_error(sim, basis, weights): + """ + Test mismatched resolutions yields a relevant error message. + """ + + with raises(ValueError, match=r".*resolution.*"): + # This basis is intentionally the wrong resolution. 
+ incorrect_basis = FBBasis3D(sim.L + 1, dtype=sim.dtype) + + _ = WeightedVolumesEstimator( + weights, sim, basis=incorrect_basis, preconditioner="none" ) - estimate = estimator.estimate() - est = estimate * self.mask - vol = self.sim.vols.asnumpy() * self.mask - vol /= np.linalg.norm(vol) +def test_estimate(sim, estimator, mask): + estimate = estimator.estimate() - # Compare positive weighted output volume + est = estimate * mask + vol = sim.vols * mask + + for i, w in enumerate([1, -1]): np.testing.assert_allclose( - est[0] / np.linalg.norm(est[0]), vol / np.linalg.norm(vol), atol=0.1 + w * est[i] / np.linalg.norm(est[i]), vol / np.linalg.norm(vol), atol=0.1 ) - # Compare negative weighted output volume + +def test_adjoint(sim, basis, estimator, mask): + # Mean coefs formed by backprojections + mean_b_coef = estimator.src_backward() + + # Evaluate mean coefs into a volume + est = Coef(basis, mean_b_coef).evaluate() + + # Mask off corners of volume + vol = sim.vols * mask + + # Assert the mean volume is close to original volume + for i, w in enumerate([1, -1]): np.testing.assert_allclose( - -1 * est[1] / np.linalg.norm(est[1]), vol / np.linalg.norm(vol), atol=0.1 + w * est[i] / np.linalg.norm(est[i]), vol / np.linalg.norm(vol), atol=0.11 ) + + +def test_checkpoint(sim, basis, estimator, weights): + """Exercise the checkpointing and max iterations branches.""" + test_iter = 2 + with tempfile.TemporaryDirectory() as tmp_input_dir: + prefix = os.path.join(tmp_input_dir, "new", "dirs", "chk") + _estimator = WeightedVolumesEstimator( + weights, + sim, + basis=basis, + preconditioner="none", + checkpoint_iterations=test_iter, + maxiter=test_iter + 1, + checkpoint_prefix=prefix, + ) + + # Assert we raise when reading `maxiter`. + with raises(RuntimeError, match="Unable to converge!"): + _ = _estimator.estimate() + + # Load the checkpoint coefficients while tmp_input_dir exists. 
+ b_chk = np.load(f"{prefix}_iter{test_iter:04d}.npy") + + # Restart estimate from checkpoint + _ = estimator.estimate(b_coef=b_chk) + + +def test_checkpoint_args(sim, basis, weights): + with tempfile.TemporaryDirectory() as tmp_input_dir: + prefix = os.path.join(tmp_input_dir, "chk") + + for junk in [-1, 0, "abc"]: + # Junk `checkpoint_iterations` values + with raises( + ValueError, match=r".*iterations.*should be a positive integer.*" + ): + _ = WeightedVolumesEstimator( + weights, + sim, + basis=basis, + preconditioner="none", + checkpoint_iterations=junk, + checkpoint_prefix=prefix, + ) + # Junk `maxiter` values + with raises(ValueError, match=r".*maxiter.*should be a positive integer.*"): + _ = WeightedVolumesEstimator( + weights, + sim, + basis=basis, + preconditioner="none", + maxiter=junk, + checkpoint_prefix=prefix, + ) From 9af8f345fe7eed3e94f6adba542ce7aba79250d0 Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Thu, 2 May 2024 11:51:16 -0400 Subject: [PATCH 11/22] covar 3d pts orientation swap and m_ rm --- src/aspire/covariance/covar.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/aspire/covariance/covar.py b/src/aspire/covariance/covar.py index 3e9a64f545..21da816cee 100644 --- a/src/aspire/covariance/covar.py +++ b/src/aspire/covariance/covar.py @@ -57,7 +57,7 @@ def compute_kernel(self): weights[:, 0, :] = 0 # TODO: This is where this differs from MeanEstimator - pts_rot = np.moveaxis(pts_rot[::-1], 1, 0).reshape(-1, 3, L**2) + pts_rot = np.moveaxis(pts_rot, 1, 0).reshape(-1, 3, L**2) weights = weights.T.reshape((-1, L**2)) batch_n = weights.shape[0] @@ -67,7 +67,7 @@ def compute_kernel(self): factors[j] = anufft(weights[j], pts_rot[j], (_2L, _2L, _2L), real=True) factors = Volume(factors).to_vec() - kernel += vecmat_to_volmat(factors.T @ factors) / (n * L**8) + kernel += (factors.T @ factors).reshape((_2L,) * 6) / (n * L**8) # Ensure symmetric kernel kernel[0, :, :, :, :, :] = 0 @@ -90,6 +90,8 @@ def estimate(self, mean_vol, noise_variance, tol=1e-5, regularizer=0): b_coef = self.src_backward(mean_vol, noise_variance) est_coef = self.conj_grad(b_coef, tol=tol, regularizer=regularizer) covar_est = self.basis.mat_evaluate(est_coef) + # Note, notice these cancel out, but requires a lot of changes elsewhere in this file, + # basically totally removing all the `utils/matrix` hacks ... todo. 
covar_est = vecmat_to_volmat(make_symmat(volmat_to_vecmat(covar_est))) return covar_est @@ -180,7 +182,9 @@ def src_backward(self, mean_vol, noise_variance, shrink_method=None): im_centered_b[j] = self.src.im_backward(im_centered[j], i + j) im_centered_b = Volume(im_centered_b).to_vec() - covar_b += vecmat_to_volmat(im_centered_b.T @ im_centered_b) / self.src.n + covar_b += (im_centered_b.T @ im_centered_b).reshape( + (self.src.L,) * 6 + ) / self.src.n covar_b_coef = self.basis.mat_evaluate_t(covar_b) return self._shrink(covar_b_coef, noise_variance, shrink_method) From 3e926f32ac8aed678ce135881a836c783bf62111 Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Thu, 2 May 2024 13:47:05 -0400 Subject: [PATCH 12/22] None has no lower, rm --- src/aspire/reconstruction/mean.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aspire/reconstruction/mean.py b/src/aspire/reconstruction/mean.py index bebdb54b78..7c188bbda9 100644 --- a/src/aspire/reconstruction/mean.py +++ b/src/aspire/reconstruction/mean.py @@ -65,7 +65,7 @@ def __getattr__(self, name): ) else: if self.preconditioner and ( - self.preconditioner.lower() not in (None, "none") + self.preconditioner.lower() not in ("none") ): logger.warning( f"Preconditioner {self.preconditioner} is not implemented, resetting to default of None." From e1e01efdd9fd5755e7517e659ddfb69fe07dc297 Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Thu, 16 May 2024 08:44:18 -0400 Subject: [PATCH 13/22] pass checkpoint as x0, rename x_chk --- src/aspire/reconstruction/estimator.py | 4 ++-- src/aspire/reconstruction/mean.py | 5 +++-- tests/test_mean_estimator.py | 4 ++-- tests/test_weighted_mean_estimator.py | 4 ++-- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/src/aspire/reconstruction/estimator.py b/src/aspire/reconstruction/estimator.py index 3e154800bb..d6455b6ea7 100644 --- a/src/aspire/reconstruction/estimator.py +++ b/src/aspire/reconstruction/estimator.py @@ -128,11 +128,11 @@ def __getattr__(self, name): def compute_kernel(self): raise NotImplementedError("Subclasses must implement the compute_kernel method") - def estimate(self, b_coef=None, tol=1e-5, regularizer=0): + def estimate(self, b_coef=None, x0=None, tol=1e-5, regularizer=0): """Return an estimate as a Volume instance.""" if b_coef is None: b_coef = self.src_backward() - est_coef = self.conj_grad(b_coef, tol=tol, regularizer=regularizer) + est_coef = self.conj_grad(b_coef, x0=x0, tol=tol, regularizer=regularizer) est = Coef(self.basis, est_coef).evaluate() return est diff --git a/src/aspire/reconstruction/mean.py b/src/aspire/reconstruction/mean.py index 7c188bbda9..d298ae3272 100644 --- a/src/aspire/reconstruction/mean.py +++ b/src/aspire/reconstruction/mean.py @@ -191,7 +191,7 @@ def src_backward(self): return res - def conj_grad(self, b_coef, tol=1e-5, regularizer=0): + def conj_grad(self, b_coef, x0=None, tol=1e-5, regularizer=0): count = b_coef.shape[-1] # b_coef should be (r, basis.count) kernel = self.kernel @@ -242,12 +242,13 @@ def cb(xk): # Construct checkpoint path path = f"{self.checkpoint_prefix}_iter{self.i:04d}.npy" # Write out the current solution - np.save(path, xk.reshape(self.r, self.basis.count)) + np.save(path, xk) logger.info(f"Checkpoint saved to `{path}`") x, info = cg( operator, b_coef.flatten(), + x0=x0, M=M, callback=cb, tol=tol, diff --git a/tests/test_mean_estimator.py b/tests/test_mean_estimator.py index a4412e59d7..7f2408b093 100644 --- a/tests/test_mean_estimator.py +++ b/tests/test_mean_estimator.py @@ -133,10 +133,10 @@ 
def test_checkpoint(sim, basis, estimator): _ = _estimator.estimate() # Load the checkpoint coefficients while tmp_input_dir exists. - b_chk = np.load(f"{prefix}_iter{test_iter:04d}.npy") + x_chk = np.load(f"{prefix}_iter{test_iter:04d}.npy") # Restart estimate from checkpoint - _ = estimator.estimate(b_coef=b_chk) + _ = estimator.estimate(x0=x_chk) def test_checkpoint_args(sim, basis): diff --git a/tests/test_weighted_mean_estimator.py b/tests/test_weighted_mean_estimator.py index dccd570e14..00b6939fa4 100644 --- a/tests/test_weighted_mean_estimator.py +++ b/tests/test_weighted_mean_estimator.py @@ -152,10 +152,10 @@ def test_checkpoint(sim, basis, estimator, weights): _ = _estimator.estimate() # Load the checkpoint coefficients while tmp_input_dir exists. - b_chk = np.load(f"{prefix}_iter{test_iter:04d}.npy") + x_chk = np.load(f"{prefix}_iter{test_iter:04d}.npy") # Restart estimate from checkpoint - _ = estimator.estimate(b_coef=b_chk) + _ = estimator.estimate(x0=x_chk) def test_checkpoint_args(sim, basis, weights): From 4936dca5389c79a54eee528a1b76a12fd1d0ecbc Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Thu, 16 May 2024 09:07:00 -0400 Subject: [PATCH 14/22] Call mean est tests with circulant --- src/aspire/reconstruction/estimator.py | 7 +++++++ tests/test_mean_estimator.py | 2 +- tests/test_weighted_mean_estimator.py | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/src/aspire/reconstruction/estimator.py b/src/aspire/reconstruction/estimator.py index d6455b6ea7..9d62a0b765 100644 --- a/src/aspire/reconstruction/estimator.py +++ b/src/aspire/reconstruction/estimator.py @@ -56,6 +56,13 @@ def __init__( self.basis = basis self.dtype = self.src.dtype self.batch_size = batch_size + if not preconditioner or preconditioner.lower() == "none": + # Resolve None and string nones to None + preconditioner = None + elif preconditioner not in ["circulant"]: + raise ValueError( + f"Supplied preconditioner {preconditioner} is not supported." + ) self.preconditioner = preconditioner self.boost = boost diff --git a/tests/test_mean_estimator.py b/tests/test_mean_estimator.py index 7f2408b093..5e04e2757f 100644 --- a/tests/test_mean_estimator.py +++ b/tests/test_mean_estimator.py @@ -23,7 +23,7 @@ 9, ] -PRECONDITIONERS = ["none", None] # default, circulant +PRECONDITIONERS = ["none", None, "circulant"] # Fixtures. diff --git a/tests/test_weighted_mean_estimator.py b/tests/test_weighted_mean_estimator.py index 00b6939fa4..f5f13297d0 100644 --- a/tests/test_weighted_mean_estimator.py +++ b/tests/test_weighted_mean_estimator.py @@ -23,7 +23,7 @@ 9, ] -PRECONDITIONERS = ["none", None] +PRECONDITIONERS = ["none", None, "circulant"] # Fixtures. From 031fd170898ebaa0af56040d66262905a1a63f6c Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Thu, 16 May 2024 09:17:56 -0400 Subject: [PATCH 15/22] Call extra mean est none-ish tests as expensive --- tests/test_mean_estimator.py | 7 ++++++- tests/test_weighted_mean_estimator.py | 7 ++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/tests/test_mean_estimator.py b/tests/test_mean_estimator.py index 5e04e2757f..46fa579150 100644 --- a/tests/test_mean_estimator.py +++ b/tests/test_mean_estimator.py @@ -23,7 +23,12 @@ 9, ] -PRECONDITIONERS = ["none", None, "circulant"] +PRECONDITIONERS = [ + None, + "circulant", + pytest.param("none", marks=pytest.mark.expensive), + pytest.param("", marks=pytest.mark.expensive), +] # Fixtures. 
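A minimal sketch of the checkpoint/restart pattern these patches settle on, assuming a toy symmetric positive definite system in place of the estimator's kernel operator; the path `cg_chk.npy` and the helper `save_checkpoint` are illustrative stand-ins for the `{checkpoint_prefix}_iterNNNN.npy` files written by the CG callback above, not ASPIRE names.

# Sketch only: ASPIRE wraps scipy's cg similarly, but with its own kernel
# LinearOperator, optional preconditioner M, and checkpoint file naming.
import numpy as np
from scipy.sparse.linalg import LinearOperator, cg

n = 64
rng = np.random.default_rng(0)
A = rng.standard_normal((n, n))
A = A @ A.T + n * np.eye(n)             # symmetric positive definite toy system
b = rng.standard_normal(n)
op = LinearOperator((n, n), matvec=lambda x: A @ x, dtype=A.dtype)

checkpoint_path = "cg_chk.npy"          # hypothetical, stands in for f"{prefix}_iter{i:04d}.npy"

def save_checkpoint(xk):
    # scipy calls this after each CG iteration with the current solution iterate.
    np.save(checkpoint_path, xk)

# First (interrupted) run: stop after a few iterations, leaving a checkpoint behind.
x_partial, _ = cg(op, b, callback=save_checkpoint, maxiter=3)

# Later run: resume from the saved iterate by passing it back in as `x0`.
x_chk = np.load(checkpoint_path)
x_final, info = cg(op, b, x0=x_chk)     # info == 0 on convergence

The point of the test change above is the last line: the array written by the callback is the raw solver iterate, so a restart feeds it through `estimate(x0=...)` into `conj_grad`/`cg` as `x0`, rather than treating it as a `b_coef` right-hand side.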
diff --git a/tests/test_weighted_mean_estimator.py b/tests/test_weighted_mean_estimator.py index f5f13297d0..f1184caa80 100644 --- a/tests/test_weighted_mean_estimator.py +++ b/tests/test_weighted_mean_estimator.py @@ -23,7 +23,12 @@ 9, ] -PRECONDITIONERS = ["none", None, "circulant"] +PRECONDITIONERS = [ + None, + "circulant", + pytest.param("none", marks=pytest.mark.expensive), + pytest.param("", marks=pytest.mark.expensive), +] # Fixtures. From 4a53d293fa073e6692ceedb07c31320e1fc8c6f8 Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Wed, 29 May 2024 14:48:39 -0400 Subject: [PATCH 16/22] test adjoint formula in mean_est --- tests/test_mean_estimator.py | 30 +++++++++++++++++---------- tests/test_weighted_mean_estimator.py | 19 +---------------- 2 files changed, 20 insertions(+), 29 deletions(-) diff --git a/tests/test_mean_estimator.py b/tests/test_mean_estimator.py index 46fa579150..263831e250 100644 --- a/tests/test_mean_estimator.py +++ b/tests/test_mean_estimator.py @@ -5,11 +5,13 @@ import pytest from pytest import raises -from aspire.basis import Coef, FBBasis3D +from aspire.basis import FBBasis3D +from aspire.image import Image from aspire.operators import RadialCTFFilter from aspire.reconstruction import MeanEstimator from aspire.source.simulation import Simulation from aspire.utils import grid_3d +from aspire.volume import Volume DATA_DIR = os.path.join(os.path.dirname(__file__), "saved_test_data") @@ -104,19 +106,25 @@ def test_estimate(sim, estimator, mask): def test_adjoint(sim, basis, estimator, mask): - # Mean coefs formed by backprojections - mean_b_coef = estimator.src_backward() + """ + Test = + for random volume `v` and random images `u`. + """ + rots = sim.rotations - # Evaluate mean coefs into a volume - est = Coef(basis, mean_b_coef).evaluate() * mask + L = sim.L + n = sim.n - # Mask off corners of volume - vol = sim.vols * mask + u = np.random.rand(n, L, L).astype(sim.dtype, copy=False) + v = np.random.rand(L, L, L).astype(sim.dtype, copy=False) - # Assert the mean volume is close to original volume - np.testing.assert_allclose( - est / np.linalg.norm(est), vol / np.linalg.norm(vol), atol=0.11 - ) + proj = Volume(v).project(rots) + backproj = Image(u).backproject(rots) + + lhs = np.dot(proj.asnumpy().flatten(), u.flatten()) + rhs = np.dot(backproj.asnumpy().flatten(), v.flatten()) + + np.testing.assert_allclose(lhs, rhs, rtol=1e-6) def test_checkpoint(sim, basis, estimator): diff --git a/tests/test_weighted_mean_estimator.py b/tests/test_weighted_mean_estimator.py index f1184caa80..8c93a848ca 100644 --- a/tests/test_weighted_mean_estimator.py +++ b/tests/test_weighted_mean_estimator.py @@ -5,7 +5,7 @@ import pytest from pytest import raises -from aspire.basis import Coef, FBBasis3D +from aspire.basis import FBBasis3D from aspire.operators import RadialCTFFilter from aspire.reconstruction import WeightedVolumesEstimator from aspire.source.simulation import Simulation @@ -120,23 +120,6 @@ def test_estimate(sim, estimator, mask): ) -def test_adjoint(sim, basis, estimator, mask): - # Mean coefs formed by backprojections - mean_b_coef = estimator.src_backward() - - # Evaluate mean coefs into a volume - est = Coef(basis, mean_b_coef).evaluate() - - # Mask off corners of volume - vol = sim.vols * mask - - # Assert the mean volume is close to original volume - for i, w in enumerate([1, -1]): - np.testing.assert_allclose( - w * est[i] / np.linalg.norm(est[i]), vol / np.linalg.norm(vol), atol=0.11 - ) - - def test_checkpoint(sim, basis, estimator, weights): """Exercise the 
checkpointing and max iterations branches.""" test_iter = 2 From 2be2cf11583b3cb9e24240354c26f3dabb0912f4 Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Thu, 30 May 2024 15:38:48 -0400 Subject: [PATCH 17/22] add src based adjoint test need to review that `n` scaling factor --- tests/test_mean_estimator.py | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/tests/test_mean_estimator.py b/tests/test_mean_estimator.py index 263831e250..611cf44d66 100644 --- a/tests/test_mean_estimator.py +++ b/tests/test_mean_estimator.py @@ -5,7 +5,7 @@ import pytest from pytest import raises -from aspire.basis import FBBasis3D +from aspire.basis import Coef, FBBasis3D from aspire.image import Image from aspire.operators import RadialCTFFilter from aspire.reconstruction import MeanEstimator @@ -105,7 +105,7 @@ def test_estimate(sim, estimator, mask): ) -def test_adjoint(sim, basis, estimator, mask): +def test_adjoint(sim, basis, estimator): """ Test = for random volume `v` and random images `u`. @@ -127,6 +127,24 @@ def test_adjoint(sim, basis, estimator, mask): np.testing.assert_allclose(lhs, rhs, rtol=1e-6) +def test_src_adjoint(sim, basis, estimator): + """ + Test the built-in source based estimator's `src_backward` has + adjoint like relationship with simulated image generation. + """ + + v = sim.vols.asnumpy()[0] # random volume + proj = sim.images[:] # projections of v + u = proj.asnumpy() # u = proj + + backproj = Coef(basis, estimator.src_backward() * sim.n).evaluate() + + lhs = np.dot(proj.asnumpy().flatten(), u.flatten()) + rhs = np.dot(backproj.asnumpy().flatten(), v.flatten()) + + np.testing.assert_allclose(lhs, rhs, rtol=0.02) + + def test_checkpoint(sim, basis, estimator): """Exercise the checkpointing and max iterations branches.""" test_iter = 2 From d72f2dbda4672782ac3da2c6a36e052a7fab80cf Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Fri, 31 May 2024 11:06:52 -0400 Subject: [PATCH 18/22] update tests with backward scaling factor --- tests/test_mean_estimator.py | 2 ++ tests/test_weighted_mean_estimator.py | 21 ++++++++++++++++++++- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/tests/test_mean_estimator.py b/tests/test_mean_estimator.py index 611cf44d66..40081206b3 100644 --- a/tests/test_mean_estimator.py +++ b/tests/test_mean_estimator.py @@ -56,6 +56,8 @@ def sim(L, dtype): ], dtype=dtype, seed=SEED, + amplitudes=1, + offsets=0, ) sim = sim.cache() # precompute images diff --git a/tests/test_weighted_mean_estimator.py b/tests/test_weighted_mean_estimator.py index 8c93a848ca..7c23d570bc 100644 --- a/tests/test_weighted_mean_estimator.py +++ b/tests/test_weighted_mean_estimator.py @@ -5,7 +5,7 @@ import pytest from pytest import raises -from aspire.basis import FBBasis3D +from aspire.basis import Coef, FBBasis3D from aspire.operators import RadialCTFFilter from aspire.reconstruction import WeightedVolumesEstimator from aspire.source.simulation import Simulation @@ -120,6 +120,25 @@ def test_estimate(sim, estimator, mask): ) +def test_src_adjoint(sim, basis, estimator): + """ + Test the built-in source based estimator's `src_backward` has + adjoint like relationship with simulated image generation. 
+ """ + + v = sim.vols.asnumpy()[0] # random volume + proj = sim.images[:] # projections of v + u = proj.asnumpy() # u = proj + + backproj = Coef(basis, estimator.src_backward() * sim.n).evaluate() + + lhs = np.dot(proj.asnumpy().flatten(), u.flatten()) + + for i, w in enumerate([1, -1]): + rhs = np.dot(backproj[i].asnumpy().flatten(), w * v.flatten()) + np.testing.assert_allclose(lhs, rhs, rtol=0.02) + + def test_checkpoint(sim, basis, estimator, weights): """Exercise the checkpointing and max iterations branches.""" test_iter = 2 From 4691bbde69e8e5b0f6ab816c6056279a49f37309 Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Fri, 31 May 2024 11:07:28 -0400 Subject: [PATCH 19/22] optional index cleanup commit clearer to me, but dont need to take this one --- src/aspire/covariance/covar.py | 2 +- src/aspire/reconstruction/mean.py | 6 +++--- src/aspire/source/image.py | 33 +++++++++++++++++++------------ 3 files changed, 24 insertions(+), 17 deletions(-) diff --git a/src/aspire/covariance/covar.py b/src/aspire/covariance/covar.py index 21da816cee..74f60862ae 100644 --- a/src/aspire/covariance/covar.py +++ b/src/aspire/covariance/covar.py @@ -179,7 +179,7 @@ def src_backward(self, mean_vol, noise_variance, shrink_method=None): (batch_n, self.src.L, self.src.L, self.src.L), dtype=self.dtype ) for j in range(batch_n): - im_centered_b[j] = self.src.im_backward(im_centered[j], i + j) + im_centered_b[j] = self.src.im_backward([i + j], im_centered[j]) im_centered_b = Volume(im_centered_b).to_vec() covar_b += (im_centered_b.T @ im_centered_b).reshape( diff --git a/src/aspire/reconstruction/mean.py b/src/aspire/reconstruction/mean.py index d298ae3272..f814d2545d 100644 --- a/src/aspire/reconstruction/mean.py +++ b/src/aspire/reconstruction/mean.py @@ -175,12 +175,12 @@ def src_backward(self): ) for i in range(0, self.src.n, self.batch_size): + idx = np.arange(i, min(self.src.n, i + self.batch_size), dtype=int) + im = self.src.images[idx] for k in range(self.r): - im = self.src.images[i : i + self.batch_size] - batch_vol_rhs = self.src.im_backward( + idx, im, - i, self.weights[:, k], symmetry_group=symmetry_group, ) / (self.src.n * sym_order) diff --git a/src/aspire/source/image.py b/src/aspire/source/image.py index 473585acb9..c36c7f6289 100644 --- a/src/aspire/source/image.py +++ b/src/aspire/source/image.py @@ -937,31 +937,38 @@ def normalize_background(self, bg_radius=1.0, do_ramp=True): LambdaXform(normalize_bg, bg_radius=bg_radius, do_ramp=do_ramp) ) - def im_backward(self, im, start, weights=None, symmetry_group=None): + def im_backward(self, idx, im=None, weights=None, symmetry_group=None): """ - Apply adjoint mapping to set of images + Apply adjoint mapping to set of images identified by indices `idx`. - :param im: An Image instance to which we wish to apply the adjoint of the forward model. - :param start: Start index of image to consider + :param idx: Source indices corresponding to a set of images. + :param im: An Image instance to which we wish to apply the + adjoint of the forward model. When `None`, infers + `im=self.images[idx]`. :param weights: Optional vector of weights to apply to images. Weights should be length `self.n`. :param symmetry_group: A SymmetryGroup instance. If supplied, uses symmetry to increase number of images used in back-projectioon. :return: An L-by-L-by-L volume containing the sum of the adjoint mappings applied to the start+num-1 images. 
""" - num = im.n_images - all_idx = np.arange(start, min(start + num, self.n)) - im *= self.amplitudes[all_idx, np.newaxis, np.newaxis] - im = im.shift(-self.offsets[all_idx, :]) - im = self._apply_source_filters(im, all_idx) + if im is None: + im = self.images[idx] + elif im.n_images != len(idx): + raise RuntimeError( + f"`im_backward` n_images != len(idx): {im.n_images} != {len(idx)}" + ) + + im *= self.amplitudes[idx, np.newaxis, np.newaxis] + im = im.shift(-self.offsets[idx, :]) + im = self._apply_source_filters(im, idx) if weights is not None: - im *= weights[all_idx, np.newaxis, np.newaxis] + im *= weights[idx, np.newaxis, np.newaxis] - vol = im.backproject( - self.rotations[start : start + num, :, :], symmetry_group=symmetry_group - )[0] + vol = im.backproject(self.rotations[idx, :, :], symmetry_group=symmetry_group)[ + 0 + ] return vol From 4ea38dc4467ce400027e7aa5c3eb845c39d241f9 Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Fri, 31 May 2024 14:32:18 -0400 Subject: [PATCH 20/22] revert weakening of simulation config for mean_est_tests (debugging) --- tests/test_mean_estimator.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/test_mean_estimator.py b/tests/test_mean_estimator.py index 40081206b3..611cf44d66 100644 --- a/tests/test_mean_estimator.py +++ b/tests/test_mean_estimator.py @@ -56,8 +56,6 @@ def sim(L, dtype): ], dtype=dtype, seed=SEED, - amplitudes=1, - offsets=0, ) sim = sim.cache() # precompute images From f069f3fb4fe770e1d28a24754e892f20a65ad21c Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Fri, 31 May 2024 14:38:38 -0400 Subject: [PATCH 21/22] add scale comment --- tests/test_mean_estimator.py | 1 + tests/test_weighted_mean_estimator.py | 1 + 2 files changed, 2 insertions(+) diff --git a/tests/test_mean_estimator.py b/tests/test_mean_estimator.py index 611cf44d66..2650839272 100644 --- a/tests/test_mean_estimator.py +++ b/tests/test_mean_estimator.py @@ -137,6 +137,7 @@ def test_src_adjoint(sim, basis, estimator): proj = sim.images[:] # projections of v u = proj.asnumpy() # u = proj + # `src_backward` scales by 1/n backproj = Coef(basis, estimator.src_backward() * sim.n).evaluate() lhs = np.dot(proj.asnumpy().flatten(), u.flatten()) diff --git a/tests/test_weighted_mean_estimator.py b/tests/test_weighted_mean_estimator.py index 7c23d570bc..5d622c3865 100644 --- a/tests/test_weighted_mean_estimator.py +++ b/tests/test_weighted_mean_estimator.py @@ -130,6 +130,7 @@ def test_src_adjoint(sim, basis, estimator): proj = sim.images[:] # projections of v u = proj.asnumpy() # u = proj + # `src_backward` scales by 1/n backproj = Coef(basis, estimator.src_backward() * sim.n).evaluate() lhs = np.dot(proj.asnumpy().flatten(), u.flatten()) From 62a8d806ea3b0af1aaf6670b71f9ee0cc3aaacea Mon Sep 17 00:00:00 2001 From: Garrett Wright Date: Mon, 10 Jun 2024 09:56:41 -0400 Subject: [PATCH 22/22] Revert "optional index cleanup commit" This reverts commit 27d89b4bd4d50bb6f98f859e0869b1ce2af380ab. 
--- src/aspire/covariance/covar.py | 2 +- src/aspire/reconstruction/mean.py | 6 +++--- src/aspire/source/image.py | 33 ++++++++++++------------------- 3 files changed, 17 insertions(+), 24 deletions(-) diff --git a/src/aspire/covariance/covar.py b/src/aspire/covariance/covar.py index 74f60862ae..21da816cee 100644 --- a/src/aspire/covariance/covar.py +++ b/src/aspire/covariance/covar.py @@ -179,7 +179,7 @@ def src_backward(self, mean_vol, noise_variance, shrink_method=None): (batch_n, self.src.L, self.src.L, self.src.L), dtype=self.dtype ) for j in range(batch_n): - im_centered_b[j] = self.src.im_backward([i + j], im_centered[j]) + im_centered_b[j] = self.src.im_backward(im_centered[j], i + j) im_centered_b = Volume(im_centered_b).to_vec() covar_b += (im_centered_b.T @ im_centered_b).reshape( diff --git a/src/aspire/reconstruction/mean.py b/src/aspire/reconstruction/mean.py index f814d2545d..d298ae3272 100644 --- a/src/aspire/reconstruction/mean.py +++ b/src/aspire/reconstruction/mean.py @@ -175,12 +175,12 @@ def src_backward(self): ) for i in range(0, self.src.n, self.batch_size): - idx = np.arange(i, min(self.src.n, i + self.batch_size), dtype=int) - im = self.src.images[idx] for k in range(self.r): + im = self.src.images[i : i + self.batch_size] + batch_vol_rhs = self.src.im_backward( - idx, im, + i, self.weights[:, k], symmetry_group=symmetry_group, ) / (self.src.n * sym_order) diff --git a/src/aspire/source/image.py b/src/aspire/source/image.py index c36c7f6289..473585acb9 100644 --- a/src/aspire/source/image.py +++ b/src/aspire/source/image.py @@ -937,38 +937,31 @@ def normalize_background(self, bg_radius=1.0, do_ramp=True): LambdaXform(normalize_bg, bg_radius=bg_radius, do_ramp=do_ramp) ) - def im_backward(self, idx, im=None, weights=None, symmetry_group=None): + def im_backward(self, im, start, weights=None, symmetry_group=None): """ - Apply adjoint mapping to set of images identified by indices `idx`. + Apply adjoint mapping to set of images - :param idx: Source indices corresponding to a set of images. - :param im: An Image instance to which we wish to apply the - adjoint of the forward model. When `None`, infers - `im=self.images[idx]`. + :param im: An Image instance to which we wish to apply the adjoint of the forward model. + :param start: Start index of image to consider :param weights: Optional vector of weights to apply to images. Weights should be length `self.n`. :param symmetry_group: A SymmetryGroup instance. If supplied, uses symmetry to increase number of images used in back-projectioon. :return: An L-by-L-by-L volume containing the sum of the adjoint mappings applied to the start+num-1 images. """ + num = im.n_images - if im is None: - im = self.images[idx] - elif im.n_images != len(idx): - raise RuntimeError( - f"`im_backward` n_images != len(idx): {im.n_images} != {len(idx)}" - ) - - im *= self.amplitudes[idx, np.newaxis, np.newaxis] - im = im.shift(-self.offsets[idx, :]) - im = self._apply_source_filters(im, idx) + all_idx = np.arange(start, min(start + num, self.n)) + im *= self.amplitudes[all_idx, np.newaxis, np.newaxis] + im = im.shift(-self.offsets[all_idx, :]) + im = self._apply_source_filters(im, all_idx) if weights is not None: - im *= weights[idx, np.newaxis, np.newaxis] + im *= weights[all_idx, np.newaxis, np.newaxis] - vol = im.backproject(self.rotations[idx, :, :], symmetry_group=symmetry_group)[ - 0 - ] + vol = im.backproject( + self.rotations[start : start + num, :, :], symmetry_group=symmetry_group + )[0] return vol
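A minimal sketch of the adjoint identity the new `test_adjoint` and `test_src_adjoint` tests check, using an assumed dense matrix as a stand-in for the `Volume.project` / `Image.backproject` pair: for a linear map A and random `v`, `u`, the inner products <A v, u> and <v, A^T u> must agree up to floating point error.

import numpy as np

rng = np.random.default_rng(1)
L, n = 8, 16                                   # toy volume size and image count

# Stand-in "projection": any dense linear map from volumes (L**3 values)
# to image stacks (n * L**2 values) satisfies this adjoint property.
A = rng.standard_normal((n * L * L, L * L * L))

v = rng.standard_normal(L * L * L)             # random "volume"
u = rng.standard_normal(n * L * L)             # random "images"

lhs = np.dot(A @ v, u)                         # <A v, u>
rhs = np.dot(v, A.T @ u)                       # <v, A^T u>
np.testing.assert_allclose(lhs, rhs, rtol=1e-10)

In the tests above, the forward map is `Volume(v).project(rots)` and the adjoint is `Image(u).backproject(rots)`; the source-based variant additionally multiplies `estimator.src_backward()` by `sim.n` before comparing, since `src_backward` scales the back-projection by 1/n.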