Sdi (#410)
* First batch of the SDI implementation as a saved copy: not yet usable; wait for further commits

* Second update for the SDI implementation as a saved copy. Not yet usable.

* Added adv.dif.im. to multiprocessing and fixed several output bugs. Still not a final version.

* Complete update to implement SDI support in the psfsubtraction module. Includes changes to multiprocessing.

* Added SDI testing. Fixed minor bugs.

* Further advanced SDI testing.

* Fixed output shape of the different differential imaging techniques.

* Changed sensitivity of SDI testing to account for machine accuracy.

* Improved testing to include multiprocessing of SDI.

* Further advanced SDI test coverage. Fixed minor bug.

* Further increased SDI testing coverage.

* Further extended SDI testing. Shortened SDI testing time.

* Excluded tests that failed because of machine errors.

* Refactoring of the unit tests for SDI

* Increased SDI test coverage. Deleted redundant lines.

* Corrected jitter effects due to misalignments. Adjusted tests and checked codestyle.

* Changed jitter correction to allow astrometry.

* Refactoring of the IFS support by the PcaPsfSubtractionModule

* Minor adjustment (1e-10 to 1e-8) in the relative accuracy of one of the PCA background unit tests

* Minor adjustment (1e-8 to 1e-6) in the relative accuracy of one of the PCA background unit tests, presumably due to a new version of sklearn that is used by the CI

* Minor adjustment (1e-6 to 1e-5) in the relative accuracy of one of the PCA background unit tests

* Further testing of the Travis CI

* More testing of the Travis CI

* Added testing. Minor bug fix.

* Further increased testing coverage.

* Correction of SDI test.

* Second SDI test correction.

* Added requested minor changes.

Co-authored-by: S.K. Kiefer <kiefer@para33.strw.leidenuniv.nl>
Co-authored-by: Tomas Stolker <tomas.stolker@phys.ethz.ch>
3 people committed Jun 26, 2020
1 parent 1257f00 commit 99eb3ba
Showing 12 changed files with 1,034 additions and 185 deletions.
16 changes: 16 additions & 0 deletions docs/pynpoint.util.rst
@@ -28,6 +28,14 @@ pynpoint.util.continuous module
:undoc-members:
:show-inheritance:

pynpoint.util.ifs module
------------------------

.. automodule:: pynpoint.util.ifs
:members:
:undoc-members:
:show-inheritance:

pynpoint.util.image module
--------------------------

@@ -116,6 +124,14 @@ pynpoint.util.residuals module
:undoc-members:
:show-inheritance:

pynpoint.util.sdi module
------------------------

.. automodule:: pynpoint.util.sdi
:members:
:undoc-members:
:show-inheritance:

pynpoint.util.star module
-------------------------

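The two new utility modules documented above, pynpoint.util.ifs and pynpoint.util.sdi, carry the IFS/SDI machinery added by this commit. For orientation: SDI exploits the fact that speckles scale radially with wavelength, so each channel of an IFS cube can be magnified to a common reference wavelength before the PSF model is built and subtracted. The sketch below illustrates only that rescaling idea; it is a self-contained illustration, not the actual pynpoint.util.sdi API (the function name, the lambda_max/lambda convention, and the center cropping are all assumptions):

import numpy as np
from scipy.ndimage import zoom

def sdi_rescale_sketch(frames: np.ndarray, wavelengths: np.ndarray) -> np.ndarray:
    # Illustrative only: magnify each wavelength channel by
    # lambda_max / lambda so that the speckle pattern (which scales
    # with wavelength) overlaps across channels.
    scales = np.max(wavelengths) / wavelengths
    rescaled = np.zeros_like(frames)

    for i, factor in enumerate(scales):
        zoomed = zoom(frames[i], factor, order=1)

        # Crop back to the original frame size around the center
        # (factor >= 1, so the zoomed frame is never smaller).
        ny, nx = frames[i].shape
        y0 = (zoomed.shape[0] - ny) // 2
        x0 = (zoomed.shape[1] - nx) // 2
        rescaled[i] = zoomed[y0:y0 + ny, x0:x0 + nx]

    return rescaled

cube = np.random.rand(3, 41, 41)  # (wavelength, y, x)
print(sdi_rescale_sketch(cube, np.array([1.0, 1.1, 1.2])).shape)  # (3, 41, 41)
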
490 changes: 369 additions & 121 deletions pynpoint/processing/psfsubtraction.py

Large diffs are not rendered by default.

122 changes: 85 additions & 37 deletions pynpoint/util/multipca.py
@@ -16,7 +16,7 @@
from pynpoint.core.dataio import OutputPort
from pynpoint.util.multiproc import TaskProcessor, TaskCreator, TaskWriter, TaskResult, \
TaskInput, MultiprocessingCapsule, to_slice
-from pynpoint.util.psf import pca_psf_subtraction
+from pynpoint.util.postproc import postprocessor
from pynpoint.util.residuals import combine_residuals


@@ -30,15 +30,15 @@ class PcaTaskCreator(TaskCreator):
def __init__(self,
tasks_queue_in: multiprocessing.JoinableQueue,
num_proc: int,
-pca_numbers: np.ndarray) -> None:
+pca_numbers: Union[np.ndarray, tuple]) -> None:
"""
Parameters
----------
tasks_queue_in : multiprocessing.queues.JoinableQueue
Input task queue.
num_proc : int
Number of processors.
-pca_numbers : numpy.ndarray
+pca_numbers : np.ndarray, tuple
Principal components for which the residuals are computed.
Returns
@@ -61,12 +61,20 @@ def run(self) -> None:
NoneType
None
"""
+        if isinstance(self.m_pca_numbers, tuple):
+            for i, pca_first in enumerate(self.m_pca_numbers[0]):
+                for j, pca_secon in enumerate(self.m_pca_numbers[1]):
+                    parameters = (((i, i+1, None), (j, j+1, None), (None, None, None)), )
+                    self.m_task_queue.put(TaskInput(tuple((pca_first, pca_secon)), parameters))

-        for i, pca_number in enumerate(self.m_pca_numbers):
-            parameters = (((i, i+1, None), (None, None, None), (None, None, None)), )
-            self.m_task_queue.put(TaskInput(pca_number, parameters))
-
-        self.create_poison_pills()

+            self.create_poison_pills()
+        else:
+            for i, pca_number in enumerate(self.m_pca_numbers):
+                parameters = (((i, i+1, None), (None, None, None), (None, None, None)), )
+                self.m_task_queue.put(TaskInput(pca_number, parameters))
+
+            self.create_poison_pills()
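The tuple branch above creates one task per pairing of the two component lists and encodes the pair's grid position in the parameters slices, so the writer can later place each result. A standalone sketch of that enumeration (the values are hypothetical; the real code pushes TaskInput objects onto a multiprocessing queue):

# Hypothetical SDI and ADI principal-component counts.
pca_numbers = ((1, 5, 10), (2, 4))

tasks = []
for i, pca_first in enumerate(pca_numbers[0]):
    for j, pca_second in enumerate(pca_numbers[1]):
        # (slice over the SDI axis, slice over the ADI axis, untouched axis)
        position = ((i, i + 1, None), (j, j + 1, None), (None, None, None))
        tasks.append(((pca_first, pca_second), position))

print(len(tasks))  # 6: the Cartesian product of both component lists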


class PcaTaskProcessor(TaskProcessor):
@@ -89,29 +97,35 @@ def __init__(self,
result_queue_in: multiprocessing.JoinableQueue,
star_reshape: np.ndarray,
angles: np.ndarray,
-pca_model: PCA,
-im_shape: Tuple[int, int, int],
-indices: np.ndarray,
-requirements: Tuple[bool, bool, bool, bool]) -> None:
+scales: Optional[np.ndarray],
+pca_model: Optional[PCA],
+im_shape: tuple,
+indices: Optional[np.ndarray],
+requirements: Tuple[bool, bool, bool, bool],
+processing_type: str) -> None:
"""
Parameters
----------
tasks_queue_in : multiprocessing.queues.JoinableQueue
Input task queue.
result_queue_in : multiprocessing.queues.JoinableQueue
Input result queue.
-star_reshape : numpy.ndarray
+star_reshape : np.ndarray
    Reshaped (2D) stack of images.
-angles : numpy.ndarray
+angles : np.ndarray
    Derotation angles (deg).
+scales : np.ndarray
+    Scaling factors.
pca_model : sklearn.decomposition.pca.PCA
    PCA object with the basis.
im_shape : tuple(int, int, int)
    Original shape of the stack of images.
-indices : numpy.ndarray
+indices : np.ndarray
    Non-masked image indices.
requirements : tuple(bool, bool, bool, bool)
    Required output residuals.
+processing_type : str
+    Selected processing type.
Returns
-------
@@ -124,9 +138,11 @@ def __init__(self,
self.m_star_reshape = star_reshape
self.m_pca_model = pca_model
self.m_angles = angles
self.m_scales = scales
self.m_im_shape = im_shape
self.m_indices = indices
self.m_requirements = requirements
self.m_processing_type = processing_type

@typechecked
def run_job(self,
@@ -145,20 +161,37 @@ def run_job(self,
Output residuals.
"""

-        residuals, res_rot = pca_psf_subtraction(images=self.m_star_reshape,
-                                                 angles=self.m_angles,
-                                                 pca_number=int(tmp_task.m_input_data),
-                                                 pca_sklearn=self.m_pca_model,
-                                                 im_shape=self.m_im_shape,
-                                                 indices=self.m_indices)
+        # correct data type of pca_number if necessary
+        if isinstance(tmp_task.m_input_data, tuple):
+            pca_number = tmp_task.m_input_data
+        else:
+            pca_number = int(tmp_task.m_input_data)
+
+        residuals, res_rot = postprocessor(images=self.m_star_reshape,
+                                           angles=self.m_angles,
+                                           scales=self.m_scales,
+                                           pca_number=pca_number,
+                                           pca_sklearn=self.m_pca_model,
+                                           im_shape=self.m_im_shape,
+                                           indices=self.m_indices,
+                                           processing_type=self.m_processing_type)


+        # differentiate between IFS data and mono-wavelength data
+        if res_rot.ndim == 3:
+            res_output = np.zeros((4, res_rot.shape[-2], res_rot.shape[-1]))
-        res_output = np.zeros((4, res_rot.shape[1], res_rot.shape[2]))
+        else:
+            res_output = np.zeros((4, len(self.m_star_reshape),
+                                   res_rot.shape[-2], res_rot.shape[-1]))

if self.m_requirements[0]:
-            res_output[0, ] = combine_residuals(method='mean', res_rot=res_rot)
+            res_output[0, ] = combine_residuals(method='mean',
+                                                res_rot=res_rot)

if self.m_requirements[1]:
-            res_output[1, ] = combine_residuals(method='median', res_rot=res_rot)
+            res_output[1, ] = combine_residuals(method='median',
+                                                res_rot=res_rot)

if self.m_requirements[2]:
res_output[2, ] = combine_residuals(method='weighted',
Expand All @@ -167,7 +200,8 @@ def run_job(self,
angles=self.m_angles)

if self.m_requirements[3]:
-            res_output[3, ] = combine_residuals(method='clipped', res_rot=res_rot)
+            res_output[3, ] = combine_residuals(method='clipped',
+                                                res_rot=res_rot)

sys.stdout.write('.')
sys.stdout.flush()
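
In run_job above, axis 0 of res_output indexes the four combination methods (mean, median, weighted, clipped), while the remaining axes depend on whether the residuals come from mono-wavelength or IFS data. A self-contained sketch of that shape logic, with np.mean and np.median standing in for combine_residuals and the weighted/clipped rows left at zero:

import numpy as np

def stack_outputs_sketch(res_rot: np.ndarray, n_wavelengths: int) -> np.ndarray:
    if res_rot.ndim == 3:
        # Mono-wavelength data: res_rot is (n_frames, y, x).
        res_output = np.zeros((4, res_rot.shape[-2], res_rot.shape[-1]))
        res_output[0] = np.mean(res_rot, axis=0)
        res_output[1] = np.median(res_rot, axis=0)
    else:
        # IFS data: res_rot is (n_wavelengths, n_frames, y, x).
        res_output = np.zeros((4, n_wavelengths,
                               res_rot.shape[-2], res_rot.shape[-1]))
        res_output[0] = np.mean(res_rot, axis=1)
        res_output[1] = np.median(res_rot, axis=1)

    return res_output

print(stack_outputs_sketch(np.random.rand(20, 8, 8), 1).shape)     # (4, 8, 8)
print(stack_outputs_sketch(np.random.rand(3, 20, 8, 8), 3).shape)  # (4, 3, 8, 8)
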
@@ -247,25 +281,29 @@ def run(self) -> None:

with self.m_data_mutex:
-                res_slice = to_slice(next_result.m_position)
+                if next_result.m_position[1][0] is None:
+                    res_slice = (next_result.m_position[0][0])
+                else:
+                    res_slice = (next_result.m_position[0][0], next_result.m_position[1][0])

if self.m_requirements[0]:
self.m_mean_out_port._check_status_and_activate()
-                    self.m_mean_out_port[res_slice] = next_result.m_data_array[0, :, :]
+                    self.m_mean_out_port[res_slice] = next_result.m_data_array[0]
self.m_mean_out_port.close_port()

if self.m_requirements[1]:
self.m_median_out_port._check_status_and_activate()
-                    self.m_median_out_port[res_slice] = next_result.m_data_array[1, :, :]
+                    self.m_median_out_port[res_slice] = next_result.m_data_array[1]
self.m_median_out_port.close_port()

if self.m_requirements[2]:
self.m_weighted_out_port._check_status_and_activate()
-                    self.m_weighted_out_port[res_slice] = next_result.m_data_array[2, :, :]
+                    self.m_weighted_out_port[res_slice] = next_result.m_data_array[2]
self.m_weighted_out_port.close_port()

if self.m_requirements[3]:
self.m_clip_out_port._check_status_and_activate()
-                    self.m_clip_out_port[res_slice] = next_result.m_data_array[3, :, :]
+                    self.m_clip_out_port[res_slice] = next_result.m_data_array[3]
self.m_clip_out_port.close_port()

self.m_result_queue.task_done()
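
The writer above derives res_slice from the task's grid position: with a single list of PCA numbers only the first index is set, while an SDI+ADI run fills a two-dimensional index. A small self-contained sketch of that mapping (the array shapes are hypothetical):

import numpy as np

def to_res_slice_sketch(position):
    # One list of PCA numbers: write to out[i].
    if position[1][0] is None:
        return position[0][0]
    # SDI+ADI grid: write to out[i, j].
    return (position[0][0], position[1][0])

out = np.zeros((3, 2, 8, 8))  # hypothetical (sdi, adi, y, x) result grid
pos = ((1, 2, None), (0, 1, None), (None, None, None))
out[to_res_slice_sketch(pos)] = np.ones((8, 8))
print(out[1, 0].sum())  # 64.0
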
@@ -283,12 +321,14 @@ def __init__(self,
weighted_out_port: Optional[OutputPort],
clip_out_port: Optional[OutputPort],
num_proc: int,
-pca_numbers: np.ndarray,
-pca_model: PCA,
+pca_numbers: Union[tuple, np.ndarray],
+pca_model: Optional[PCA],
star_reshape: np.ndarray,
angles: np.ndarray,
-im_shape: Tuple[int, int, int],
-indices: np.ndarray) -> None:
+scales: Optional[np.ndarray],
+im_shape: tuple,
+indices: Optional[np.ndarray],
+processing_type: str) -> None:
"""
Constructor of PcaMultiprocessingCapsule.
@@ -304,18 +344,22 @@ def __init__(self,
Output port for the mean clipped residuals.
num_proc : int
Number of processors.
-pca_numbers : numpy.ndarray
+pca_numbers : np.ndarray
    Number of principal components.
pca_model : sklearn.decomposition.pca.PCA
    PCA object with the basis.
-star_reshape : numpy.ndarray
+star_reshape : np.ndarray
    Reshaped (2D) input images.
-angles : numpy.ndarray
+angles : np.ndarray
    Derotation angles (deg).
+scales : np.ndarray
+    Scaling factors.
im_shape : tuple(int, int, int)
    Original shape of the input images.
-indices : numpy.ndarray
+indices : np.ndarray
    Non-masked pixel indices.
+processing_type : str
+    Selection of the processing type.
Returns
-------
@@ -331,8 +375,10 @@ def __init__(self,
self.m_pca_model = pca_model
self.m_star_reshape = star_reshape
self.m_angles = angles
self.m_scales = scales
self.m_im_shape = im_shape
self.m_indices = indices
self.m_processing_type = processing_type

self.m_requirements = [False, False, False, False]
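The four m_requirements flags initialised above control which residual outputs are computed; the logic that sets them sits in a collapsed part of the constructor, but it plausibly follows from which output ports were supplied. A sketch under that assumption (not the verbatim PynPoint code):

from typing import List, Optional

def derive_requirements_sketch(mean_port: Optional[object],
                               median_port: Optional[object],
                               weighted_port: Optional[object],
                               clip_port: Optional[object]) -> List[bool]:
    # One flag per optional output port: compute a residual type only
    # if somewhere to write it was provided.
    return [port is not None
            for port in (mean_port, median_port, weighted_port, clip_port)]

print(derive_requirements_sketch(object(), object(), None, None))
# [True, True, False, False]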

@@ -417,9 +463,11 @@ def create_processors(self) -> List[PcaTaskProcessor]:
self.m_result_queue,
self.m_star_reshape,
self.m_angles,
+self.m_scales,
self.m_pca_model,
self.m_im_shape,
self.m_indices,
-self.m_requirements))
+self.m_requirements,
+self.m_processing_type))

return processors
2 changes: 1 addition & 1 deletion pynpoint/util/multiproc.py
@@ -21,7 +21,7 @@ class TaskInput:

@typechecked
def __init__(self,
-             input_data: Union[np.ndarray, np.int64],
+             input_data: Union[np.ndarray, np.int64, tuple],
job_parameter: tuple) -> None:
"""
Parameters
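
With input_data widened to accept a tuple, a task can now carry a pair of PCA numbers, one for the SDI step and one for the ADI step, instead of a single count. A simplified stand-in for TaskInput to illustrate (attribute names mirror the real class, but this is not the PynPoint implementation):

class TaskInputSketch:
    def __init__(self, input_data, job_parameter: tuple) -> None:
        self.m_input_data = input_data        # int for ADI, tuple for SDI+ADI
        self.m_job_parameter = job_parameter  # grid position of the result

mono = TaskInputSketch(5, (((0, 1, None), (None, None, None), (None, None, None)), ))
ifs = TaskInputSketch((5, 2), (((0, 1, None), (1, 2, None), (None, None, None)), ))
print(isinstance(ifs.m_input_data, tuple))  # True: selects the SDI+ADI path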
