Add scaling to average_combine and median_combine #123

Merged · 5 commits · Jun 21, 2014
Changes from all commits
56 changes: 53 additions & 3 deletions ccdproc/combiner.py
@@ -85,6 +85,10 @@ def __init__(self, ccd_list):
else:
self.data_arr.mask[i] = ma.zeros((ydim, xdim))

# Must be after self.data_arr is defined because it checks the
# length of the data array.
self.scaling = None

@property
def weights(self):
return self._weights
@@ -102,6 +106,42 @@ def weights(self, value):
else:
self._weights = None

@property
def scaling(self):
"""
Scaling factor used in combining images.

Parameters
----------

scaling : function or array-like or None, optional
Images are multiplied by scaling prior to combining them. Scaling
may be either a function, which will be applied to each image
to determine the scaling factor, or a list or array whose length
is the number of images in the `Combiner`. Default is ``None``.
"""
return self._scaling

@scaling.setter
def scaling(self, value):
if value is None:
self._scaling = value
else:
n_images = self.data_arr.data.shape[0]
if callable(value):
self._scaling = [value(self.data_arr[i]) for
i in range(n_images)]
self._scaling = np.array(self._scaling)
else:
try:
    # len() raises TypeError for scalars; the except clause
    # below converts that into a clearer message.
    if len(value) != n_images:
        raise ValueError("Scaling must be the same length as "
                         "the number of images.")
    self._scaling = np.array(value)
except TypeError:
    raise TypeError("Scaling must be a function or an array "
                    "the same length as the number of images.")
# reshape so that broadcasting occurs properly
self._scaling = self.scaling[:, np.newaxis, np.newaxis]
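
For orientation, a minimal usage sketch of the two forms this setter accepts (hypothetical, not part of the diff; assumes `combiner` was built from three images):

    import numpy as np

    # Form 1: a callable, evaluated once per image to produce its factor.
    combiner.scaling = lambda arr: 1.0 / np.ma.average(arr)

    # Form 2: an explicit sequence, one factor per image (values arbitrary).
    combiner.scaling = [1.0, 0.97, 1.03]

    # Either way, the stored factors end up shaped (n_images, 1, 1) so they
    # broadcast against the (n_images, ny, nx) masked data array.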

#set up min/max clipping algorithms
def minmax_clipping(self, min_clip=None, max_clip=None):
"""Mask all pixels that are below min_clip or above max_clip.
@@ -198,8 +238,13 @@ def median_combine(self, median_func=ma.median):
deviation does not account for rejected pixels

"""
if self.scaling is not None:
scalings = self.scaling
else:
scalings = 1.0

#set the data
data = median_func(self.data_arr, axis=0)
data = median_func(scalings * self.data_arr, axis=0)

#set the mask
mask = self.data_arr.mask.sum(axis=0)
@@ -219,7 +264,7 @@
#return the combined image
return combined_image

def average_combine(self):
def average_combine(self, scale_func=None, scale_to=1.0):
"""Average combine together a set of arrays. A CCDData object is
returned with the data property set to the average of the arrays.
If the data was masked or any data have been rejected, those pixels
@@ -234,8 +279,13 @@ def average_combine(self):
CCDData object based on the combined input of CCDData objects.

"""
if self.scaling is not None:
scalings = self.scaling
else:
scalings = 1.0
#set up the data
data, wei = ma.average(self.data_arr, axis=0, weights=self.weights,
data, wei = ma.average(scalings * self.data_arr,
axis=0, weights=self.weights,
returned=True)

#set up the mask
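A quick numeric check of the broadcasting used above (a standalone sketch with made-up data, not from the PR):

    import numpy as np
    import numpy.ma as ma

    # Three constant 2x2 "images" whose means are 2, 4 and 8.
    data = ma.masked_array([[[2., 2.], [2., 2.]],
                            [[4., 4.], [4., 4.]],
                            [[8., 8.], [8., 8.]]])
    # One factor per image, reshaped to (n_images, 1, 1) as in the setter.
    scalings = np.array([1 / 2, 1 / 4, 1 / 8])[:, np.newaxis, np.newaxis]
    print(ma.average(scalings * data, axis=0))  # every pixel is 1.0
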
35 changes: 35 additions & 0 deletions ccdproc/tests/test_combiner.py
@@ -207,6 +207,41 @@ def test_combiner_mask_average(ccd_data):
assert not ccd.mask[5, 5]


def test_combiner_with_scaling(ccd_data):
# The factors below are not particularly important; just avoid anything
# whose average is 1.
ccd_data_lower = ccd_data.multiply(0.9)
ccd_data_higher = ccd_data.multiply(3)
combiner = Combiner([ccd_data, ccd_data_higher, ccd_data_lower])
# scale each array to the mean of the first image
scale_by_mean = lambda x: ccd_data.data.mean()/np.ma.average(x)
combiner.scaling = scale_by_mean
avg_ccd = combiner.average_combine()
# Does the mean of the scaled arrays match the value to which it was
# scaled?
np.testing.assert_almost_equal(avg_ccd.data.mean(),
ccd_data.data.mean())
assert avg_ccd.shape == ccd_data.shape
median_ccd = combiner.median_combine()
# Does median also scale to the correct value?
np.testing.assert_almost_equal(np.ma.median(median_ccd),
np.ma.median(ccd_data.data))

# Set the scaling manually...
combiner.scaling = [scale_by_mean(combiner.data_arr[i]) for i in range(3)]
avg_ccd = combiner.average_combine()
np.testing.assert_almost_equal(avg_ccd.data.mean(),
ccd_data.data.mean())
assert avg_ccd.shape == ccd_data.shape
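
Why the assertions above hold (a hypothetical sketch with made-up numbers, not part of the test file): scaling each image by target_mean / image_mean forces every scaled image to have mean exactly target_mean, which is what the mean assertion checks and what the median assertion checks approximately.

    import numpy as np

    target_mean = 10.0
    images = [np.full((2, 2), m) for m in (5.0, 20.0, 40.0)]
    scaled = [(target_mean / im.mean()) * im for im in images]
    print([im.mean() for im in scaled])  # [10.0, 10.0, 10.0]
    print(np.mean(scaled))               # 10.0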


def test_combiner_scaling_fails(ccd_data):
combiner = Combiner([ccd_data, ccd_data.copy()])
# Should fail unless scaling is set to a function or list-like
with pytest.raises(TypeError):
combiner.scaling = 5


#test data combined with mask is created correctly
def test_combiner_mask_media(ccd_data):
data = np.zeros((10, 10))
15 changes: 15 additions & 0 deletions docs/ccdproc/image_combination.rst
@@ -109,6 +109,21 @@ argument ``median_func`` for calculating the median instead. One fast
alternative is provided by the `bottleneck`_ package; an example using it is
at :ref:`bottleneck_example`.

With image scaling
------------------

In some circumstances it may be convenient to scale all images to a common
value before combining them. Do so by setting `~ccdproc.Combiner.scaling`:

>>> scaling_func = lambda arr: 1/np.ma.average(arr)
>>> combiner.scaling = scaling_func
>>> combined_average_scaled = combiner.average_combine()

This will normalize each image by its mean before combining (note that the
underlying images are *not* scaled; scaling is only done as part of combining
using `~ccdproc.Combiner.average_combine` or
`~ccdproc.Combiner.median_combine`).
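
Scaling factors can also be given explicitly, one per image (a sketch with
arbitrary factors; the list must match the number of images in the combiner,
assumed here to be three):

>>> combiner.scaling = [1.0, 0.97, 1.03]
>>> combined_average_scaled = combiner.average_combine()

Assigning ``None`` turns scaling back off:

>>> combiner.scaling = None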

With image transformation
-------------------------
