Skip to content

Commit

Permalink
Merge branch 'release-v0.7.x' into stable
Browse files Browse the repository at this point in the history
# Conflicts:
#	.travis.yml
#	doc/conf.py
#	megaradrp/__init__.py
#	megaradrp/recipes/auxiliary/focustel.py
#	megaradrp/recipes/calibration/arc.py
#	setup.py
  • Loading branch information
sergiopasra committed Aug 6, 2018
2 parents 749e0d6 + 747c682 commit ed86460
Show file tree
Hide file tree
Showing 40 changed files with 528 additions and 148 deletions.
8 changes: 4 additions & 4 deletions .travis.yml
Expand Up @@ -4,11 +4,11 @@ sudo: false
matrix:
include:
- python: "2.7"
env: DEPS="numpy scipy astropy enum34"
env: DEPS="numpy scipy astropy=2.0 enum34"
- python: "3.5"
env: DEPS="numpy scipy astropy"
env: DEPS="numpy scipy astropy=3.0" COVERAGE=true
- python: "3.6"
env: DEPS="numpy scipy astropy"
env: DEPS="numpy scipy astropy=3.0"

cache:
apt: true
Expand Down Expand Up @@ -36,7 +36,7 @@ install:
- conda update --yes conda
- >
conda create -n testenv --yes python=$TRAVIS_PYTHON_VERSION
$DEPS scikit-image pip pytest setuptools Cython six>=1.7
$DEPS scikit-image pip pytest setuptools six>=1.7
pyyaml
- source activate testenv
- pip install pytest-benchmark pytest-cov
Expand Down
14 changes: 6 additions & 8 deletions README.rst
Expand Up @@ -21,20 +21,18 @@ This is Megara DRP, the data reduction pipeline for MEGARA
Megara DRP is distributed under GNU GPL, either version 3 of the License,
or (at your option) any later version. See the file LICENSE.txt for details.

Megara DRP requires the following packages installed in order to
be able to be installed and work properly:
Python 2.7 or 3.4 (or greater) is required. Megara DRP requires the following packages
installed in order to be able to be installed and work properly:

- python 2.7 (http://www.python.org)
- setuptools (http://peak.telecommunity.com/DevCenter/setuptools)
- numpy >= 1.7 (http://www.numpy.org/)
- scipy (http://www.scipy.org)
- astropy >= 1.0 (http://www.astropy.org/)
- numina >= 0.14 (http://guaix.fis.ucm.es/projects/numina/)
- astropy >= 2.0 (http://www.astropy.org/)
- numina >= 0.16 (http://guaix.fis.ucm.es/projects/numina/)
- scikit-image (http://scikit-image.org/)

Webpage: https://guaix.fis.ucm.es/megara
Maintainer: sergiopr@fis.ucm.es

.. |zenodo| image:: https://zenodo.org/badge/13088/guaix-ucm/megaradrp.svg
:target: https://zenodo.org/badge/latestdoi/13088/guaix-ucm/megaradrp

.. |zenodo| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.593647.svg
:target: https://zenodo.org/record/593647
4 changes: 2 additions & 2 deletions doc/conf.py
Expand Up @@ -27,8 +27,8 @@

project = u'MEGARA Data Reduction Pipeline'
copyright = u'2013-2018, Universidad Complutense de Madrid'
version = '0.6'
release = '0.6.1'
version = '0.7'
release = '0.7'
show_authors = True

numpydoc_show_class_members = False
Expand Down
2 changes: 1 addition & 1 deletion doc/installation.rst
Expand Up @@ -34,7 +34,7 @@ Using Conda

From the shell, execute:::

conda install -c sergiopasra/label/dev megaradrp
conda install -c conda-forge megaradrp


Using pip
Expand Down
2 changes: 1 addition & 1 deletion megaradrp/__init__.py
Expand Up @@ -12,7 +12,7 @@
import logging


__version__ = '0.6.1'
__version__ = '0.7'


# Top level NullHandler
Expand Down
14 changes: 14 additions & 0 deletions megaradrp/datamodel.py
Expand Up @@ -22,9 +22,23 @@
from numina.util.convert import convert_date


class QueryAttribute(object):
    """Metadata descriptor for a queryable image attribute.

    Holds the attribute's name, its Python type and an optional
    human-readable description, for use in data-model query tables.
    """

    def __init__(self, name, tipo, description=""):
        # Plain value holder: store the three fields as given.
        self.description = description
        self.type = tipo
        self.name = name


class MegaraDataModel(DataModel):
"""Data model of MEGARA images"""

query_attrs = {
'vph': QueryAttribute('vph', str),
'insmode': QueryAttribute('insmode', str),
'insconf': QueryAttribute('insconf', str),
'speclamp': QueryAttribute('speclamp', str)
}

meta_info_headers = [
'instrument',
'object',
Expand Down
2 changes: 1 addition & 1 deletion megaradrp/processing/combine.py
Expand Up @@ -172,7 +172,7 @@ def main(args=None):
with contextlib.nested(*[fits.open(fname) for fname in args.image]) as hduls:
result = combination_hdul(hduls, method=method, errors=errors, prolog=None)

result.writeto(args.output, clobber=True)
result.writeto(args.output, overwrite=True)
# with contextlib.ExitStack() as stack:
# hduls = [stack.enter_context(fits.open(fname)) for fname in args.image]
# combination_hdul(hduls, method=combine.mean, errors=False, prolog=None)
Expand Down
72 changes: 56 additions & 16 deletions megaradrp/processing/extractobj.py
Expand Up @@ -24,6 +24,38 @@
from megaradrp.processing.fluxcalib import update_flux_limits


def coverage_det(arr):
    """Return the contiguous coverage interval of a binary array as a slice.

    `arr` is expected to contain only 0s and 1s with at most one
    contiguous run of 1s.  The returned slice spans that run; note the
    intentional upper bound of ``len(arr) + 1`` when the run extends to
    the right edge (kept for compatibility with downstream users).
    """

    highest = numpy.max(arr)
    lowest = numpy.min(arr)

    # Sentinel stop value used when coverage reaches the right edge.
    full_stop = len(arr) + 1

    # All zeros: no coverage at all.
    if highest == 0:
        return slice(0, 0)

    # All ones: full coverage.
    if lowest == 1:
        return slice(0, full_stop)

    # Mixed 0s and 1s: locate the 0->1 and 1->0 transitions.
    steps = numpy.diff(arr)

    # Left edge: first rising transition, else coverage starts at 0.
    rising, = numpy.where(steps == 1)
    start = rising[0] + 1 if len(rising) > 0 else 0

    # Right edge: first falling transition, else coverage runs to the end.
    falling, = numpy.where(steps == -1)
    stop = falling[0] + 1 if len(falling) > 0 else full_stop

    return slice(start, stop)


def extract_star(rssimage, position, npoints, fiberconf, logger=None):
"""Extract a star given its center and the number of fibers to extract"""

Expand Down Expand Up @@ -77,12 +109,14 @@ def extract_star(rssimage, position, npoints, fiberconf, logger=None):
valid_region = max_value_region

# Interval with maximum coverage
nz_max, = numpy.nonzero(numpy.diff(max_value_region))
#nz_max, = numpy.nonzero(numpy.diff(max_value_region))
# Interval with at least 1 fiber
nz_some, = numpy.nonzero(numpy.diff(some_value_region))
#nz_some, = numpy.nonzero(numpy.diff(some_value_region))
nz_max_slice = coverage_det(max_value_region)
nz_some_slice = coverage_det(some_value_region)

# Collapse the flux in the optimal region
perf = flux_fiber[:, nz_max[0] + 1: nz_max[1] + 1].sum(axis=1)
perf = flux_fiber[:, nz_max_slice].sum(axis=1)
# Contribution of each fiber to the total flux, 1D
perf_norm = perf / perf.sum()
contributions = numpy.zeros(shape=(rssdata.shape[0],))
Expand Down Expand Up @@ -113,7 +147,7 @@ def extract_star(rssimage, position, npoints, fiberconf, logger=None):
# ax2.plot(coverage_total)
#
# plt.show()
pack = flux_total_c, colids, nz_max, nz_some
pack = flux_total_c, colids, nz_max_slice, nz_some_slice
# pack = flux_total_c
totals.append(pack)

Expand Down Expand Up @@ -267,7 +301,7 @@ def compute_dar(img, datamodel, logger=None, debug_plot=False):


def generate_sensitivity(final, spectrum, star_interp, extinc_interp,
cover1, cover2, sigma=20.0):
wl_coverage1, wl_coverage2, sigma=20.0):

wcsl = astropy.wcs.WCS(final[0].header)

Expand All @@ -292,32 +326,33 @@ def generate_sensitivity(final, spectrum, star_interp, extinc_interp,
r0 = response_0 / r0max
r1 = response_1 / r1max

pixm1, pixm2 = cover1
pixr1, pixr2 = cover2
pixm1 = wl_coverage1.start
pixm2 = wl_coverage1.stop
pixr1 = wl_coverage2.start
pixr2 = wl_coverage2.stop

pixlims = {}
pixlims['PIXLIMR1'] = pixr1
pixlims['PIXLIMR1'] = pixr1 + 1 # Convert to 1-ref
pixlims['PIXLIMR2'] = pixr2
pixlims['PIXLIMM1'] = pixm1
pixlims['PIXLIMM1'] = pixm1 + 1 # Convert to 1-ref
pixlims['PIXLIMM2'] = pixm2

max_valid = numpy.zeros_like(valid)
max_valid[pixm1:pixm2 + 1] = True
max_valid[wl_coverage1] = True

partial_valid = numpy.zeros_like(valid)
partial_valid[pixr1:pixr2 + 1] = True
partial_valid[wl_coverage2] = True

valid = numpy.ones_like(response_0)
valid[pixm2:] = 0
valid[:pixm1+1] = 0
valid = numpy.zeros_like(response_0)
valid[wl_coverage1] = 1

pixf1, pixf2 = int(math.floor(pixm1 + 2* sigma)), int(math.ceil(pixm2 - 2 * sigma))

pixlims['PIXLIMF1'] = pixf1
pixlims['PIXLIMF1'] = pixf1 + 1
pixlims['PIXLIMF2'] = pixf2

flux_valid = numpy.zeros_like(valid, dtype='bool')
flux_valid[pixf1:pixf2 + 1] = True
flux_valid[pixf1:pixf2] = True

r0_ens = gaussian_filter(r0, sigma=sigma)

Expand All @@ -326,6 +361,11 @@ def generate_sensitivity(final, spectrum, star_interp, extinc_interp,

# FIXME: add history
sens = fits.PrimaryHDU(s_response, header=final[0].header)
# delete second axis keywords
for key in ['CRPIX2', 'CRVAL2', 'CDELT2', 'CTYPE2']:
if key in sens.header:
del sens.header[key]

sens.header['uuid'] = str(uuid.uuid1())
sens.header['tunit'] = ('Jy', "Final units")

Expand Down
2 changes: 1 addition & 1 deletion megaradrp/processing/tests/test_aperture.py
Expand Up @@ -10,7 +10,7 @@
# eq = 0.8 * np.ones((4112, 4096))
# temporary_path = mkdtemp()
#
# fits.writeto('%s/eq.fits' % temporary_path, eq, clobber=True)
# fits.writeto('%s/eq.fits' % temporary_path, eq, overwrite=True)
#
# image =file(temporary_path + '/eq.fits')
# obj = ApertureExtractor(None)
Expand Down
32 changes: 32 additions & 0 deletions megaradrp/processing/tests/test_extractobj.py
@@ -0,0 +1,32 @@

import megaradrp.processing.extractobj as eobj


def test_cover_1():
    # Degenerate inputs: full coverage and no coverage at all.
    cases = [
        ([1, 1, 1, 1, 1, 1], slice(0, 7)),
        ([0, 0, 0, 0, 0, 0], slice(0, 0)),
    ]
    for arr, expected in cases:
        assert eobj.coverage_det(arr) == expected


def test_cover_2():
    # Mixed inputs: coverage runs touching neither, one, or both edges.
    cases = [
        ([0, 1, 1, 1, 0, 0], slice(1, 4)),
        ([0, 1, 1, 1, 1, 0], slice(1, 5)),
        ([0, 1, 1, 1, 1, 1], slice(1, 7)),
        ([1, 1, 1, 1, 1, 0], slice(0, 5)),
    ]
    for arr, expected in cases:
        assert eobj.coverage_det(arr) == expected
8 changes: 4 additions & 4 deletions megaradrp/processing/tests/test_processing.py
Expand Up @@ -44,7 +44,7 @@ def test_trim_and_o(direction):
'bng': [1,1]
}
fs = generate_bias_file()
fits.writeto('%s/flat.fits' % (temporary_path), fs, clobber=True)
fits.writeto('%s/flat.fits' % (temporary_path), fs, overwrite=True)
trimOut(
'%s/flat.fits' % (temporary_path),
detconf,
Expand All @@ -61,7 +61,7 @@ def test_trim_and_o(direction):
def test_trim_and_o_fail():
temporary_path = mkdtemp()
fs = generate_bias_file()
fits.writeto('%s/flat.fits' % (temporary_path), fs, clobber=True)
fits.writeto('%s/flat.fits' % (temporary_path), fs, overwrite=True)

direction = 'fails'
detconf = {
Expand All @@ -83,7 +83,7 @@ def test_trim_and_o_fail():
def test_trim_and_o_fail2():
temporary_path = mkdtemp()
fs = generate_bias_file()
fits.writeto('%s/flat.fits' % (temporary_path), fs, clobber=True)
fits.writeto('%s/flat.fits' % (temporary_path), fs, overwrite=True)

bins = 'fail'
detconf = {
Expand Down Expand Up @@ -112,7 +112,7 @@ def test_apextract_weights():
rss = apextract_weights(data, tarfile.open(file_name, 'r'))
hdu_rss = fits.PrimaryHDU(rss)
final = fits.HDUList([hdu_rss])
final.writeto('rss.fits', clobber=True)
final.writeto('rss.fits', overwrite=True)
assert True


Expand Down
4 changes: 2 additions & 2 deletions megaradrp/processing/trimover.py
Expand Up @@ -38,7 +38,7 @@ def trimOut(img, detconf, direction='normal', out='trimmed.fits'):
if issubclass(str, type(img)):
with fits.open(img) as hdul:
hdu = trim_and_o_array(hdul[0].data, detconf, direction=direction)
fits.writeto(out, hdu, clobber=True)
fits.writeto(out, hdu, overwrite=True)

elif isinstance(img, fits.PrimaryHDU):
finaldata = trim_and_o_array(img.data, detconf, direction=direction)
Expand Down Expand Up @@ -225,7 +225,7 @@ def test_image(self):
data[self.ocol2] += 500
data[self.trim2] += 5000

fits.writeto('eq_estimado.fits', data, clobber=True)
fits.writeto('eq_estimado.fits', data, overwrite=True)

def run(self, img):
imgid = self.get_imgid(img)
Expand Down
3 changes: 3 additions & 0 deletions megaradrp/processing/wcs.py
Expand Up @@ -30,4 +30,7 @@ def update_wcs_from_ipa(hdr, pa):
hdr['PC2_2'] = cos_pa
hdr['PC1_2'] = sin_pa
hdr['PC2_1'] = -sin_pa
# CDELT1 must be negative
hdr['CDELT1'] = -abs(hdr['CDELT1'])

return hdr
12 changes: 11 additions & 1 deletion megaradrp/products/structured.py
Expand Up @@ -11,11 +11,21 @@


import numina.types.structured as structured
import numina.core.tagexpr as tagexpr

import megaradrp.datamodel


class BaseStructuredCalibration(structured.BaseStructuredCalibration):
def __init__(self, instrument='unknown'):
super(BaseStructuredCalibration, self).__init__(instrument)
datamodel = megaradrp.datamodel.MegaraDataModel()
super(BaseStructuredCalibration, self).__init__(instrument, datamodel)
my_tag_table = self.datamodel.query_attrs
objtags = [my_tag_table[t] for t in self.tag_names()]
self.query_expr = tagexpr.query_expr_from_attr(objtags)
self.names_t = self.query_expr.tags()
self.names_f = self.query_expr.fields()

self.total_fibers = 0
self.missing_fibers = []
self.error_fitting = []
Expand Down
6 changes: 6 additions & 0 deletions megaradrp/products/tests/test_tracemap.py
Expand Up @@ -186,6 +186,12 @@ def __init__(self, destination):
assert (traces == state)


def test_query_fields():
    # A fresh TraceMap must expose the same query fields and tags.
    trace_map = tm.TraceMap()
    expected = {'insmode', 'vph'}
    assert trace_map.query_expr.fields() == expected
    assert trace_map.query_expr.tags() == expected


if __name__ == "__main__":
test_load_traceMap()
test_dump_traceMap()

0 comments on commit ed86460

Please sign in to comment.