diff --git a/.readthedocs.yml b/.readthedocs.yml
new file mode 100644
index 00000000..fb113890
--- /dev/null
+++ b/.readthedocs.yml
@@ -0,0 +1,25 @@
+# .readthedocs.yml
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+# Build documentation in the docs/ directory with Sphinx
+sphinx:
+ configuration: doc/conf.py
+
+# Build documentation with MkDocs
+#mkdocs:
+# configuration: mkdocs.yml
+
+# Optionally build your docs in additional formats such as PDF and ePub
+formats:
+ - htmlzip
+
+# Optionally set the version of Python and requirements required to build your docs
+python:
+ version: 3.7
+ install:
+ - requirements: doc/requirements.txt
+
diff --git a/.travis.yml b/.travis.yml
index 5e988c65..496dcf87 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -4,11 +4,15 @@ sudo: false
matrix:
include:
- python: "2.7"
- env: DEPS="numpy scipy astropy==2.0 enum34"
+ env: DEPS="numpy scipy astropy==2.0.16 enum34"
- python: "3.5"
- env: DEPS="numpy scipy astropy==3.0" COVERAGE=true
+ env: DEPS="numpy scipy astropy==3.2.3"
- python: "3.6"
- env: DEPS="numpy scipy astropy==3.0"
+ env: DEPS="numpy scipy astropy==4.0.1"
+ - python: "3.7"
+ env: DEPS="numpy scipy astropy==4.0.1" COVERAGE=true
+ - python: "3.8"
+ env: DEPS="numpy scipy astropy==4.0.1"
cache:
pip: true
@@ -19,15 +23,14 @@ install:
- pip install Cython
- pip install "numpy>=1.16"
- pip install git+git://github.com/guaix-ucm/numina.git#egg=numina
- - pip install $DEPS scikit-image pip pytest setuptools Cython six>=1.7
- - pip install pytest-benchmark pytest-cov
+ - pip install $DEPS scikit-image pytest setuptools "six>=1.7" jsonschema
- pip install pytest-remotedata
- if [[ $COVERAGE == true ]]; then pip install coveralls; fi
- if [[ $COVERAGE == true ]]; then pip install codeclimate-test-reporter; fi
script:
- - py.test -s -v megaradrp
- - if [[ $COVERAGE == true ]]; then py.test megaradrp --cov=megaradrp; fi
+ - pytest megaradrp
+ - if [[ $COVERAGE == true ]]; then coverage run --source megaradrp -m pytest; fi
after_success:
- if [[ $COVERAGE == true ]]; then coveralls; fi
diff --git a/README.rst b/README.rst
index c71b7edc..15ca0bf2 100644
--- a/README.rst
+++ b/README.rst
@@ -3,33 +3,31 @@
MEGARA DRP
==========
-|zenodo|
+|zenodo| |docs| |pypi| |travis| |coveralls|
-.. image:: https://readthedocs.org/projects/megara-drp/badge/?version=latest
- :target: https://readthedocs.org/projects/megara-drp/?badge=latest
- :alt: Documentation Status
-.. image:: https://travis-ci.org/guaix-ucm/megaradrp.svg?branch=master
- :target: https://travis-ci.org/guaix-ucm/megaradrp
+This is Megara DRP, the data reduction pipeline for MEGARA,
+the optical Integral-Field Unit and Multi-Object Spectrograph
+designed for the Gran Telescopio Canarias (GTC).
+
-.. image:: https://coveralls.io/repos/guaix-ucm/megaradrp/badge.svg?branch=master&service=github
- :target: https://coveralls.io/github/guaix-ucm/megaradrp?branch=master
+You can install `megaradrp` using
+the `released code in PyPI <https://pypi.org/project/megaradrp/>`_ or
+the `development version in Github <https://github.com/guaix-ucm/megaradrp>`_.
+The installation instructions are available in the
+`online documentation <https://megaradrp.readthedocs.io/>`_
+or `doc/installation.rst <doc/installation.rst>`_ in the source distribution.
-This is Megara DRP, the data reduction pipeline for MEGARA
+
+Licensing
+---------
Megara DRP is distributed under GNU GPL, either version 3 of the License,
or (at your option) any later version. See the file LICENSE.txt for details.
-Python 2.7 or 3.4 (or greater) is required. Megara DRP requires the following packages
-installed in order to be able to be installed and work properly:
-
- - setuptools (http://peak.telecommunity.com/DevCenter/setuptools)
- - numpy >= 1.7 (http://www.numpy.org/)
- - scipy (http://www.scipy.org)
- - astropy >= 2.0 (http://www.astropy.org/)
- - numina >= 0.21 (http://guaix.fis.ucm.es/projects/numina/)
- - scikit-image (http://scikit-image.org/)
+Authors
+-------
Webpage: https://guaix.fis.ucm.es/megara
@@ -37,3 +35,19 @@ Maintainers: Sergio Pacual sergiopr@fis.ucm.es, Nicolás Cardiel cardiel@ucm.es
.. |zenodo| image:: https://zenodo.org/badge/DOI/10.5281/zenodo.593647.svg
:target: https://zenodo.org/record/593647
+
+.. |docs| image:: https://readthedocs.org/projects/megaradrp/badge/?version=latest
+ :target: https://readthedocs.org/projects/megaradrp/?badge=latest
+ :alt: Documentation Status
+
+.. |pypi| image:: https://badge.fury.io/py/megaradrp.svg
+ :target: https://badge.fury.io/py/megaradrp
+
+.. |travis| image:: https://img.shields.io/travis/guaix-ucm/megaradrp/master?logo=travis%20ci&logoColor=white&label=Travis%20CI
+ :target: https://travis-ci.org/guaix-ucm/megaradrp
+ :alt: megaradrp's Travis CI Status
+
+.. |coveralls| image:: https://coveralls.io/repos/guaix-ucm/megaradrp/badge.svg?branch=master&service=github
+ :target: https://coveralls.io/github/guaix-ucm/megaradrp?branch=master
+ :alt: megaradrp's Coverall Status
+
diff --git a/doc/calibration/index.rst b/doc/calibration/index.rst
index 8d411b45..fec2fdfc 100644
--- a/doc/calibration/index.rst
+++ b/doc/calibration/index.rst
@@ -78,6 +78,5 @@ MEGARA team.
fiberflat
twilight
bpm
- linearity
lcbstd
mosstd
diff --git a/doc/calibration/linearity.rst b/doc/calibration/linearity.ignore
similarity index 100%
rename from doc/calibration/linearity.rst
rename to doc/calibration/linearity.ignore
diff --git a/doc/conf.py b/doc/conf.py
index 53bd0528..50797b41 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -27,8 +27,8 @@
project = u'MEGARA Data Reduction Pipeline'
copyright = u'2013-2020, Universidad Complutense de Madrid'
-version = '0.9'
-release = '0.9.3'
+version = '0.10'
+release = '0.10.1'
show_authors = True
numpydoc_show_class_members = False
diff --git a/doc/installation.rst b/doc/installation.rst
index 3d5d9468..f144157a 100644
--- a/doc/installation.rst
+++ b/doc/installation.rst
@@ -9,13 +9,14 @@ Requirements
The MEGARA Pipeline package requires the following packages installed in order to
be able to be installed and work properly:
- - `python `_ either 2.7 or >= 3.4
+ - `python <https://www.python.org>`_ either 2.7 or >= 3.5
- `setuptools `_
- - `numpy `_ >= 1.7
- - `scipy `_
- - `astropy `_ >= 2.0
+ - `numpy <https://www.numpy.org>`_ >= 1.7
+ - `scipy <https://www.scipy.org>`_
+ - `astropy <https://www.astropy.org>`_ >= 2.0
- `numina `_ >= 0.17
- - `scikit-image `_
+ - `scikit-image <https://scikit-image.org>`_
+ - `jsonschema <https://pypi.org/project/jsonschema/>`_
Additional packages are optionally required:
@@ -27,16 +28,6 @@ Additional packages are optionally required:
Installing MEGARA DRP
*********************
-Using Conda
-===========
-
-`megaradrp` can be installed with conda using a custom channel.
-
-From the shell, execute:::
-
- conda install -c conda-forge megaradrp
-
-
Using pip
=========
To install with pip, simply run:::
@@ -50,6 +41,14 @@ To install with pip, simply run:::
your Numpy installation, which may not always be desired.
+Using Conda
+===========
+
+`megaradrp` can be installed with conda using a custom channel.
+
+From the shell, execute:::
+
+ conda install -c conda-forge megaradrp
Building from source
@@ -119,148 +118,38 @@ It's a great way to quickly test new libraries without cluttering your
global site-packages or run multiple projects on the same machine which
depend on a particular library but not the same version of the library.
-Install virtualenv
-------------------
-I install it with the package system of my OS, so that it ends in my
-global site-packages.
-
-With Fedora/EL is just::
-
- $ sudo yum install python-virtualenv
-
+Since Python version 3.3, there is also a module in the standard library
+called `venv` with roughly the same functionality.
Create virtual environment
--------------------------
-Create the virtual environment enabling the packages already installed
-in the global site-packages via the OS package system. Some requirements
-(in particullar numpy and scipy) are difficult to build: they require
-compiling and external C and FORTRAN libraries to be installed.
-
-So the command is::
-
- $ virtualenv --system-site-packages myenv
+In order to create a virtual environment called e.g. megara using `venv`::
-If you need to create the virtualenv without global packages, drop the
-system-site-packages flag.
+ $ python3 -m venv megara
Activate the environment
------------------------
Once the environment is created, you need to activate it. Just change
-directory into it and load with your command line interpreter the
-script bin/activate.
+directory into it and source the script `bin/activate`.
With bash::
- $ cd myenv
+ $ cd megara
$ . bin/activate
- (myenv) $
+ (megara) $
With csh/tcsh::
- $ cd myenv
+ $ cd megara
$ source bin/activate
- (myenv) $
+ (megara) $
Notice that the prompt changes once you are activate the environment. To
deactivate it just type deactivate::
- (myenv) $ deactivate
+ (megara) $ deactivate
$
-*********************
-Installing MEGARA DFP
-*********************
-
-This section described how to install the MEGARA Pipeline inside
-the GTC Control system.
-
-In the following we assume that we are installing with user `gcsop`.
-
-Login in the `gcsop` account and activate the GTC environment::
-
- $ /opt/gcs/tools/nrp -p linux -s bash
-
-Change working directory to ``/work/gcsop/src_python/gtc``::
-
- $ cd /work/gcsop/src_python/gtc
- $ ls
- AL DSL SSL
-
-We have to install `numina` under `DSL` and `megaradrp` under `AL`.
-
-
-Please refer to :ref:`Numina manual ` to install Numina
-and its dependences under Solaris 10.
-
-Install numina
-==============
-
-First, install all the dependencies:
-
- - setuptools
- - six
- - numpy >= 1.7
- - scipy
- - astropy >= 1.0
- - PyYaml
- - singledispatch
-
-If you are installing a development version, Cython is also required.
-
-Most are available as precompiled packages in Linux.
-Please refer to :ref:`Numina manual ` to install Numina
-and its dependences under Solaris 10.
-
-Then, download the source code, either from PyPI or github::
-
- $ pwd
- /work/gcsop/src_python/gtc/DSL/
- $ git clone https://github.com/guaix-ucm/numina.git
- $ cd numina
-
-Create a file `numina.mod` with the following content::
-
- NAME=numina
- TYPE=device
-
- l:numina:python:y
-
-And then build and install using `nmk`::
-
- $ nmk -t module.rebuild
- $ nmk -t module.install
-
-Install megaradrp
-=================
-
-Change directory to `/work/gcsop/src_python/gtc/AL/` and download the source code
-of `megaradrp`, either from `PyPI `_
-or from `github `_::
-
- $ pwd
- /work/gcsop/src_python/gtc/AL/
- $ git clone https://github.com/guaix-ucm/megaradrp.git
- $ cd megaradrp
-
-Create a file `megaradrp.mod` with the following content::
-
- NAME=megaradrp
- TYPE=device
-
- l:megaradrp:python:y
-
-And then build and install using `nmk`::
-
- $ nmk -t module.rebuild
- $ nmk -t module.install
-
-You can check that everything works by running the `numina` command line tool::
-
- $ numina show-instruments
- Instrument: MEGARA
- has configuration 'default'
- has pipeline 'default', version 1
-
.. _virtualenv: http://pypi.python.org/pypi/virtualenv
.. _sphinx: http://sphinx.pocoo.org
diff --git a/doc/reference/datatype.rst b/doc/reference/datatype.rst
new file mode 100644
index 00000000..6464c138
--- /dev/null
+++ b/doc/reference/datatype.rst
@@ -0,0 +1,9 @@
+==================================================================
+:mod:`megaradrp.datatype` --- MEGARA types of data
+==================================================================
+
+
+.. automodule:: megaradrp.datatype
+ :members:
+ :undoc-members:
+
diff --git a/doc/reference/index.rst b/doc/reference/index.rst
index 0c24957d..10eaa1b6 100644
--- a/doc/reference/index.rst
+++ b/doc/reference/index.rst
@@ -26,8 +26,9 @@ Reference
processing
products
recipes
- types
+ ntypes
datamodel
+ datatype
utils
validators
visualization
diff --git a/doc/reference/instrument.rst b/doc/reference/instrument.rst
index 5d721908..21f91dec 100644
--- a/doc/reference/instrument.rst
+++ b/doc/reference/instrument.rst
@@ -7,5 +7,11 @@
:synopsis: Static configuration
-.. autoclass:: megaradrp.instrument.loader.Loader
+.. automodule:: megaradrp.instrument.components
+ :members:
+.. automodule:: megaradrp.instrument.configs
+ :members:
+
+.. automodule:: megaradrp.instrument.focalplane
+ :members:
diff --git a/doc/reference/types.rst b/doc/reference/ntypes.rst
similarity index 68%
rename from doc/reference/types.rst
rename to doc/reference/ntypes.rst
index 8afd2ae0..b6781030 100644
--- a/doc/reference/types.rst
+++ b/doc/reference/ntypes.rst
@@ -1,7 +1,7 @@
==================================================================
-:mod:`megaradrp.types` --- MEGARA data types
+:mod:`megaradrp.ntypes` --- MEGARA data types
==================================================================
-.. module:: megaradrp.types
+.. module:: megaradrp.ntypes
:synopsis: Data types
diff --git a/doc/reference/products.rst b/doc/reference/products.rst
index e7949c57..8cbf6681 100644
--- a/doc/reference/products.rst
+++ b/doc/reference/products.rst
@@ -6,35 +6,35 @@
.. module:: megaradrp.products
:synopsis: Data products of the MEGARA pipeline
-.. autoclass:: megaradrp.types.MegaraFrame
+.. autoclass:: megaradrp.ntypes.MegaraFrame
-.. autoclass:: megaradrp.types.ProcessedFrame
+.. autoclass:: megaradrp.ntypes.ProcessedFrame
-.. autoclass:: megaradrp.types.ProcessedImage
+.. autoclass:: megaradrp.ntypes.ProcessedImage
-.. autoclass:: megaradrp.types.ProcessedRSS
+.. autoclass:: megaradrp.ntypes.ProcessedRSS
-.. autoclass:: megaradrp.types.ProcessedMultiRSS
+.. autoclass:: megaradrp.ntypes.ProcessedMultiRSS
-.. autoclass:: megaradrp.types.ProcessedSpectrum
+.. autoclass:: megaradrp.ntypes.ProcessedSpectrum
-.. autoclass:: megaradrp.types.ProcessedImageProduct
+.. autoclass:: megaradrp.ntypes.ProcessedImageProduct
-.. autoclass:: megaradrp.types.ProcessedRSSProduct
+.. autoclass:: megaradrp.ntypes.ProcessedRSSProduct
-.. autoclass:: megaradrp.types.ProcessedSpectrumProduct
+.. autoclass:: megaradrp.ntypes.ProcessedSpectrumProduct
-.. autoclass:: megaradrp.types.MasterBPM
+.. autoclass:: megaradrp.ntypes.MasterBPM
-.. autoclass:: megaradrp.types.MasterBias
+.. autoclass:: megaradrp.ntypes.MasterBias
-.. autoclass:: megaradrp.types.MasterDark
+.. autoclass:: megaradrp.ntypes.MasterDark
-.. autoclass:: megaradrp.types.MasterSlitFlat
+.. autoclass:: megaradrp.ntypes.MasterSlitFlat
-.. autoclass:: megaradrp.types.MasterFiberFlat
+.. autoclass:: megaradrp.ntypes.MasterFiberFlat
-.. autoclass:: megaradrp.types.MasterTwilightFlat
+.. autoclass:: megaradrp.ntypes.MasterTwilightFlat
.. autoclass:: megaradrp.products.structured.BaseStructuredCalibration
@@ -44,10 +44,10 @@
.. autoclass:: megaradrp.products.wavecalibration.WavelengthCalibration
-.. autoclass:: megaradrp.types.MasterSensitivity
+.. autoclass:: megaradrp.ntypes.MasterSensitivity
-.. autoclass:: megaradrp.types.ReferenceExtinctionTable
+.. autoclass:: megaradrp.ntypes.ReferenceExtinctionTable
-.. autoclass:: megaradrp.types.ReferenceSpectrumTable
+.. autoclass:: megaradrp.ntypes.ReferenceSpectrumTable
diff --git a/doc/reference/simulation.rst b/doc/reference/simulation.rst
index 6af7e701..0c15a048 100644
--- a/doc/reference/simulation.rst
+++ b/doc/reference/simulation.rst
@@ -8,11 +8,8 @@
.. automodule:: megaradrp.simulation.actions
:members:
-.. automodule:: megaradrp.simulation.atmosphere
- :members:
-.. automodule:: megaradrp.simulation.calibrationunit
- :members:
+
.. automodule:: megaradrp.simulation.control
:members:
@@ -20,14 +17,6 @@
.. automodule:: megaradrp.simulation.convolution
:members:
-.. automodule:: megaradrp.simulation.cover
- :members:
-
-.. automodule:: megaradrp.simulation.detector
- :members:
-
-.. automodule:: megaradrp.simulation.device
- :members:
.. automodule:: megaradrp.simulation.efficiency
:members:
@@ -41,35 +30,14 @@
.. automodule:: megaradrp.simulation.fiberbundle
:members:
-.. automodule:: megaradrp.simulation.fibermos
- :members:
-
.. automodule:: megaradrp.simulation.focalplane
:members:
-.. automodule:: megaradrp.simulation.instrument
- :members:
-
-.. automodule:: megaradrp.simulation.lamps
- :members:
-
.. automodule:: megaradrp.simulation.lightfiber
:members:
-.. automodule:: megaradrp.simulation.psslit
- :members:
-
.. automodule:: megaradrp.simulation.refraction
:members:
-.. automodule:: megaradrp.simulation.shutter
- :members:
-
-.. automodule:: megaradrp.simulation.telescope
- :members:
-
.. automodule:: megaradrp.simulation.vph
:members:
-
-.. automodule:: megaradrp.simulation.wheel
- :members:
\ No newline at end of file
diff --git a/doc/requirements.txt b/doc/requirements.txt
new file mode 100644
index 00000000..cfd070a3
--- /dev/null
+++ b/doc/requirements.txt
@@ -0,0 +1,7 @@
+setuptools
+numpy
+astropy
+scipy
+numina>=0.21
+scikit-image
+numpydoc
diff --git a/doc/testing.rst b/doc/testing.rst
index b1f177e7..400b794f 100644
--- a/doc/testing.rst
+++ b/doc/testing.rst
@@ -8,23 +8,24 @@ This section describes the testing framework and options for testing MEGARA DRP
Running tests
**************
-MEGARA DRP uses `py.test `_ as its testing framework.
+MEGARA DRP uses `pytest <https://docs.pytest.org/>`_ as its testing framework.
+We also require the package `pytest-remotedata` to control access to online
+resources during testing.
As MEGARA DRP does not contain C/Cython extensions, the tests can be run
directly in the source code, as::
cd megaradrp-0.4.0
- cd src
- py.test megaradrp
+ pytest megaradrp
Some of the tests rely on data downloaded from a server. These tests are
skipped by default. To enable them run instead::
- py.test --run-remote megaradrp
+ pytest --remote-data megaradrp
The reduction recipes are tested with remote data. Each recipe is run in
a directory created under the default ``$TMPDIR``, which is based on
the user temporal directory. The base of the created directories can be changed
with the option ``--basetemp=dir``::
- py.test --basetemp=/home/spr/test100 --run-remote megaradrp
+ pytest --basetemp=/home/spr/test100 --remote-data megaradrp
diff --git a/megaradrp/__init__.py b/megaradrp/__init__.py
index 38eadb09..c8bc6901 100644
--- a/megaradrp/__init__.py
+++ b/megaradrp/__init__.py
@@ -12,7 +12,7 @@
import logging
-__version__ = '0.9.3'
+__version__ = '0.10.1'
# Top level NullHandler
diff --git a/megaradrp/core/correctors.py b/megaradrp/core/correctors.py
index 828ae996..d15768c0 100644
--- a/megaradrp/core/correctors.py
+++ b/megaradrp/core/correctors.py
@@ -1,5 +1,5 @@
#
-# Copyright 2011-2018 Universidad Complutense de Madrid
+# Copyright 2011-2019 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -7,7 +7,7 @@
# License-Filename: LICENSE.txt
#
-"""Load image correctos according to present calibrations"""
+"""Load image correctors according to present calibrations"""
import logging
@@ -19,6 +19,7 @@
from megaradrp.processing.trimover import OverscanCorrector, TrimImage
from megaradrp.processing.trimover import GainCorrector
from megaradrp.processing.slitflat import SlitFlatCorrector
+from megaradrp.processing.diffuselight import DiffuseLightCorrector
_logger = logging.getLogger(__name__)
@@ -35,7 +36,9 @@ def get_corrector_bpm(rinput, meta, ins, datamodel):
bpm_corrector = proc.BadPixelCorrector(
mbpm,
datamodel=datamodel,
- calibid=calibid
+ calibid=calibid,
+ hwin=0,
+ wwin=3
)
else:
_logger.info('BPM not provided, ignored')
@@ -92,6 +95,24 @@ def get_corrector_slit_flat(rinput, meta, ins, datamodel):
return corrector
+def get_corrector_diffuse_light(rinput, meta, ins, datamodel):
+ key = 'diffuse_light_image'
+ info = meta.get(key)
+ if info is not None:
+ req = getattr(rinput, key)
+ with req.open() as hdul:
+ _logger.info('loading diffuse light image')
+ _logger.debug('%s image: %s', key, info)
+ mbpm = hdul[0].data
+ calibid = datamodel.get_imgid(hdul)
+ corrector = DiffuseLightCorrector(mbpm, datamodel, calibid=calibid)
+ else:
+ _logger.info('%s not provided, ignored', key)
+ corrector = node.IdNode()
+
+ return corrector
+
+
def get_corrector_overscan(rinput, meta, ins, datamodel):
detconf = ins.get_property('detector.scan')
return OverscanCorrector(
diff --git a/megaradrp/core/recipe.py b/megaradrp/core/recipe.py
index 57ff51f5..77139040 100644
--- a/megaradrp/core/recipe.py
+++ b/megaradrp/core/recipe.py
@@ -1,5 +1,5 @@
#
-# Copyright 2011-2019 Universidad Complutense de Madrid
+# Copyright 2011-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -21,14 +21,7 @@
class MegaraBaseRecipe(BaseRecipe):
- """Base clase for all MEGARA Recipes
-
-
- Parameters
- ----------
- intermediate_results : bool, optional
- If True, save intermediate results of the Recipe
-
+ """Base class for all MEGARA Recipes
Attributes
----------
@@ -46,16 +39,53 @@ class MegaraBaseRecipe(BaseRecipe):
datamodel = MegaraDataModel()
def validate_input(self, recipe_input):
- """Method to customize recipe input validation.
+ """"Validate the input of the recipe"""
+
+ import numina.types.multitype
+ # print('ATTRS', recipe_input.attrs())
+ # print('STORED', recipe_input.stored())
+ # print('tag_names', recipe_input.tag_names())
+
+ # Find reference tags
+ ref_tags = {}
+ for key, val in recipe_input.attrs().items():
+ if key == 'obresult':
+ ref_tags = val.tags
+ break
+
+ # super(MegaraBaseRecipe, self).validate_input(recipe_input)
+ # check all the rest against reference tags
+ stored = recipe_input.stored()
+ attrs = recipe_input.attrs()
+ val_results = []
+ for key, val in attrs.items():
+ if val is None:
+ continue
+ # If we evaluate query_expr with ref_tags and the tags from the object
+ # The result must be true
+ req = stored[key]
+ rtype = req.type
+ # TODO: develop a method to select images that are valid
+ # for different filters or insmodes; perhaps a ANY keyword
+ tags = rtype.extract_tags(val)
+ # FIXME: this should be handled by the type, not with a special case
+ if isinstance(rtype, numina.types.multitype.MultiType):
+ # get query from first node
+ query_expr = rtype.node_type[0].query_expr
+ else:
+ query_expr = rtype.query_expr
- See Also
- --------
- numina.core.validator.validate
+ q2 = query_expr.fill_placeholders(**ref_tags)
+ self.logger.debug('type %s with tags %s, expr %s', rtype, tags, q2)
+ is_valid = q2.eval(**tags)
+ if not is_valid:
+ val_results.append((key, q2, tags, ref_tags))
+ msg = 'invalid {} with expression {} and tags {} and obs_tags {}'
+ for key, q2, tags, ref_tags in val_results:
+ self.logger.error(msg.format(key, q2, tags, ref_tags))
+ if val_results:
+ raise ValueError('Validation error', val_results)
- """
- self.logger.info('start validating input')
- super(MegaraBaseRecipe, self).validate_input(recipe_input)
- self.logger.info('end validating input')
def run_qc(self, recipe_input, recipe_result):
"""Run Quality Control checks."""
@@ -63,11 +93,13 @@ def run_qc(self, recipe_input, recipe_result):
return recipe_result
def types_getter(self):
- from megaradrp.types import MasterBias, MasterDark, MasterBPM, MasterSlitFlat
- imgtypes = [MasterBPM, MasterBias, MasterDark, MasterSlitFlat]
+ from megaradrp.ntypes import MasterBias, MasterDark, MasterBPM, MasterSlitFlat
+ from megaradrp.ntypes import DiffuseLightCorrection
+ imgtypes = [MasterBPM, MasterBias, MasterDark, MasterSlitFlat, DiffuseLightCorrection]
getters = [cor.get_corrector_bpm, cor.get_corrector_bias,
[cor.get_corrector_dark, cor.get_corrector_gain],
- cor.get_corrector_slit_flat
+ cor.get_corrector_slit_flat,
+ cor.get_corrector_diffuse_light,
]
return imgtypes, getters
diff --git a/megaradrp/core/tests/test_utils.py b/megaradrp/core/tests/test_utils.py
new file mode 100644
index 00000000..addeaec3
--- /dev/null
+++ b/megaradrp/core/tests/test_utils.py
@@ -0,0 +1,32 @@
+
+import numpy
+import pytest
+
+from ..utils import atleast_2d_last
+
+@pytest.mark.parametrize("arr, shape, ndim", [
+ ([1, 2, 3], (3, 1), 2),
+ ([1, 2, 3, 789], (4, 1), 2),
+ (4, (1, 1), 2),
+ ([[1,2,3], [4,5,6]], (2,3), 2),
+ (numpy.empty((3,3,3)), (3,3,3), 3)
+])
+def test_utils_atleast1(arr, shape, ndim):
+ # With one input, return the object
+ res = atleast_2d_last(arr)
+ print(res, res.shape)
+ assert res.shape == shape
+ assert res.ndim == ndim
+
+@pytest.mark.parametrize("arrs, shapes, ndims", [
+ ([[1, 2, 3], [1, 2, 3, 789], 4, [[1, 2, 3], [4, 5, 6]], numpy.empty((3, 3, 3))],
+ [(3, 1), (4, 1), (1, 1), (2, 3), (3, 3, 3)],
+ [2, 2, 2, 2, 3])
+])
+def test_utils_atleast2(arrs, shapes, ndims):
+ # With more than one, a list
+ res = atleast_2d_last(*arrs)
+ assert isinstance(res, list)
+ for el, shape, ndim in zip(res, shapes, ndims):
+ assert el.shape == shape
+ assert el.ndim == ndim
diff --git a/megaradrp/core/utils.py b/megaradrp/core/utils.py
new file mode 100644
index 00000000..de925ec3
--- /dev/null
+++ b/megaradrp/core/utils.py
@@ -0,0 +1,28 @@
+#
+# Copyright 2017-2020 Universidad Complutense de Madrid
+#
+# This file is part of Megara DRP
+#
+# SPDX-License-Identifier: GPL-3.0+
+# License-Filename: LICENSE.txt
+#
+
+import numpy
+
+
+def atleast_2d_last(*arys):
+ """Equivalent to atleast_2d, adding the newaxis at the end"""
+ res = []
+ for ary in arys:
+ ary = numpy.asanyarray(ary)
+ if len(ary.shape) == 0:
+ result = ary.reshape(1, 1)
+ elif len(ary.shape) == 1:
+ result = ary[:, numpy.newaxis]
+ else:
+ result = ary
+ res.append(result)
+ if len(res) == 1:
+ return res[0]
+ else:
+ return res
diff --git a/megaradrp/datamodel.py b/megaradrp/datamodel.py
index a57c7684..a9b48018 100644
--- a/megaradrp/datamodel.py
+++ b/megaradrp/datamodel.py
@@ -1,5 +1,5 @@
#
-# Copyright 2016-2019 Universidad Complutense de Madrid
+# Copyright 2016-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -13,15 +13,18 @@
import re
import pkgutil
-import enum
+import logging
import astropy.io.fits as fits
-import astropy.table
from six import StringIO
from numina.datamodel import DataModel, QueryAttribute, KeyDefinition
from numina.util.convert import convert_date
-import megaradrp.instrument as megins
+from megaradrp.datatype import MegaraDataType, DataOrigin
+import megaradrp.instrument.constants as cons
+
+
+_logger = logging.getLogger(__name__)
class MegaraDataModel(DataModel):
@@ -89,7 +92,7 @@ class MegaraDataModel(DataModel):
'imgid'
]
- PLATESCALE = megins.MEGARA_PLATESCALE
+ PLATESCALE = cons.GTC_FC_A_PLATESCALE.value
def __init__(self):
@@ -112,40 +115,9 @@ def __init__(self):
instrument_mappings
)
- def get_imgid(self, img):
- hdr = self.get_header(img)
- if 'UUID' in hdr:
- return 'uuid:{}'.format(hdr['UUID'])
- elif 'DATE-OBS' in hdr:
- return 'dateobs:{}'.format(hdr['DATE-OBS'])
- else:
- return super(MegaraDataModel, self).get_imgid(img)
-
def get_fiberconf(self, img):
"""Obtain FiberConf from image"""
- main_insmode = img[0].header.get('INSMODE', '')
- if 'FIBERS' in img:
- # We have a 'fibers' extension
- # Information os there
- hdr_fiber = img['FIBERS'].header
- return read_fibers_extension(hdr_fiber, insmode=main_insmode)
- else:
- return self.get_fiberconf_default(main_insmode)
-
- def get_fiberconf_default(self, insmode):
- """Obtain default FiberConf object"""
- if insmode == 'LCB':
- slit_file = 'lcb_default_header.txt'
- elif insmode == 'MOS':
- slit_file = 'mos_default_header.txt'
- else:
- # Read fiber info from headers
- raise ValueError('Invalid INSMODE {}'.format(insmode))
-
- data = pkgutil.get_data('megaradrp.instrument.configs', slit_file)
- default_hdr = StringIO(data.decode('utf8'))
- hdr_fiber = fits.header.Header.fromfile(default_hdr)
- return read_fibers_extension(hdr_fiber)
+ return get_fiberconf(img)
def gather_info_oresult(self, val):
return [self.gather_info_dframe(f) for f in val.images]
@@ -158,169 +130,45 @@ def fiber_scale_unit(self, img, unit=False):
else:
scale = self.PLATESCALE
if unit:
- return (scale, funit)
+ return scale, funit
else:
return scale
-class FibersConf(object):
- """Global configuration of the fibers"""
- def __init__(self):
- self.name = ""
- self.conf_id = 1
- self.nbundles = 0
- self.nfibers = 0
- self.bundles = {}
- self.fibers = {}
- self.funit = "mm"
-
- def sky_fibers(self, valid_only=False, ignored_bundles=None):
- result = []
- if ignored_bundles is None:
- ignored_bundles = []
-
- for bundle in self.bundles.values():
- if bundle.id in ignored_bundles:
- continue
- if bundle.target_type is TargetType.SKY:
- if valid_only:
- for fib in bundle.fibers.values():
- if fib.valid:
- result.append(fib.fibid)
- else:
- result.extend(bundle.fibers.keys())
- return result
-
- def conected_fibers(self, valid_only=False):
-
- if self.name == 'MOS':
- raise ValueError('not working for MOS')
-
- result = []
- for bundle in self.bundles.values():
- if bundle.target_type is not TargetType.SKY:
- if valid_only:
- for fib in bundle.fibers.values():
- if fib.valid:
- result.append(fib)
- else:
- result.extend(bundle.fibers.values())
- return result
-
- def inactive_fibers(self):
- result = []
- for fiber in self.fibers.values():
- if fiber.inactive:
- result.append(fiber.fibid)
- return result
-
- def active_fibers(self):
- result = []
- for fiber in self.fibers.values():
- if not fiber.inactive:
- result.append(fiber.fibid)
- return result
-
- def valid_fibers(self):
- result = []
- for fiber in self.fibers.values():
- if fiber.valid:
- result.append(fiber.fibid)
- return result
-
- def invalid_fibers(self):
- result = []
- for fiber in self.fibers.values():
- if not fiber.valid:
- result.append(fiber.fibid)
- return result
-
- def spectral_coverage(self):
- lowc = []
- upperc = []
- for fibid, r in self.fibers.items():
- if r.w1:
- lowc.append(r.w1)
- if r.w2:
- upperc.append(r.w2)
-
- mn = max(lowc)
- nn = min(lowc)
-
- mx = min(upperc)
- nx = max(upperc)
- return (mn, mx), (nn, nx)
-
- def bundles_to_table(self):
- """Convert bundles to a Table"""
- attrnames = ['id', 'x', 'y', 'pa', 'enabled',
- 'target_type', 'target_priority', 'target_name']
- cnames = ['bundle_id', 'x', 'y', 'pa', 'enabled',
- 'target_type', 'target_priority', 'target_name']
- obj_data = {}
- for a, c in zip(attrnames, cnames):
- obj_data[c] = [getattr(ob, a) for ob in self.bundles.values()]
- result = astropy.table.Table(obj_data, names=cnames)
- result['x'].unit = self.funit
- result['y'].unit = self.funit
- result['pa'].unit = 'deg'
- return result
-
- def fibers_to_table(self):
- """Convert fibers to a Table"""
- attrnames = ['fibid', 'name', 'x', 'y', 'inactive', 'valid',
- 'bundle_id']
- cnames = ['fibid', 'name', 'x', 'y', 'inactive', 'valid',
- 'bundle_id']
- obj_data = {}
-
- for a, c in zip(attrnames, cnames):
- obj_data[c] = [getattr(ob, a) for ob in self.fibers.values()]
- result = astropy.table.Table(obj_data, names=cnames)
- result['x'].unit = self.funit
- result['y'].unit = self.funit
- return result
-
-
-
-class TargetType(enum.Enum):
- """Possible targest in a fiber bundle"""
- SOURCE = 1
- UNKNOWN = 2
- UNASSIGNED = 3
- SKY = 4
- REFERENCE = 5
- # aliases for the other fields
- STAR = 5
- BLANK = 4
-
-
-class BundleConf(object):
- """Description of a bundle"""
- def __init__(self):
- self.id = 0
- self.target_type = TargetType.UNASSIGNED
- self.target_priority = 0
- self.target_name = 'unknown'
- self.x_fix = 0
- self.y_fix = 0
- self.pa_fix = 0
- self.x = 0
- self.y = 0
- self.pa = 0
- self.enabled = True
-
-
-class FiberConf(object):
- """Description of the fiber"""
- def __init__(self):
- self.fibid = 0
- self.name = 'unknown'
- self.bundle_id = None
- self.inactive = False
- self.valid = True
- self.x = 0.0
- self.y = 0.0
+def get_fiberconf(img):
+ """Obtain FiberConf from image"""
+
+ main_insmode = img[0].header.get('INSMODE', '')
+
+ if 'FIBERS' in img:
+ # We have a 'fibers' extension
+ # Information is there
+ hdr_fiber = img['FIBERS'].header
+ return read_fibers_extension(hdr_fiber, insmode=main_insmode)
+ else:
+ return get_fiberconf_default(main_insmode)
+
+
+def create_default_fiber_header(insmode):
+ """Obtain default FIBER header"""
+ if insmode == 'LCB':
+ slit_file = 'lcb_default_header.txt'
+ elif insmode == 'MOS':
+ slit_file = 'mos_default_header.txt'
+ else:
+ # Read fiber info from headers
+ raise ValueError('Invalid INSMODE {}'.format(insmode))
+
+ data = pkgutil.get_data('megaradrp.instrument.configs', slit_file)
+ default_hdr = StringIO(data.decode('utf8'))
+ hdr_fiber = fits.header.Header.fromfile(default_hdr)
+ return hdr_fiber
+
+
+def get_fiberconf_default(insmode):
+ """Obtain default FiberConf object"""
+ hdr_fiber = create_default_fiber_header(insmode)
+ return read_fibers_extension(hdr_fiber)
def read_fibers_extension(hdr, insmode='LCB'):
@@ -328,9 +176,9 @@ def read_fibers_extension(hdr, insmode='LCB'):
Parameters
==========
- hdr:
+ hdr :
FITS header
- insmode: str
+ insmode : str
default INSMODE
Returns
@@ -339,80 +187,155 @@ def read_fibers_extension(hdr, insmode='LCB'):
"""
- conf = FibersConf()
- defaults = {}
- defaults['LCB'] = (9, 623)
- defaults['MOS'] = (92, 644)
-
- if insmode not in ['LCB', 'MOS']:
- raise ValueError('insmode %s not in [LCB, MOS]' % insmode)
-
- conf.name = hdr.get('INSMODE', insmode)
- conf.conf_id = hdr.get('CONFID', 1)
- conf.nbundles = hdr.get('NBUNDLES', defaults[insmode][0])
- conf.nfibers = hdr.get('NFIBERS', defaults[insmode][1])
- conf.funit = funit = hdr.get("FUNIT", "arcsec")
- # Read bundles
-
- bun_ids = []
- fib_ids = []
- bundles = conf.bundles
- fibers = conf.fibers
-
- # loop over everything, count BUN%03d_P and FIB%03d_B
- pattern1 = re.compile(r"BUN(\d+)_P")
- pattern2 = re.compile(r"FIB(\d+)_B")
- for key in hdr:
- bun_re = pattern1.match(key)
- fib_re = pattern2.match(key)
- if bun_re:
- bun_idx = int(bun_re.group(1))
- bun_ids.append(bun_idx)
- elif fib_re:
- fib_idx = int(fib_re.group(1))
- fib_ids.append(fib_idx)
-
- for i in bun_ids:
- bb = BundleConf()
- bb.id = i
- bb.target_priority = hdr["BUN%03d_P" % i]
- bb.target_name = hdr["BUN%03d_I" % i]
- bb.target_type = TargetType[hdr["BUN%03d_T" % i]]
- bb.enabled = hdr.get("BUN%03d_E" % i, True)
- bb.x = hdr.get("BUN%03d_X" % i, 0.0)
- bb.y = hdr.get("BUN%03d_Y" % i, 0.0)
- bb.pa = hdr.get("BUN%03d_O" % i, 0.0)
- bb.fibers = {}
- bundles[i] = bb
-
- for fibid in fib_ids:
- ff = FiberConf()
- ff.fibid = fibid
-
- # Coordinates
- ff.d = hdr["FIB%03d_D" % fibid]
- ff.r = hdr["FIB%03d_R" % fibid]
- ff.o = 0 #hdr["FIB%03d_O" % fibid]
- # Active
- ff.inactive = not hdr["FIB%03d_A" % fibid]
-
- # Coordinates XY
- ff.x = hdr["FIB%03d_X" % fibid]
- ff.y = hdr["FIB%03d_Y" % fibid]
-
- ff.bundle_id = hdr["FIB%03d_B" % fibid]
- ff.name = hdr.get("FIB%03d_N" % fibid, 'unknown')
-
- ff.w1 = hdr.get("FIB%03dW1" % fibid, None)
- ff.w2 = hdr.get("FIB%03dW2" % fibid, None)
-
- # Validity
- if ff.inactive:
- ff.valid = False
+ import megaradrp.instrument.focalplane as fp
+ return fp.FocalPlaneConf.from_header(hdr)
+
+
+
+def describe_hdulist_megara(hdulist):
+ prim = hdulist[0].header
+ instrument = prim.get("INSTRUME", "unknown")
+ image_type = prim.get("IMAGETYP")
+ if image_type is None:
+ # try this also
+ image_type = prim.get("NUMTYPE")
+
+ # date_obs = convert_date(prim.get("DATE-OBS"))
+ date_obs = prim.get("DATE-OBS")
+ img_uuid = prim.get("UUID")
+ insconf = prim.get("INSCONF", 'undefined')
+
+ if image_type is None:
+ # infer from header
+ datatype = megara_inferr_datetype_from_image(hdulist)
+ else:
+ datatype = MegaraDataType[image_type]
+
+ obs = {}
+ proc = {}
+ if datatype.value < MegaraDataType.IMAGE_PROCESSED.value:
+ origin = DataOrigin.OBSERVED
+ else:
+ origin = DataOrigin.PROCESSED
+
+ return {'instrument': instrument, 'datatype': datatype,
+ 'origin': origin, 'uuid': img_uuid,
+ 'insconf': insconf,
+ 'observation': obs,
+ 'observation_date': date_obs,
+ 'processing': proc
+ }
+
+
+def megara_inferr_datatype(obj):
+
+ if isinstance(obj, fits.HDUList):
+ return megara_inferr_datetype_from_image(obj)
+ elif isinstance(obj, dict):
+ return megara_inferr_datetype_from_dict(obj)
+ else:
+ raise TypeError("I don't know how to inferr datatype from {}".format(obj))
+
+
+def megara_inferr_datetype_from_dict(obj):
+ # this comes from JSON
+ dtype = obj['type_fqn']
+ if dtype in ["megaradrp.products.tracemap.TraceMap"]:
+ return MegaraDataType.TRACE_MAP
+ elif dtype in ["megaradrp.products.modelmap.ModelMap"]:
+ return MegaraDataType.MODEL_MAP
+ elif dtype in ["megaradrp.products.wavecalibration.WavelengthCalibration"]:
+ return MegaraDataType.WAVE_CALIB
+ else:
+ return MegaraDataType.UNKNOWN
+
+
+def megara_inferr_datetype_from_image(hdulist):
+ IMAGE_RAW_SHAPE = (4212, 4196)
+ IMAGE_PROC_SHAPE = (4112, 4096)
+ RSS_IFU_PROC_SHAPE = (623, 4096)
+ RSS_MOS_PROC_SHAPE = (644, 4096)
+ RSS_IFU_PROC_WL_SHAPE = (623, 4300)
+ RSS_MOS_PROC_WL_SHAPE = (644, 4300)
+ SPECTRUM_PROC_SHAPE = (4300,)
+ prim = hdulist[0].header
+
+ image_type = prim.get("IMAGETYP")
+ if image_type is None:
+ # try this also
+ image_type = prim.get("NUMTYPE")
+
+ if image_type is not None:
+ datatype = MegaraDataType[image_type]
+ return datatype
+
+ pshape = hdulist[0].shape
+ obsmode = prim.get("OBSMODE", "unknown")
+ if pshape == IMAGE_RAW_SHAPE:
+ datatype = MegaraDataType.IMAGE_RAW
+ elif pshape == IMAGE_PROC_SHAPE:
+ datatype = MegaraDataType.IMAGE_PROCESSED
+ elif pshape == RSS_IFU_PROC_SHAPE: # IFU
+ datatype = MegaraDataType.RSS_PROCESSED
+ elif pshape == RSS_MOS_PROC_SHAPE: # MOS
+ datatype = MegaraDataType.RSS_PROCESSED
+ elif pshape == RSS_IFU_PROC_WL_SHAPE: # IFU
+ datatype = MegaraDataType.RSS_WL_PROCESSED
+ elif pshape == RSS_MOS_PROC_WL_SHAPE: # MOS
+ datatype = MegaraDataType.RSS_WL_PROCESSED
+ elif pshape == SPECTRUM_PROC_SHAPE:
+ datatype = MegaraDataType.SPEC_PROCESSED
+ else:
+ datatype = MegaraDataType.UNKNOWN
+
+ if datatype == MegaraDataType.IMAGE_RAW:
+ if obsmode in ["MegaraSuccess", "MegaraFail"]:
+ sub_datatype = MegaraDataType.IMAGE_TEST
+ elif obsmode in ["MegaraBiasImage"]:
+ sub_datatype = MegaraDataType.IMAGE_BIAS
+ elif obsmode in ["MegaraDarkImage"]:
+ sub_datatype = MegaraDataType.IMAGE_DARK
+ elif obsmode in ["MegaraSlitFlat"]:
+ sub_datatype = MegaraDataType.IMAGE_SLITFLAT
+ elif obsmode in ["MegaraFiberFlatImage", "MegaraTraceMap", "MegaraModelMap"]:
+ sub_datatype = MegaraDataType.IMAGE_FLAT
+ elif obsmode in ["MegaraArcCalibration"]:
+ sub_datatype = MegaraDataType.IMAGE_COMP
+ elif obsmode in ["MegaraTwilightFlatImage"]:
+ sub_datatype = MegaraDataType.IMAGE_TWILIGHT
+ elif obsmode in ["MegaraLcbImage", "MegaraMosImage"]:
+ sub_datatype = MegaraDataType.IMAGE_TARGET
+ elif obsmode in ["MegaraMosStdStar", "MegaraExtinctionStar",
+ "MegaraLcbStdStar", "MegaraSensitivityStar"]:
+ sub_datatype = MegaraDataType.IMAGE_TARGET
+ elif obsmode in ["MegaraFocusTelescope",
+ "MegaraLcbAcquisition", "MegaraMosAcquisition"]:
+ sub_datatype = MegaraDataType.IMAGE_TARGET
+ elif obsmode in ["MegaraBadPixelMask", "MegaraFocusSpectrograph"]:
+ sub_datatype = MegaraDataType.IMAGE_RAW
else:
- ff.valid = hdr.get("FIB%03d_V" % fibid, True)
+ sub_datatype = MegaraDataType.UNKNOWN
- bundles[ff.bundle_id].fibers[ff.fibid] = ff
- fibers[ff.fibid] = ff
-
- return conf
+ return sub_datatype
+ elif datatype == MegaraDataType.SPEC_PROCESSED:
+ numrnam = prim.get("NUMRNAM", "unknown")
+ if numrnam in ['LCBStandardRecipe', 'MOSStandardRecipe']:
+ sub_datatype = MegaraDataType.MASTER_SENSITIVITY
+ else:
+ sub_datatype = datatype
+ return sub_datatype
+ return datatype
+
+
+def check_obj_megara(obj, astype=None, level=None):
+ import megaradrp.validators as val
+ if astype is None:
+ datatype = megara_inferr_datatype(obj)
+ _logger.debug('check object as it says it is ({})'.format(datatype))
+ thistype = datatype
+ else:
+ _logger.debug('check object as {}'.format(astype))
+ thistype = astype
+ checker = val.check_as_datatype(thistype)
+ res = checker(obj, level=level)
+ return res
diff --git a/megaradrp/datatype.py b/megaradrp/datatype.py
new file mode 100644
index 00000000..d17ff387
--- /dev/null
+++ b/megaradrp/datatype.py
@@ -0,0 +1,43 @@
+
+import enum
+
+
+class DataOrigin(enum.Enum):
+ UNKNOWN = 0
+ OBSERVED = 1
+ PROCESSED = 2
+ GENERATED = 3
+
+
+class MegaraDataType(enum.Enum):
+ UNKNOWN = 1
+ IMAGE_RAW = 100
+ IMAGE_BIAS = 102
+ IMAGE_DARK = 103
+ IMAGE_SLITFLAT = 104
+ IMAGE_FLAT = 105
+ IMAGE_COMP = 106
+ #
+ IMAGE_TWILIGHT = 107
+ IMAGE_TEST = 109
+ IMAGE_TARGET = 150
+ #
+ IMAGE_PROCESSED = 200
+ MASTER_BPM = 201
+ MASTER_BIAS = 202
+ MasterBias = 202 # Alias
+ MASTER_DARK = 203
+ MASTER_SLITFLAT = 204
+ DIFFUSE_LIGHT = 211
+ #
+ RSS_PROCESSED = 300
+ MASTER_FLAT = 305
+ MasterFiberFlat = 305 # Alias
+ MASTER_TWILIGHT = 306
+ RSS_WL_PROCESSED = 400
+ SPEC_PROCESSED = 500
+ MASTER_SENSITIVITY = 503
+ STRUCT_PROCESSED = 600
+ TRACE_MAP = 601
+ MODEL_MAP = 602
+ WAVE_CALIB = 603
\ No newline at end of file
diff --git a/megaradrp/drp.yaml b/megaradrp/drp.yaml
index 0d4b7ce7..b8611db7 100644
--- a/megaradrp/drp.yaml
+++ b/megaradrp/drp.yaml
@@ -13,110 +13,134 @@ modes:
description: A mode where the recipe successes, used for testing
key: MegaraSuccess
tagger: null
+ rawimage: IMAGE_TEST
- name: Fail
summary: A mode where the recipe fails
description: A mode where the recipe fails, used for testing
key: MegaraFail
tagger: null
+ rawimage: IMAGE_TEST
- name: Bias Image
summary: Recipe to process bias images
description: Recipe to process bias images
key: MegaraBiasImage
+ rawimage: IMAGE_BIAS
tagger: null
- name: Dark current Image
summary: Summary of Dark current Image
description: Lines and mode lines
key: MegaraDarkImage
tagger: null
+ rawimage: IMAGE_DARK
- name: Fiber Flat Image
summary: Summary of Fiber Flat Image
description: Lines and mode lines
key: MegaraFiberFlatImage
tagger: null
+ rawimage: IMAGE_FLAT
+ validator: megaradrp.validators.validate_flat
- name: Image with the fiber MOS
summary: Summary of Fiber MOS image
description: Lines and mode lines
key: MegaraMosImage
tagger: null
+ rawimage: IMAGE_TARGET
- name: Trace Map
summary: Summary of Fiber MOS image
description: Lines and mode lines
key: MegaraTraceMap
tagger: null
+ rawimage: IMAGE_FLAT
- name: Arc Calibration
summary: Summary of Arc Calibration
description: Process an Arc image
key: MegaraArcCalibration
tagger: null
+ rawimage: IMAGE_COMP
+ validator: megaradrp.validators.validate_arc
- name: Bad Pixel Mask
summary: Bad Pixel Mask
description: Bad Pixels
key: MegaraBadPixelMask
tagger: null
+ rawimage: IMAGE_TEST
- name: Slit Flat
summary: Slit Flat
description: Slit Flat
key: MegaraSlitFlat
tagger: null
+ rawimage: IMAGE_SLITFLAT
- name: ModelMap
summary: Computes a ModelMap
description: Computes a ModelMap from FlatImages
key: MegaraModelMap
tagger: null
+ rawimage: IMAGE_FLAT
- key: MegaraFocusSpectrograph
name: Focus Spectrograph
tagger: null
+ rawimage: IMAGE_TEST
validator: megaradrp.validators.validate_focus
- name: Twillight fiber flat
summary: Twillight fiber flat spectrum
description: Twillight fiber flat spectrum
key: MegaraTwilightFlatImage
tagger: null
+ rawimage: IMAGE_TWILIGHT
- name: Image with the LCB
summary: Image with the LCB
description: Image with the LCB
key: MegaraLcbImage
tagger: null
+ rawimage: IMAGE_TARGET
- name: Image with the MOS
summary: Image with the MOS
description: Image with the MOS
key: MegaraMosImage
tagger: null
+ rawimage: IMAGE_TARGET
- name: Extinction Star Recipe
summary: Extinction Star Recipe
description: Extinction Star Recipe
key: MegaraExtinctionStar
tagger: null
+ rawimage: IMAGE_TARGET
- name: LCB Standard Recipe
summary: LCB Standard Recipe
description: LCB Standard Recipe
key: MegaraLcbStdStar
tagger: null
+ rawimage: IMAGE_TARGET
- name: MOS Standard Recipe
summary: MOS Standard Recipe
description: MOS Standard Recipe
key: MegaraMosStdStar
tagger: null
+ rawimage: IMAGE_TARGET
- name: Sensivity Star Recipe
summary: Sensivity Star Recipe
description: Sensivity Star Recipe
key: MegaraSensitivityStar
tagger: null
+ rawimage: IMAGE_TARGET
- name: Telescope Focus
summary: Telescope Focus
description: Telescope Focus
key: MegaraFocusTelescope
tagger: null
+ rawimage: IMAGE_TEST
- name: Acquisition with the LCB IFU
summary: Acquisition with the LCB IFU
description: Acquisition with the LCB IFU
key: MegaraLcbAcquisition
tagger: null
+ rawimage: IMAGE_TEST
- name: Acquisition with the Fiber MOS
summary: Acquisition with the Fiber MOS
description: Acquisition with the Fiber MOS
key: MegaraMosAcquisition
tagger: null
+ rawimage: IMAGE_TEST
pipelines:
default:
version: 1
diff --git a/megaradrp/instrument/__init__.py b/megaradrp/instrument/__init__.py
index 110f47ca..7beb0eb9 100644
--- a/megaradrp/instrument/__init__.py
+++ b/megaradrp/instrument/__init__.py
@@ -8,11 +8,6 @@
#
-MEGARA_PLATESCALE = 1.2120 # arcsec / mm
-
-MEGARA_IAA = -163.854 # deg
-
-
# Values for recipe Trace
# Relative threshold for each VPH in LCB
vph_thr = {
diff --git a/megaradrp/instrument/components/fibermos.py b/megaradrp/instrument/components/fibermos.py
index 604dcfec..ae6741f6 100644
--- a/megaradrp/instrument/components/fibermos.py
+++ b/megaradrp/instrument/components/fibermos.py
@@ -12,7 +12,7 @@
import numpy
from numina.instrument.hwdevice import HWDevice
-from megaradrp.datamodel import TargetType
+from megaradrp.instrument.focalplane import TargetType
class RoboticPositioner(HWDevice):
diff --git a/megaradrp/instrument/components/tests/__init__.py b/megaradrp/instrument/components/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/megaradrp/instrument/components/tests/test_detector.py b/megaradrp/instrument/components/tests/test_detector.py
new file mode 100644
index 00000000..6f785cd8
--- /dev/null
+++ b/megaradrp/instrument/components/tests/test_detector.py
@@ -0,0 +1,26 @@
+
+from ..detector import MegaraDetector, ReadParams
+
+
+def create_detector():
+ DSHAPE = (2056 * 2, 2048 * 2)
+ PSCAN = 50
+ OSCAN = 50
+ qe = 1.0
+ dcurrent = 3.0 / 3600
+
+ readpars1 = ReadParams(gain=1.0, ron=2.0, bias=1000.0)
+ readpars2 = ReadParams(gain=1.0, ron=2.0, bias=1005.0)
+
+ detector = MegaraDetector(
+ 'Detector',
+ DSHAPE, OSCAN, PSCAN, qe=qe, dark=dcurrent,
+ readpars1=readpars1, readpars2=readpars2, bins='11'
+ )
+ return detector
+
+
+def test_detector_shape():
+ det = create_detector()
+ img = det.readout()
+ assert img.shape == (4212, 4196)
\ No newline at end of file
diff --git a/megaradrp/instrument/components/tests/test_shutter.py b/megaradrp/instrument/components/tests/test_shutter.py
new file mode 100644
index 00000000..497a528e
--- /dev/null
+++ b/megaradrp/instrument/components/tests/test_shutter.py
@@ -0,0 +1,28 @@
+import pytest
+
+from ..shutter import MegaraShutter
+
+@pytest.fixture
+def shutter_dev():
+ wheel = MegaraShutter()
+ return wheel
+
+
+def test_shutter1(shutter_dev):
+ curr = shutter_dev.current()
+ assert shutter_dev.pos() == 1
+ assert curr.name == 'OPEN'
+
+
+def test_shutter_open(shutter_dev):
+ shutter_dev.open()
+ curr = shutter_dev.current()
+ assert shutter_dev.pos() == 1
+ assert curr.name == 'OPEN'
+
+
+def test_shutter_stop(shutter_dev):
+ shutter_dev.close()
+ curr = shutter_dev.current()
+ assert shutter_dev.pos() == 0
+ assert curr.name == 'STOP'
diff --git a/megaradrp/instrument/components/tests/test_wheel.py b/megaradrp/instrument/components/tests/test_wheel.py
new file mode 100644
index 00000000..20df28de
--- /dev/null
+++ b/megaradrp/instrument/components/tests/test_wheel.py
@@ -0,0 +1,23 @@
+import pytest
+
+from ..wheel import VPHWheel
+
+
+class Vph(object):
+ def __init__(self, name):
+ self.name = name
+
+
+@pytest.fixture
+def wheel_dev():
+ wheel = VPHWheel(3)
+ for idx in range(3):
+ wheel.put_in_pos(Vph(idx), idx)
+ return wheel
+
+
+def test_wheel(wheel_dev):
+ curr = wheel_dev.current()
+ assert isinstance(curr, Vph)
+ assert curr.name == 0
+ assert wheel_dev.pos() == 0
\ No newline at end of file
diff --git a/megaradrp/instrument/configs/component-2e02e135-2325-47c9-9975-466b445b0b8b.json b/megaradrp/instrument/configs/component-2e02e135-2325-47c9-9975-466b445b0b8b.json
index 98e8b389..203ca5d7 100644
--- a/megaradrp/instrument/configs/component-2e02e135-2325-47c9-9975-466b445b0b8b.json
+++ b/megaradrp/instrument/configs/component-2e02e135-2325-47c9-9975-466b445b0b8b.json
@@ -10,10 +10,10 @@
"trim1": [[0,2056],[50,4146]],
"trim2": [[2156,4212],[50,4146]],
"bng": [1,1],
- "overscan1": [[0,2056],[4149,4196]],
+ "overscan1": [[0,2056],[4146,4196]],
"overscan2": [[2156,4212],[0,50]],
"prescan1": [[0,2056],[0,50]],
- "prescan2": [[2156,4212],[4145,4196]],
+ "prescan2": [[2156,4212],[4146,4196]],
"middle1": [[2056,2106],[50,4146]],
"middle2": [[2106,2156],[50,4146]],
"gain1": 1.73,
diff --git a/megaradrp/instrument/constants/__init__.py b/megaradrp/instrument/constants/__init__.py
new file mode 100644
index 00000000..37a926a6
--- /dev/null
+++ b/megaradrp/instrument/constants/__init__.py
@@ -0,0 +1,25 @@
+#
+# Copyright 2020 Universidad Complutense de Madrid
+#
+# This file is part of Megara DRP
+#
+# SPDX-License-Identifier: GPL-3.0+
+# License-Filename: LICENSE.txt
+#
+
+"""MEGARA constants with units"""
+
+import astropy.units as u
+
+
+# FIXME: duplicated in megaradrp.instrument
+# without units
+
+# Platescale in focal plane of Folded-Cass
+GTC_FC_A_PLATESCALE = 1.212 * u.arcsec / u.mm
+
+# Reference instrument aligment angle
+MEGARA_IAA = -163.854 * u.deg
+
+# mm from center to center, upwards
+SPAXEL_SCALE = 0.443 * u.mm
\ No newline at end of file
diff --git a/megaradrp/instrument/focalplane.py b/megaradrp/instrument/focalplane.py
new file mode 100644
index 00000000..c86ccb99
--- /dev/null
+++ b/megaradrp/instrument/focalplane.py
@@ -0,0 +1,276 @@
+#
+# Copyright 2016-2020 Universidad Complutense de Madrid
+#
+# This file is part of Megara DRP
+#
+# SPDX-License-Identifier: GPL-3.0+
+# License-Filename: LICENSE.txt
+#
+
+"""Focal plane description for MEGARA"""
+
+from __future__ import division
+
+import re
+import enum
+import warnings
+
+
+class FocalPlaneConf(object):
+ """Configuration of focal plane"""
+ def __init__(self, name=""):
+ self.name = name
+ self.conf_id = 1
+ self.nbundles = 0
+ self.nfibers = 0
+ self.bundles = {}
+ self.fibers = {}
+ self.funit = "mm"
+
+ @classmethod
+ def from_header(cls, hdr):
+ """Create a FocalPlaneConf object from map-like header"""
+ conf = FocalPlaneConf()
+ defaults = {}
+ defaults['LCB'] = (9, 623)
+ defaults['MOS'] = (92, 644)
+
+ conf.name = hdr.get('INSMODE')
+ insmode = conf.name
+ conf.conf_id = hdr.get('CONFID', 1)
+ conf.nbundles = hdr.get('NBUNDLES', defaults[insmode][0])
+ conf.nfibers = hdr.get('NFIBERS', defaults[insmode][1])
+ conf.funit = funit = hdr.get("FUNIT", "arcsec")
+ # Read bundles
+
+ bun_ids = []
+ fib_ids = []
+ bundles = conf.bundles
+ fibers = conf.fibers
+
+ # loop over everything, count BUN%03d_P and FIB%03d_B
+ pattern1 = re.compile(r"BUN(\d+)_P")
+ pattern2 = re.compile(r"FIB(\d+)_B")
+ for key in hdr:
+ bun_re = pattern1.match(key)
+ fib_re = pattern2.match(key)
+ if bun_re:
+ bun_idx = int(bun_re.group(1))
+ bun_ids.append(bun_idx)
+ elif fib_re:
+ fib_idx = int(fib_re.group(1))
+ fib_ids.append(fib_idx)
+
+ for i in bun_ids:
+ bb = BundleConf()
+ bb.id = i
+ bb.target_priority = hdr["BUN%03d_P" % i]
+ bb.target_name = hdr["BUN%03d_I" % i]
+ bb.target_type = TargetType[hdr["BUN%03d_T" % i]]
+ bb.enabled = hdr.get("BUN%03d_E" % i, True)
+ bb.x = hdr.get("BUN%03d_X" % i, 0.0)
+ bb.y = hdr.get("BUN%03d_Y" % i, 0.0)
+ bb.pa = hdr.get("BUN%03d_O" % i, 0.0)
+ bb.fibers = {}
+ bundles[i] = bb
+
+ for fibid in fib_ids:
+ ff = FiberConf()
+ ff.fibid = fibid
+
+ # Coordinates
+ ff.d = hdr["FIB%03d_D" % fibid]
+ ff.r = hdr["FIB%03d_R" % fibid]
+ ff.o = 0 # hdr["FIB%03d_O" % fibid]
+ # Active
+ ff.inactive = not hdr["FIB%03d_A" % fibid]
+
+ # Coordinates XY
+ ff.x = hdr["FIB%03d_X" % fibid]
+ ff.y = hdr["FIB%03d_Y" % fibid]
+
+ ff.bundle_id = hdr["FIB%03d_B" % fibid]
+ ff.name = hdr.get("FIB%03d_N" % fibid, 'unknown')
+
+ ff.w1 = hdr.get("FIB%03dW1" % fibid, None)
+ ff.w2 = hdr.get("FIB%03dW2" % fibid, None)
+
+ # Validity
+ if ff.inactive:
+ ff.valid = False
+ else:
+ ff.valid = hdr.get("FIB%03d_V" % fibid, True)
+
+ bundles[ff.bundle_id].fibers[ff.fibid] = ff
+ fibers[ff.fibid] = ff
+
+ return conf
+
+ @classmethod
+ def from_img(cls, img):
+ """Create a FocalPlaneConf object from a FITS image"""
+ return cls.from_header(img['FIBERS'].header)
+
+ def sky_fibers(self, valid_only=False, ignored_bundles=None):
+ result = []
+ if ignored_bundles is None:
+ ignored_bundles = []
+
+ for bundle in self.bundles.values():
+ if bundle.id in ignored_bundles:
+ continue
+ if bundle.target_type is TargetType.SKY:
+ if valid_only:
+ for fib in bundle.fibers.values():
+ if fib.valid:
+ result.append(fib.fibid)
+ else:
+ result.extend(bundle.fibers.keys())
+ return result
+
+ def connected_fibers(self, valid_only=False):
+
+ if self.name == 'MOS':
+ raise ValueError('not working for MOS')
+
+ result = []
+ for bundle in self.bundles.values():
+ if bundle.target_type is not TargetType.SKY:
+ if valid_only:
+ for fib in bundle.fibers.values():
+ if fib.valid:
+ result.append(fib)
+ else:
+ result.extend(bundle.fibers.values())
+ return result
+
+ def inactive_fibers(self):
+ result = []
+ for fiber in self.fibers.values():
+ if fiber.inactive:
+ result.append(fiber.fibid)
+ return result
+
+ def active_fibers(self):
+ result = []
+ for fiber in self.fibers.values():
+ if not fiber.inactive:
+ result.append(fiber.fibid)
+ return result
+
+ def valid_fibers(self):
+ result = []
+ for fiber in self.fibers.values():
+ if fiber.valid:
+ result.append(fiber.fibid)
+ return result
+
+ def invalid_fibers(self):
+ result = []
+ for fiber in self.fibers.values():
+ if not fiber.valid:
+ result.append(fiber.fibid)
+ return result
+
+ def spectral_coverage(self):
+ lowc = []
+ upperc = []
+ for fibid, r in self.fibers.items():
+ if r.w1:
+ lowc.append(r.w1)
+ if r.w2:
+ upperc.append(r.w2)
+
+ mn = max(lowc)
+ nn = min(lowc)
+
+ mx = min(upperc)
+ nx = max(upperc)
+ return (mn, mx), (nn, nx)
+
+ def bundles_to_table(self):
+ """Convert bundles to a Table"""
+
+ import astropy.table
+
+ attrnames = ['id', 'x', 'y', 'pa', 'enabled',
+ 'target_type', 'target_priority', 'target_name']
+ cnames = ['bundle_id', 'x', 'y', 'pa', 'enabled',
+ 'target_type', 'target_priority', 'target_name']
+ obj_data = {}
+ for a, c in zip(attrnames, cnames):
+ obj_data[c] = [getattr(ob, a) for ob in self.bundles.values()]
+ result = astropy.table.Table(obj_data, names=cnames)
+ result['x'].unit = self.funit
+ result['y'].unit = self.funit
+ result['pa'].unit = 'deg'
+ return result
+
+ def fibers_to_table(self):
+ """Convert fibers to a Table"""
+ import astropy.table
+ attrnames = ['fibid', 'name', 'x', 'y', 'inactive', 'valid',
+ 'bundle_id']
+ cnames = ['fibid', 'name', 'x', 'y', 'inactive', 'valid',
+ 'bundle_id']
+ obj_data = {}
+
+ for a, c in zip(attrnames, cnames):
+ obj_data[c] = [getattr(ob, a) for ob in self.fibers.values()]
+ result = astropy.table.Table(obj_data, names=cnames)
+ result['x'].unit = self.funit
+ result['y'].unit = self.funit
+ return result
+
+
+class FiberConfs(FocalPlaneConf):
+ """Configuration of focal plane
+
+ .. deprecated:: 0.10
+ `FiberConfs` is replaced by `FocalPlaneConf`. It will
+ be removed in 1.0
+
+ """
+ def __init__(self):
+ super(FiberConfs, self).__init__()
+ warnings.warn("The 'FiberConfs' class was renamed to 'FocalPlaneConf'", DeprecationWarning, stacklevel=2)
+
+
+class TargetType(enum.Enum):
+ """Possible targets in a fiber bundle"""
+ SOURCE = 1
+ UNKNOWN = 2
+ UNASSIGNED = 3
+ SKY = 4
+ REFERENCE = 5
+ # aliases for the other fields
+ STAR = 5
+ BLANK = 4
+
+
+class BundleConf(object):
+ """Description of a bundle"""
+ def __init__(self):
+ self.id = 0
+ self.target_type = TargetType.UNASSIGNED
+ self.target_priority = 0
+ self.target_name = 'unknown'
+ self.x_fix = 0
+ self.y_fix = 0
+ self.pa_fix = 0
+ self.x = 0
+ self.y = 0
+ self.pa = 0
+ self.enabled = True
+
+
+class FiberConf(object):
+ """Description of the fiber"""
+ def __init__(self):
+ self.fibid = 0
+ self.name = 'unknown'
+ self.bundle_id = None
+ self.inactive = False
+ self.valid = True
+ self.x = 0.0
+ self.y = 0.0
diff --git a/megaradrp/instrument/tests/__init__.py b/megaradrp/instrument/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/megaradrp/instrument/tests/test_configs.py b/megaradrp/instrument/tests/test_configs.py
new file mode 100644
index 00000000..1ef287c2
--- /dev/null
+++ b/megaradrp/instrument/tests/test_configs.py
@@ -0,0 +1,37 @@
+import pytest
+import astropy.io.fits as fits
+import numina.instrument.generic
+import numina.core
+from megaradrp.loader import load_drp
+
+
+def create_simple_frame():
+ hdr = {'INSTRUME': 'MEGARA', 'INSCONF': 'ca3558e3-e50d-4bbc-86bd-da50a0998a48'}
+ hdu = fits.PrimaryHDU(data=[[1]])
+ for k, v in hdr.items():
+ hdu.header[k] = v
+ hdulist = fits.HDUList([hdu])
+ frame = numina.core.DataFrame(frame=hdulist)
+ return frame
+
+
+@pytest.mark.parametrize("conf, uuix", [
+ ['default', "ca3558e3-e50d-4bbc-86bd-da50a0998a48"],
+ ["ca3558e3-e50d-4bbc-86bd-da50a0998a48", "ca3558e3-e50d-4bbc-86bd-da50a0998a48"],
+ ["9a86b2b2-3f7d-48ec-8f4f-3780ec967c90", "9a86b2b2-3f7d-48ec-8f4f-3780ec967c90"],
+ ["66f2283e-3049-4d4b-8ef1-14d62fcb611d", "66f2283e-3049-4d4b-8ef1-14d62fcb611d"],
+ ["4fd05b24-2ed9-457b-b563-a3c618bb1d4c", "4fd05b24-2ed9-457b-b563-a3c618bb1d4c"]
+])
+def test_loader1(conf, uuix):
+ import numina.core
+ from numina.instrument.assembly import assembly_instrument
+
+ obs = numina.core.ObservationResult(instrument='MEGARA')
+ obs.frames.append(create_simple_frame())
+ drpm = load_drp()
+ obs.configuration = conf
+
+ key, date_obs, keyname = drpm.select_profile(obs)
+ ins = assembly_instrument(drpm.configurations, key, date_obs, by_key=keyname)
+ assert isinstance(ins, numina.instrument.generic.InstrumentGeneric)
+ assert str(ins.origin.uuid) == uuix
diff --git a/megaradrp/loader.py b/megaradrp/loader.py
index 04d33936..a7b4c0f1 100644
--- a/megaradrp/loader.py
+++ b/megaradrp/loader.py
@@ -10,8 +10,52 @@
"""Load MEGARA DRP"""
from numina.core import drp_load
+import numina.core.config as cfg
+
+
+class MegaraDrpLoader(object):
+ """Custom loader class
+
+ This class modifies the rawimage field of the observing modes
+ of MEGARA
+ """
+ @staticmethod
+ def mode_loader(mode_node):
+ import megaradrp.datamodel as DM
+ if 'rawimage' in mode_node:
+ rname = mode_node['rawimage']
+ mode_node['rawimage'] = DM.MegaraDataType[rname]
+ return mode_node
def load_drp():
"""Entry point to load MEGARA DRP."""
- return drp_load('megaradrp', 'drp.yaml')
+ return drp_load('megaradrp', 'drp.yaml', confclass=MegaraDrpLoader)
+
+
+def is_fits_megara(pathname):
+ "Check whether pathname is a FITS file produced by MEGARA"
+ import astropy.io.fits as fits
+ # FIXME: incomplete
+ if pathname.endswith('.fits') or pathname.endswith('.fits.gz'):
+ with fits.open(pathname) as hdulist:
+ prim = hdulist[0].header
+ instrument = prim.get("INSTRUME", "unknown")
+ if instrument == "MEGARA":
+ return True
+ else:
+ return False
+
+
+@cfg.describe.register('image/fits', is_fits_megara, priority=15)
+def describe_fits_megara(pathname):
+ import megaradrp.datamodel as DM
+ import astropy.io.fits as fits
+ with fits.open(pathname) as hdulist:
+ return DM.describe_hdulist_megara(hdulist)
+
+
+@cfg.check.register('MEGARA')
+def check_obj_megara(obj, astype=None, level=None):
+ import megaradrp.datamodel as DM
+ return DM.check_obj_megara(obj, astype=astype, level=level)
diff --git a/megaradrp/types.py b/megaradrp/ntypes.py
similarity index 65%
rename from megaradrp/types.py
rename to megaradrp/ntypes.py
index c8400aac..38d63f09 100644
--- a/megaradrp/types.py
+++ b/megaradrp/ntypes.py
@@ -1,5 +1,5 @@
#
-# Copyright 2011-2017 Universidad Complutense de Madrid
+# Copyright 2011-2019 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -9,40 +9,70 @@
"""Products of the Megara Pipeline"""
+import logging
from numina.core import DataFrameType
from numina.types.product import DataProductMixin
from numina.types.datatype import DataType
from numina.types.array import ArrayType
from numina.types.linescatalog import LinesCatalog
+from numina.exceptions import ValidationError
-
+import megaradrp.validators as valid
+from megaradrp.datatype import MegaraDataType
from megaradrp.datamodel import MegaraDataModel, QueryAttribute
+_logger = logging.getLogger(__name__)
+
+
+def validate_fiber_ext(header_f):
+ _logger.debug('validate fiber extension')
+
+
class MegaraFrame(DataFrameType):
"""A processed frame"""
-
+ DATATYPE = MegaraDataType.IMAGE_RAW
tags_headers = {}
def __init__(self, *args, **kwds):
super(MegaraFrame, self).__init__(datamodel=MegaraDataModel)
+ def validate_hdulist(self, hdulist):
+ _logger.debug('validate MasterBias')
+ checker = valid.check_as_datatype(self.DATATYPE)
+ return checker(hdulist)
+
+ def extract_tags(self, obj):
+ """Extract tags from serialized file"""
+
+ objl = self.convert(obj)
+ ext = self.datamodel.extractor_map['fits']
+ tags = {}
+
+ if objl:
+ with objl.open() as hdulist:
+ for field in self.names_t:
+ tags[field] = ext.extract(field, hdulist)
+ return tags
+ else:
+ return {}
+
class ProcessedFrame(MegaraFrame):
"""A processed frame"""
-
+ DATATYPE = MegaraDataType.IMAGE_PROCESSED
tags_headers = {}
class ProcessedImage(ProcessedFrame):
"""A processed image"""
- pass
+ DATATYPE = MegaraDataType.IMAGE_PROCESSED
class ProcessedRSS(ProcessedFrame):
"""A processed RSS image"""
- pass
+ DATATYPE = MegaraDataType.RSS_PROCESSED
class ProcessedMultiRSS(ProcessedFrame):
@@ -52,6 +82,7 @@ class ProcessedMultiRSS(ProcessedFrame):
class ProcessedSpectrum(ProcessedFrame):
"""A 1d spectrum"""
+ DATATYPE = MegaraDataType.SPEC_PROCESSED
pass
@@ -77,28 +108,36 @@ def name(self):
class MasterBias(ProcessedImageProduct):
"""A Master Bias image"""
- pass
+ DATATYPE = MegaraDataType.MASTER_BIAS
class MasterTwilightFlat(ProcessedRSSProduct):
__tags__ = ['insmode', 'vph', 'confid']
+ DATATYPE = MegaraDataType.MASTER_TWILIGHT
class MasterDark(ProcessedImageProduct):
"""A Master Dark image"""
- pass
+ DATATYPE = MegaraDataType.MASTER_DARK
class MasterFiberFlat(ProcessedRSSProduct):
__tags__ = ['insmode', 'vph', 'confid']
+ DATATYPE = MegaraDataType.MASTER_FLAT
class MasterSlitFlat(ProcessedImageProduct):
__tags__ = ['insmode', 'vph']
+ DATATYPE = MegaraDataType.MASTER_SLITFLAT
class MasterBPM(ProcessedImageProduct):
"""Bad Pixel Mask product"""
+ DATATYPE = MegaraDataType.MASTER_BPM
+
+
+class DiffuseLightCorrection(ProcessedImageProduct):
+ """Image to correct from diffuse light"""
pass
@@ -107,8 +146,21 @@ class MasterSensitivity(ProcessedSpectrumProduct):
pass
+class SkyRSS(ProcessedRSS):
+ """A processed RSS image"""
+ pass
+
+
class ReferenceExtinctionTable(DataProductMixin, ArrayType):
"""Atmospheric Extinction."""
+
+ def validate(self, obj):
+ if obj is None:
+ # None is valid
+ pass
+ else:
+ super(ReferenceExtinctionTable, self).validate(obj)
+
def convert(self, obj):
# Support None value
if obj is None:
diff --git a/megaradrp/processing/aperture.py b/megaradrp/processing/aperture.py
index d5224f01..adecfc0d 100644
--- a/megaradrp/processing/aperture.py
+++ b/megaradrp/processing/aperture.py
@@ -1,5 +1,5 @@
#
-# Copyright 2011-2018 Universidad Complutense de Madrid
+# Copyright 2011-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -204,12 +204,14 @@ def run(self, img):
fibers_ext = img['FIBERS']
fibers_ext_headers = fibers_ext.header
for aper in self.trace_repr.contents:
- key = "FIB%03d_V" % aper.fibid
- fibers_ext_headers[key] = aper.valid
- key = "FIB%03dS1" % aper.fibid
- fibers_ext_headers[key] = aper.start
- key = "FIB%03dS2" % aper.fibid
- fibers_ext_headers[key] = aper.stop
+ # set the value only if invalid
+ if not aper.valid:
+ key = "FIB{:03d}_V".format(aper.fibid)
+ fibers_ext_headers[key] = (aper.valid, "Fiber is invalid")
+ key = "FIB{:03d}S1".format(aper.fibid)
+ fibers_ext_headers[key] = (aper.start, "[pix] Start of trace")
+ key = "FIB{:03d}S2".format(aper.fibid)
+ fibers_ext_headers[key] = (aper.stop, "[pix] End of trace")
newimg = fits.HDUList([img[0], fibers_ext])
return newimg
diff --git a/megaradrp/processing/cube.py b/megaradrp/processing/cube.py
new file mode 100644
index 00000000..2065da55
--- /dev/null
+++ b/megaradrp/processing/cube.py
@@ -0,0 +1,566 @@
+#
+# Copyright 2017-2020 Universidad Complutense de Madrid
+#
+# This file is part of Megara DRP
+#
+# SPDX-License-Identifier: GPL-3.0+
+# License-Filename: LICENSE.txt
+#
+
+"""
+Interpolation method based on:
+'Hex-Splines: A Novel Spline Family for Hexagonal Lattices'
+van de Ville et al. IEEE Transactions on Image Processing 2004, 13, 6
+"""
+
+from __future__ import print_function
+
+import math
+
+import numpy as np
+from scipy import signal
+import astropy.units as u
+import astropy.wcs
+from numina.frame.utils import copy_img
+
+from megaradrp.instrument.focalplane import FocalPlaneConf
+# from megaradrp.datamodel import MegaraDataModel
+from megaradrp.core.utils import atleast_2d_last
+import megaradrp.processing.wcs as mwcs
+import megaradrp.processing.hexspline as hspline
+import megaradrp.instrument.constants as cons
+
+# Normalized hexagon geometry
+M_SQRT3 = math.sqrt(3)
+H_HEX = 0.5
+R_HEX = 1 / M_SQRT3
+A_HEX = 0.5 * H_HEX * R_HEX
+HA_HEX = 6 * A_HEX # detR0 == ha
+
+
+# Size scale of the spaxel grid in arcseconds
+HEX_SCALE = (cons.GTC_FC_A_PLATESCALE * cons.SPAXEL_SCALE).to(u.arcsec).value
+
+
+def calc_matrix(nrow, ncol, grid_type=2):
+ """
+
+ Parameters
+ ----------
+ nrow : int
+ ncol : int
+ grid_type : int
+
+ Returns
+ -------
+
+ """
+
+ R0 = np.array([[M_SQRT3 / 2,0], [-0.5,1]]) # Unit scale
+
+ if grid_type == 2:
+ f = 0
+ else:
+ f = 1
+
+ kcol = []
+ krow = []
+ for i in range(ncol):
+ s = (i + f * (i % 2)) // 2
+ for j in range(nrow):
+ kcol.append(i)
+ krow.append(j+s)
+
+ sl = np.array([kcol, krow]) # x y
+ r0l = np.dot(R0, sl)
+ # r0l = R0 @ sl
+ return r0l
+
+
+def calc_matrix_from_fiberconf(fibersconf):
+ """
+
+ Parameters
+ ----------
+    fibersconf : megaradrp.instrument.focalplane.FocalPlaneConf
+
+ Returns
+ -------
+
+ """
+
+ # TODO: This should be in FIBERCONFS...
+ spos1_x = []
+ spos1_y = []
+ for fiber in fibersconf.connected_fibers():
+ spos1_x.append(fiber.x)
+ spos1_y.append(fiber.y)
+ spos1_x = np.asarray(spos1_x)
+ spos1_y = np.asarray(spos1_y)
+
+ # FIXME: workaround
+ # FIBER in LOW LEFT corner is 614
+ REFID = 614
+ ref_fiber = fibersconf.fibers[REFID]
+ minx, miny = ref_fiber.x, ref_fiber.y
+ if ref_fiber.x < -6:
+ # arcsec
+ ascale = HEX_SCALE
+ # print('fiber coordinates in arcsec')
+ else:
+ # mm
+ ascale = cons.SPAXEL_SCALE.to(u.mm).value
+ # print('fiber coordinates in mm')
+ refx, refy = minx / ascale, miny / ascale
+ rpos1_x = (spos1_x - minx) / ascale
+ rpos1_y = (spos1_y - miny) / ascale
+ r0l_1 = np.array([rpos1_x, rpos1_y])
+ return r0l_1, (refx, refy)
+
+
+def calc_grid(scale=1.0):
+ """
+
+ Parameters
+ ----------
+ scale : float
+
+ Returns
+ -------
+
+ """
+
+ G_TYPE = 2 # Values for MEGARA
+ ncol = 27 #
+ nrow = 21 #
+ r0l = calc_matrix(nrow, ncol, grid_type=G_TYPE)
+ # r0l = R0 @ sl
+ spos_x = scale * (r0l[0] - r0l[0].max() / 2)
+ spos_y = scale * (r0l[1] - r0l[1].max() / 2)
+
+ return spos_x, spos_y
+
+
+def hexgrid_extremes(r0l, target_scale):
+ """
+
+ Parameters
+ ----------
+ r0l
+ target_scale : float
+
+ Returns
+ -------
+
+ """
+ # geometry
+ # ha_hex = 6 * a_hex # detR0 == ha
+ # compute extremes of hexgrid to rectangular grid
+ # with pixel size 'scale'
+ x0min, y0min = r0l.min(axis=1)
+ x0max, y0max = r0l.max(axis=1)
+ y1min = y0min - H_HEX
+ y1max = y0max + H_HEX
+ x1min = x0min - R_HEX
+ x1max = x0max + R_HEX
+
+ j1min = int(math.floor(x1min / target_scale + 0.5))
+ i1min = int(math.floor(y1min / target_scale + 0.5))
+ j1max = int(math.ceil(x1max / target_scale - 0.5))
+ i1max = int(math.ceil(y1max / target_scale - 0.5))
+ return (i1min, i1max), (j1min, j1max)
+
+
+def create_cube(r0l, zval, p=1, target_scale=1.0):
+ """
+
+ Parameters
+ ----------
+ r0l
+ zval
+ p : {1, 2}
+ target_scale : float, optional
+
+ Returns
+ -------
+
+ Raises
+ ------
+ ValueError
+ If `p` > 2
+
+ """
+ # geometry
+ # Interpolation method. Allowed values are:
+ # P = 1 NN
+ # P = 2 Linear
+ if p > 2:
+ raise ValueError('p > 2 not implemented')
+
+ R1 = target_scale * np.array([[1.0 ,0], [0,1]]) # Unit scale
+
+ # compute extremes of hexgrid to rectangular grid
+ # with pixel size 'scale'
+
+ (i1min, i1max), (j1min, j1max) = hexgrid_extremes(r0l, target_scale)
+
+ # Rectangular grid
+ mk1 = np.arange(i1min, i1max + 1)
+ mk2 = np.arange(j1min, j1max + 1)
+ crow = len(mk1)
+ ccol = len(mk2)
+ # Result image
+ # Add third last axis
+ zval2 = atleast_2d_last(zval)
+ # disp axis is last axis...
+ dk = np.zeros((crow, ccol, zval2.shape[-1]))
+ # print('result shape is ', dk.shape)
+ # r1k = R1 @ sk
+ sk = np.flipud(np.transpose([np.tile(mk1, len(mk2)), np.repeat(mk2, len(mk1))]).T) # x y
+ r1k = np.dot(R1, sk)
+
+ # Prefiltering
+ # For p = 1, prefilter coefficients with p = 1, coeff = 1
+ # For p = 2, prefilter coefficients with p = 2, coeff = 1
+ # No prefiltering in zval2 is required if p <= 2
+
+ rbs = hspline.rescaling_kernel(p, scale=target_scale)
+
+ # Loop to compute integrals...
+ for s, r in zip(sk.T, r1k.T):
+ allpos = -(r0l - r[:, np.newaxis])
+ we = np.abs((rbs.ev(allpos[1], allpos[0])))
+ dk[s[1] - i1min, s[0] - j1min] = np.sum(we[:, np.newaxis] * zval2, axis=0)
+
+ # Postfiltering
+ # For p = 1, final image in NN, postfilter coefficients with n = 1
+ # For p = 2, final image is linear, postfilter coefficients with n = 3
+ #
+ if p == 1:
+ # Coefficients post filtering to n = 2 * p - 1 == 1
+ cpk = dk
+ # Nearest-neighbor samples equal to coefficients
+ img = cpk
+ elif p == 2:
+ # Coefficients post filtering to n = 2 * p - 1 == 3
+ cpk = np.zeros_like(dk)
+ # last axis
+ for k in range(dk.shape[-1]):
+ cpk[..., k] = signal.cspline2d(dk[..., k])
+ # Linear samples equal to coefficients
+ img = cpk
+ else:
+ raise ValueError('p > 2 not implemented')
+
+ return img
+
+
+def create_cube_from_array(rss_data, fiberconf, p=1, target_scale_arcsec=1.0, conserve_flux=True):
+ """
+
+ Parameters
+ ----------
+ rss_data
+    fiberconf : megaradrp.instrument.focalplane.FocalPlaneConf
+ p : {1, 2}
+ target_scale_arcsec : float
+ conserve_flux : bool
+
+ Returns
+ -------
+
+ """
+
+ target_scale = target_scale_arcsec / HEX_SCALE
+ conected = fiberconf.connected_fibers()
+ rows = [conf.fibid - 1 for conf in conected]
+
+ rss_data = atleast_2d_last(rss_data)
+
+ region = rss_data[rows, :]
+
+ r0l, (refx, refy) = calc_matrix_from_fiberconf(fiberconf)
+ cube_data = create_cube(r0l, region[:, :], p, target_scale)
+ # scale with areas
+ if conserve_flux:
+ cube_data *= (target_scale ** 2 / HA_HEX)
+ result = np.moveaxis(cube_data, 2, 0)
+ result.astype('float32')
+ return result
+
+
+def create_cube_from_rss(rss, p=1, target_scale_arcsec=1.0, conserve_flux=True):
+ """
+
+ Parameters
+ ----------
+ rss
+ p : {1, 2}
+ target_scale_arcsec : float, optional
+ conserve_flux : bool, optional
+
+ Returns
+ -------
+
+ """
+
+ target_scale = target_scale_arcsec / HEX_SCALE
+ # print('target scale is', target_scale)
+
+ rss_data = rss[0].data
+ # Operate on non-SKY fibers
+
+ fiberconf = FocalPlaneConf.from_img(rss)
+ conected = fiberconf.connected_fibers()
+ rows = [conf.fibid - 1 for conf in conected]
+ #
+ region = rss_data[rows, :]
+
+ # FIXME: workaround
+ # Get FUNIT keyword
+ r0l, (refx, refy) = calc_matrix_from_fiberconf(fiberconf)
+
+ (i1min, i1max), (j1min, j1max) = hexgrid_extremes(r0l, target_scale)
+ cube_data = create_cube(r0l, region[:, :], p, target_scale)
+
+ if conserve_flux:
+ # scale with areas
+ cube_data *= (target_scale ** 2 / HA_HEX)
+
+ cube = copy_img(rss)
+ # Move axis to put WL first
+ # so that is last in FITS
+ # plt.imshow(cube_data[:, :, 0], origin='lower', interpolation='bicubic')
+ # plt.show()
+
+ cube[0].data = np.moveaxis(cube_data, 2, 0)
+ cube[0].data.astype('float32')
+
+ # Merge headers
+ merge_wcs(rss['FIBERS'].header, rss[0].header, out=cube[0].header)
+ # Update values of WCS
+ # CRPIX1, CRPIX2
+ # CDELT1, CDELT2
+ # minx, miny
+ # After shifting the array
+ # refpixel is -i1min, -j1min
+ crpix_x = -refx / target_scale - j1min
+ crpix_y = -refy / target_scale - i1min
+ # Map the center of original field
+ #
+ #
+ cube[0].header['CRPIX1'] = crpix_x
+ cube[0].header['CRPIX2'] = crpix_y
+ cube[0].header['CDELT1'] = -target_scale_arcsec / (3600.0)
+ cube[0].header['CDELT2'] = target_scale_arcsec / (3600.0)
+ # 2D from FIBERS
+ # WL from PRIMARY
+ # done
+ return cube
+
+
+def recompute_wcs(hdr):
+ """Recompute the WCS rotations from IPA """
+ ipa = hdr['IPA']
+ pa = mwcs.compute_pa_from_ipa(ipa)
+ print('IPA angle is:', ipa, 'PA angle is', math.fmod(pa, 360))
+ x = hdr['PC1_1']
+ y = hdr['PC1_2']
+ print('PA from header is:', np.rad2deg(math.atan2(y, x)))
+ return mwcs.update_wcs_from_ipa(hdr, pa)
+
+
+def merge_wcs(hdr_sky, hdr_spec, out=None):
+ """Merge sky WCS with spectral WCS"""
+ if out is None:
+ hdr = hdr_spec.copy()
+ else:
+ hdr = out
+
+ allw = astropy.wcs.find_all_wcs(hdr_spec)
+ for w in allw:
+ ss = w.wcs.alt
+ merge_wcs_alt(hdr_sky, hdr_spec, hdr, spec_suffix=ss)
+
+ return hdr
+
+
+def merge_wcs_alt(hdr_sky, hdr_spec, out, spec_suffix=''):
+ """Merge sky WCS with spectral WCS"""
+
+ hdr = out
+ s = spec_suffix
+ sf = s
+ # Extend header for third axis
+ c_crpix = 'Pixel coordinate of reference point'
+ c_cunit = 'Units of coordinate increment and value'
+ hdr.set('CUNIT1{}'.format(sf), comment=c_cunit, after='CDELT1{}'.format(sf))
+ hdr.set('CUNIT2{}'.format(sf), comment=c_cunit, after='CUNIT1{}'.format(sf))
+ hdr.set('CUNIT3{}'.format(sf), value='', comment=c_cunit, after='CUNIT2{}'.format(sf))
+ hdr.set('CRPIX2{}'.format(sf), value=1, comment=c_crpix, after='CRPIX1{}'.format(sf))
+ hdr.set('CRPIX3{}'.format(sf), value=1, comment=c_crpix, after='CRPIX2{}'.format(sf))
+ hdr.set('CDELT3{}'.format(sf), after='CDELT2{}'.format(sf))
+ hdr.set('CTYPE3{}'.format(sf), after='CTYPE2{}'.format(sf))
+ hdr.set('CRVAL3{}'.format(sf), after='CRVAL2{}'.format(sf))
+ c_pc = 'Coordinate transformation matrix element'
+ hdr.set('PC1_1{}'.format(sf), value=1.0, comment=c_pc, after='CRVAL3{}'.format(sf))
+ hdr.set('PC1_2{}'.format(sf), value=0.0, comment=c_pc, after='PC1_1{}'.format(sf))
+ hdr.set('PC2_1{}'.format(sf), value=0.0, comment=c_pc, after='PC1_2{}'.format(sf))
+ hdr.set('PC2_2{}'.format(sf), value=1.0, comment=c_pc, after='PC2_1{}'.format(sf))
+ hdr.set('PC3_3{}'.format(sf), value=1.0, comment=c_pc, after='PC2_2{}'.format(sf))
+
+ # Mapping, which keyword comes from each header
+ mappings = [('CRPIX3', 'CRPIX1', s, 0),
+ ('CDELT3', 'CDELT1', s, 0),
+ ('CRVAL3', 'CRVAL1', s, 0),
+ ('CTYPE3', 'CTYPE1', s, 0),
+ ('CRPIX1', 'CRPIX1', '', 1),
+ ('CDELT1', 'CDELT1', '', 1),
+ ('CRVAL1', 'CRVAL1', '', 1),
+ ('CTYPE1', 'CTYPE1', '', 1),
+ ('CUNIT1', 'CUNIT1', '', 1),
+ ('PC1_1', 'PC1_1', '', 1),
+ ('PC1_2', 'PC1_2', '', 1),
+ ('CRPIX2', 'CRPIX2', '', 1),
+ ('CDELT2', 'CDELT2', '', 1),
+ ('CRVAL2', 'CRVAL2', '', 1),
+ ('CTYPE2', 'CTYPE2', '', 1),
+ ('CUNIT2', 'CUNIT2', '', 1),
+ ('PC2_1', 'PC2_1', '', 1),
+ ('PC2_2', 'PC2_2', '', 1),
+ ('LONPOLE', 'LONPOLE', '', 1),
+ ('RADESYS', 'READESYS', '', 1),
+ ('specsys', 'SPECSYS', s, 0),
+ ('ssysobs', 'SSYSOBS', s, 0),
+ ('velosys', 'VELOSYS', s, 0)
+ ]
+
+ hdr_in = {}
+ hdr_in[0] = hdr_spec
+ hdr_in[1] = hdr_sky
+
+ for dest, orig, key, idx in mappings:
+ hdr_orig = hdr_in[idx]
+ korig = orig + key
+ kdest = dest + sf
+ try:
+ hdr[kdest] = hdr_orig[korig], hdr_orig.comments[korig]
+ except KeyError:
+ # Ignoring errors. Copy only if keyword exists
+ pass
+
+ return hdr
+
+
+def _simulate(seeing_fwhm=1.0, hex_scale=HEX_SCALE):
+ # simulation tools
+ from numina.instrument.simulation.atmosphere import generate_gaussian_profile
+ from megaradrp.simulation.actions import simulate_point_like_profile
+
+ FIBRAD_ANG = R_HEX * hex_scale
+
+ fibrad = FIBRAD_ANG # arcsec
+ seeing_profile = generate_gaussian_profile(seeing_fwhm)
+ psf = None
+ fraction_of_flux = simulate_point_like_profile(seeing_profile, psf, fibrad)
+
+ spos_x, spos_y = calc_grid(scale=hex_scale)
+
+ offpos0 = spos_x - 1.5
+ offpos1 = spos_y + 1.6
+ f_o_f = fraction_of_flux(offpos0, offpos1)
+ b = 0.2
+ zval = b + f_o_f
+ zval = np.tile(zval[:, None], (1, 1000))
+ zval = np.random.normal(zval, 0.01)
+ return zval
+
+
+def _demo():
+ import matplotlib.pyplot as plt
+
+ seeing_fwhm = 1.1 # arcsec
+ print('simulation')
+ zval = _simulate(seeing_fwhm)
+ print('done')
+
+ _visualization(zval, scale=HEX_SCALE)
+
+ print('zval shape is', zval.shape)
+ G_TYPE = 2
+ ncol = 27
+ nrow = 21
+ r0l = calc_matrix(nrow, ncol, grid_type=G_TYPE)
+
+ result = create_cube(r0l, zval, target_scale=0.5)
+ print('result shape is', result.shape)
+ plt.imshow(result[:,:,0], origin='lower', interpolation='bicubic')
+ plt.show()
+
+
+def _visualization(zval, scale=1.0):
+
+ import matplotlib.pyplot as plt
+ import megaradrp.visualization as vi
+
+ spos_x, spos_y = calc_grid(scale=scale)
+ plt.subplots_adjust(hspace=0.5)
+ plt.subplot(111)
+ ax = plt.gca()
+ ll = 6.1
+ plt.xlim([-ll, ll])
+ plt.ylim([-ll, ll])
+ col = vi.hexplot(ax, spos_x, spos_y, zval, scale=scale, cmap=plt.cm.YlOrRd_r)
+ # plt.title("Fiber map")
+ # cb = plt.colorbar(col)
+ # cb.set_label('counts')
+ plt.show()
+
+
+def main(args=None):
+ import argparse
+ import astropy.io.fits as fits
+
+ # parse command-line options
+ parser = argparse.ArgumentParser(prog='convert_rss_cube')
+ # positional parameters
+
+ methods = {'nn': 1, 'linear': 2}
+
+ parser.add_argument("rss",
+ help="RSS file with fiber traces",
+ type=argparse.FileType('rb'))
+ parser.add_argument('-p', '--pixel-size', type=float, default=0.3,
+ metavar='PIXEL_SIZE',
+ help="Pixel size in arc seconds")
+ parser.add_argument('-o', '--outfile', default='cube.fits',
+ help="Name of the output cube file")
+ parser.add_argument('-d', '--disable-scaling', action='store_true',
+ help="Disable flux conservation")
+ parser.add_argument('-m', '--method', action='store', choices=['nn', 'linear'],
+ default='nn', help="Method of interpolation")
+ parser.add_argument('--wcs-pa-from-header', action='store_true',
+ help="Use PA angle from header", dest='pa_from_header')
+
+ args = parser.parse_args(args=args)
+
+ target_scale = args.pixel_size # Arcsec
+ p = methods[args.method]
+ print('interpolation method is "{}"'.format(args.method))
+ print('target scale is', target_scale, 'arcsec')
+ conserve_flux = not args.disable_scaling
+
+ with fits.open(args.rss) as rss:
+ if not args.pa_from_header:
+ # Doing it here so the change is propagated to
+ # all alternative coordinates
+ print('recompute WCS from IPA')
+ rss['FIBERS'].header = recompute_wcs(rss['FIBERS'].header)
+ cube = create_cube_from_rss(rss, p, target_scale, conserve_flux=conserve_flux)
+
+ cube.writeto(args.outfile, overwrite=True)
+
+
+if __name__ == '__main__':
+
+ main()
diff --git a/megaradrp/processing/diffuselight.py b/megaradrp/processing/diffuselight.py
new file mode 100644
index 00000000..421caf99
--- /dev/null
+++ b/megaradrp/processing/diffuselight.py
@@ -0,0 +1,53 @@
+#
+# Copyright 2019 Universidad Complutense de Madrid
+#
+# This file is part of Megara DRP
+#
+# SPDX-License-Identifier: GPL-3.0+
+# License-Filename: LICENSE.txt
+#
+
+import logging
+import datetime
+
+from astropy.io import fits
+import numpy
+from numina.processing import Corrector
+
+
+_logger = logging.getLogger(__name__)
+
+
+class DiffuseLightCorrector(Corrector):
+ """A Node that corrects a frame from diffuse light"""
+
+ def __init__(self, diffuse, datamodel=None, calibid='calibid-unknown',
+ dtype='float32'):
+
+ super(DiffuseLightCorrector, self).__init__(
+ datamodel=datamodel,
+ calibid=calibid,
+ dtype=dtype)
+
+ if isinstance(diffuse, fits.HDUList):
+ self.corr = diffuse[0].data
+ elif isinstance(diffuse, fits.ImageHDU):
+ self.corr = diffuse.data
+ else:
+ self.corr = numpy.asarray(diffuse)
+
+ def header_update(self, hdr, imgid):
+ hdr['NUM-DFL'] = self.calibid
+ hdr['history'] = 'Diffuse light correction {}'.format(imgid)
+ hdr['history'] = 'Diffuse light correction time {}'.format(datetime.datetime.utcnow().isoformat())
+
+ def run(self, img):
+ imgid = self.get_imgid(img)
+ _logger.debug('correct diffuse light in image %s', imgid)
+
+ img['primary'].data -= self.corr
+ hdr = img['primary'].header
+
+ self.header_update(hdr, imgid)
+
+ return img
diff --git a/megaradrp/processing/extractobj.py b/megaradrp/processing/extractobj.py
index 2a966148..ec79887c 100644
--- a/megaradrp/processing/extractobj.py
+++ b/megaradrp/processing/extractobj.py
@@ -1,5 +1,5 @@
-# Copyright 2011-2019 Universidad Complutense de Madrid
+# Copyright 2011-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -11,7 +11,6 @@
import math
import uuid
-import logging
import numpy
import astropy.wcs
@@ -19,8 +18,10 @@
import astropy.units as u
from scipy.spatial import KDTree
from scipy.ndimage.filters import gaussian_filter
-from numina.frame.utils import copy_img
+from numina.array.wavecalib.crosscorrelation import periodic_corr1d
+#import megaradrp.datamodel as dm
+import megaradrp.instrument.focalplane as fp
from megaradrp.processing.fluxcalib import update_flux_limits
@@ -63,14 +64,14 @@ def extract_star(rssimage, position, npoints, fiberconf, logger=None):
logger.info('extracting star')
- # fiberconf = datamodel.get_fiberconf(rssimage)
+ # fiberconf = dm.get_fiberconf(rssimage)
logger.debug("Configuration UUID is %s", fiberconf.conf_id)
rssdata = rssimage[0].data
pdata = rssimage['wlmap'].data
points = [position]
- fibers = fiberconf.conected_fibers(valid_only=True)
+ fibers = fiberconf.connected_fibers(valid_only=True)
grid_coords = []
for fiber in fibers:
grid_coords.append((fiber.x, fiber.y))
@@ -159,7 +160,7 @@ def compute_centroid(rssdata, fiberconf, c1, c2, point, logger=None):
logger.debug("LCB configuration is %s", fiberconf.conf_id)
- fibers = fiberconf.conected_fibers(valid_only=True)
+ fibers = fiberconf.connected_fibers(valid_only=True)
grid_coords = []
for fiber in fibers:
grid_coords.append((fiber.x, fiber.y))
@@ -205,10 +206,10 @@ def compute_centroid(rssdata, fiberconf, c1, c2, point, logger=None):
return centroid
-def compute_dar(img, datamodel, logger=None, debug_plot=False):
+def compute_dar(img, logger=None, debug_plot=False):
"""Compute Diferencial Atmospheric Refraction"""
- fiberconf = datamodel.get_fiberconf(img)
+ fp_conf = fp.FocalPlaneConf.from_img(img)
wlcalib = astropy.wcs.WCS(img[0].header)
rssdata = img[0].data
@@ -217,7 +218,7 @@ def compute_dar(img, datamodel, logger=None, debug_plot=False):
colids = []
x = []
y = []
- for fiber in fiberconf.fibers.values():
+ for fiber in fp_conf.fibers.values():
colids.append(fiber.fibid - 1)
x.append(fiber.x)
y.append(fiber.y)
@@ -236,7 +237,7 @@ def compute_dar(img, datamodel, logger=None, debug_plot=False):
c2 = c + delt // 2
z = rssdata[colids, c1:c2].mean(axis=1)
- centroid = compute_centroid(rssdata, fiberconf, c1, c2, point, logger=logger)
+ centroid = compute_centroid(rssdata, fp_conf, c1, c2, point, logger=logger)
cols.append(c)
xdar.append(centroid[0])
ydar.append(centroid[1])
@@ -253,7 +254,7 @@ def compute_dar(img, datamodel, logger=None, debug_plot=False):
c1 = c - delt // 2
c2 = c + delt // 2
z = rssdata[colids, c1:c2].mean(axis=1)
- centroid = compute_centroid(rssdata, fiberconf, c1, c2, point)
+ centroid = compute_centroid(rssdata, fp_conf, c1, c2, point)
cols.append(c)
xdar.append(centroid[0])
ydar.append(centroid[1])
@@ -300,6 +301,74 @@ def compute_dar(img, datamodel, logger=None, debug_plot=False):
return world[:, 0], xdar, ydar
+def mix_values(wcsl, spectrum, star_interp):
+
+ r1 = numpy.arange(spectrum.shape[0])
+ r2 = r1 * 0.0
+ lm = numpy.array([r1, r2])
+ # Values are 0-based
+ wavelen_ = wcsl.all_pix2world(lm.T, 0)
+ if wcsl.wcs.cunit[0] == u.dimensionless_unscaled:
+ # CUNIT is empty, assume Angstroms
+ wavelen = wavelen_[:, 0] * u.AA
+ else:
+ wavelen = wavelen_[:, 0] * wcsl.wcs.cunit[0]
+
+ wavelen_aa = wavelen.to(u.AA)
+
+ response_0 = spectrum
+ mag_ref = star_interp(wavelen_aa) * u.ABmag
+ response_1 = mag_ref.to(u.Jy).value
+
+ return wavelen_aa, response_0, response_1
+
+
+def compute_broadening(flux_low, flux_high, sigmalist,
+ remove_mean=False, frac_cosbell=None, zero_padding=None,
+ fminmax=None, naround_zero=None, nfit_peak=None):
+
+ # normalize each spectrum dividing by its median
+ flux_low /= numpy.median(flux_low)
+ flux_high /= numpy.median(flux_high)
+
+ offsets = []
+ fpeaks = []
+ sigmalist = numpy.asarray(sigmalist)
+ for sigma in sigmalist:
+ # broaden reference spectrum
+ flux_ref_broad = gaussian_filter(flux_high, sigma)
+ # plot the two spectra
+
+ # periodic correlation between the two spectra
+ offset, fpeak = periodic_corr1d(
+ flux_ref_broad, flux_low,
+ remove_mean=remove_mean,
+ frac_cosbell=frac_cosbell,
+ zero_padding=zero_padding,
+ fminmax=fminmax,
+ naround_zero=naround_zero,
+ nfit_peak=nfit_peak,
+ norm_spectra=True,
+ )
+ offsets.append(offset)
+ fpeaks.append(fpeak)
+
+ fpeaks = numpy.asarray(fpeaks)
+ offsets = numpy.asarray(offsets)
+
+ # import matplotlib.pyplot as plt
+ # #
+ # plt.plot(sigmalist, offsets, color='r')
+ # ax2 = plt.gca().twinx()
+ # ax2.plot(sigmalist, fpeaks, color='b')
+ # plt.show()
+ #
+ offset_broad = offsets[numpy.argmax(fpeaks)]
+ sigma_broad = sigmalist[numpy.argmax(fpeaks)]
+
+ return offset_broad, sigma_broad
+
+
def generate_sensitivity(final, spectrum, star_interp, extinc_interp,
wl_coverage1, wl_coverage2, sigma=20.0):
@@ -362,7 +431,10 @@ def generate_sensitivity(final, spectrum, star_interp, extinc_interp,
flux_valid = numpy.zeros_like(valid, dtype='bool')
flux_valid[pixf1:pixf2] = True
- r0_ens = gaussian_filter(r0, sigma=sigma)
+ if sigma > 0:
+ r0_ens = gaussian_filter(r0, sigma=sigma)
+ else:
+ r0_ens = r0
ratio2 = r0_ens / r1
s_response = ratio2 * (r0max / r1max)
@@ -370,6 +442,7 @@ def generate_sensitivity(final, spectrum, star_interp, extinc_interp,
# FIXME: add history
sens = fits.PrimaryHDU(s_response, header=final[0].header)
# delete second axis keywords
+ # FIXME: delete axis with wcslib
for key in ['CRPIX2', 'CRVAL2', 'CDELT2', 'CTYPE2']:
if key in sens.header:
del sens.header[key]
@@ -382,46 +455,3 @@ def generate_sensitivity(final, spectrum, star_interp, extinc_interp,
return sens
-def subtract_sky(img, datamodel, ignored_sky_bundles=None, logger=None):
- # Sky subtraction
-
- if logger is None:
- logger = logging.getLogger(__name__)
-
- logger.info('obtain fiber information')
- sky_img = copy_img(img)
- final_img = copy_img(img)
- fiberconf = datamodel.get_fiberconf(sky_img)
- # Sky fibers
- skyfibs = fiberconf.sky_fibers(valid_only=True,
- ignored_bundles=ignored_sky_bundles)
- logger.debug('sky fibers are: %s', skyfibs)
- # Create empty sky_data
- target_data = img[0].data
-
- target_map = img['WLMAP'].data
- sky_data = numpy.zeros_like(img[0].data)
- sky_map = numpy.zeros_like(img['WLMAP'].data)
- sky_img[0].data = sky_data
-
- for fibid in skyfibs:
- rowid = fibid - 1
- sky_data[rowid] = target_data[rowid]
- sky_map[rowid] = target_map[rowid]
- # Sum
- coldata = sky_data.sum(axis=0)
- colsum = sky_map.sum(axis=0)
-
- # Divide only where map is > 0
- mask = colsum > 0
- avg_sky = numpy.zeros_like(coldata)
- avg_sky[mask] = coldata[mask] / colsum[mask]
-
- # This should be done only on valid fibers
- # The information of which fiber is valid
- # is in the tracemap, not in the header
- for fibid in fiberconf.valid_fibers():
- rowid = fibid - 1
- final_img[0].data[rowid, mask] = img[0].data[rowid, mask] - avg_sky[mask]
-
- return final_img, img, sky_img
diff --git a/megaradrp/processing/fibermatch.py b/megaradrp/processing/fibermatch.py
index 0e83252d..3e126943 100644
--- a/megaradrp/processing/fibermatch.py
+++ b/megaradrp/processing/fibermatch.py
@@ -70,6 +70,7 @@ def count_peaks(peaks, tol=1.2, distance=6.0, start=1, max_scale_jump=3):
# print('im peak:', p1, ' next peak should be around:', p1 + scale * expected_distance)
p2 = rest[0]
dist = abs(p1 - p2)
+ last_info = values[-1]
while True:
sed = scale * expected_distance
# print('next peak is:', p2, 'distance from p1 is', dist)
@@ -91,7 +92,8 @@ def count_peaks(peaks, tol=1.2, distance=6.0, start=1, max_scale_jump=3):
# print('increase scale to:', scale)
if scale > max_scale_jump:
# print('moving to far')
- raise ValueError('moving too far apart')
+ msg = 'peak {} not found within expected distance from {}'.format(pid, last_info[0])
+ raise ValueError(msg)
p1, rest = rest[0], rest[1:]
return values
diff --git a/megaradrp/processing/hexspline.py b/megaradrp/processing/hexspline.py
new file mode 100644
index 00000000..3d73d9d5
--- /dev/null
+++ b/megaradrp/processing/hexspline.py
@@ -0,0 +1,473 @@
+#
+# Copyright 2017-2020 Universidad Complutense de Madrid
+#
+# This file is part of Megara DRP
+#
+# SPDX-License-Identifier: GPL-3.0+
+# License-Filename: LICENSE.txt
+#
+
+"""
+Interpolation method based on:
+'Hex-Splines: A Novel Spline Family for Hexagonal Lattices'
+van de Ville et al. IEEE Transactions on Image Processing 2004, 13, 6
+"""
+
+from __future__ import division
+
+import math
+import numpy as np
+from megaradrp.simulation.convolution import hex_c
+
+# Hexagon constants
+M_SQRT3 = math.sqrt(3)
+
+
+def hexspline1(xx, yy):
+ """Hexspline of order p=1"""
+ return hex_c(xx, yy, rad=1.0 / M_SQRT3, ang=0.0)
+
+
+def hexspline2(x1, x2):
+ """Hexspline of order p=2"""
+
+ # This function has been created with sympy
+
+ x1 = np.asanyarray(x1)
+ x2 = np.asanyarray(x2)
+ Heaviside = np.heaviside
+
+ M_SQRT3_D_3 = M_SQRT3 / 3
+ M_SQRT3_D_2 = M_SQRT3 / 2
+ M_SQRT3_D_6 = M_SQRT3 / 6
+ M_SQRT3_2D3 = 2 * M_SQRT3_D_3
+ ref = 1 / 2
+
+ x_p0_m1 = x2 - 1
+ x_p0_p1 = x2 + 1
+ x_p0_m12 = x2 - 1 / 2
+ x_p0_p12 = x2 + 1 / 2
+
+ x_p0_scaled = M_SQRT3_D_3 * x2
+ x_p0_m1_scaled = M_SQRT3_D_3 * x_p0_m1
+ x_p0_p1_scaled = M_SQRT3_D_3 * x_p0_p1
+ x_p0_m12_scaled = M_SQRT3_D_3 * x_p0_m12
+ x_p0_p12_scaled = M_SQRT3_D_3 * x_p0_p12
+
+ aux_p1 = x1 + x_p0_m1_scaled
+ aux_p1_m3 = aux_p1 - M_SQRT3_D_3
+
+ aux_m1 = x1 - x_p0_m1_scaled
+ aux_m1_p3 = x1 - x_p0_m1_scaled + M_SQRT3_D_3
+
+ aux_mm1 = x1 - x_p0_p1_scaled
+ aux_mm1_m3 = aux_mm1 - M_SQRT3_D_3
+ aux_pm1 = x1 + M_SQRT3 * (x_p0_p1) / 3
+ aux_pm1_p3 = aux_pm1 + M_SQRT3_D_3
+
+ aux_p12 = x1 + x_p0_m12_scaled
+ aux_p12_m2 = aux_p12 - M_SQRT3_D_2
+ aux_p12_p2 = aux_p12 + M_SQRT3_D_2
+ aux_p12_p6 = aux_p12 + M_SQRT3_D_6
+ aux_p12_m6 = aux_p12 - M_SQRT3_D_6
+
+ aux_m12 = x1 - x_p0_m12_scaled
+ aux_m12_m6 = aux_m12- M_SQRT3_D_6
+ aux_m12_p6 = aux_m12+ M_SQRT3_D_6
+ aux_m12_p2 = aux_m12+ M_SQRT3_D_2
+ aux_m12_m2 = aux_m12- M_SQRT3_D_2
+
+ aux_mm12 = x1 - x_p0_p12_scaled
+ aux_mm12_p6 = aux_mm12 + M_SQRT3_D_6
+ aux_mm12_m6 = aux_mm12 - M_SQRT3_D_6
+ aux_mm12_m2 = aux_mm12 - M_SQRT3_D_2
+ aux_mm12_p2 = aux_mm12 + M_SQRT3_D_2
+
+ aux_pm12 = x1 + x_p0_p12_scaled
+ aux_pm12_m6 = aux_pm12 - M_SQRT3_D_6
+ aux_pm12_p6 = aux_pm12 + M_SQRT3_D_6
+ aux_pm12_p2 = aux_pm12 + M_SQRT3_D_2
+ aux_pm12_m2 = aux_pm12 - M_SQRT3_D_2
+
+ aux_m0 = x1 - x_p0_scaled
+ aux_m0_p23 = aux_m0 + M_SQRT3_2D3
+ aux_m0_m23 = aux_m0 - M_SQRT3_2D3
+ aux_m0_p3 = aux_m0 + M_SQRT3_D_3
+ aux_m0_m3 = aux_m0 - M_SQRT3_D_3
+
+ aux_p0 = x1 + x_p0_scaled
+ aux_p0_m23 = aux_p0 - M_SQRT3_2D3
+ aux_p0_p23 = aux_p0 + M_SQRT3_2D3
+ aux_p0_p3 = aux_p0 + M_SQRT3_D_3
+ aux_p0_m3 = aux_p0 - M_SQRT3_D_3
+
+ h_p0 = Heaviside(x2, ref)
+ h_p0_m1 = Heaviside(x_p0_m1, ref)
+ h_p0_p1 = Heaviside(x_p0_p1, ref)
+ h_p0_m12 = Heaviside(x_p0_m12, ref)
+ h_p0_p12 = Heaviside(x_p0_p12, ref)
+
+ h_m0 = Heaviside(-x2, ref)
+ h_m0_p12 = Heaviside(1 / 2 - x2, ref)
+ h_m0_m12 = Heaviside(-x2 - 1 / 2, ref)
+ h_m0_p1 = Heaviside(1 - x2, ref)
+ h_m0_m1 = Heaviside(-x2 - 1, ref)
+
+ h_aux_m0 = Heaviside(aux_m0, ref)
+ h_aux_m1 = Heaviside(aux_m1, ref)
+ h_aux_p0 = Heaviside(aux_p0, ref)
+ h_aux_p1 = Heaviside(aux_p1, ref)
+ h_aux_m0_p23 = Heaviside(aux_m0_p23, ref)
+ h_aux_m0_m23 = Heaviside(aux_m0_m23, ref)
+ h_aux_p0_m23 = Heaviside(aux_p0_m23, ref)
+ h_aux_p0_p23 = Heaviside(aux_p0_p23, ref)
+ h_aux_m1_p3 = Heaviside(aux_m1_p3, ref)
+ h_aux_p1_m3 = Heaviside(aux_p1_m3, ref)
+ h_aux_m12_m6 = Heaviside(aux_m12_m6, ref)
+ h_aux_m12_p2 = Heaviside(aux_m12_p2, ref)
+ h_aux_p12_m2 = Heaviside(aux_p12_m2, ref)
+ h_aux_p12_p6 = Heaviside(aux_p12_p6, ref)
+ h_aux_mm12_m2 = Heaviside(aux_mm12_m2, ref)
+ h_aux_mm12_p6 = Heaviside(aux_mm12_p6, ref)
+ h_aux_pm12_m6 = Heaviside(aux_pm12_m6, ref)
+ h_aux_pm12_p2 = Heaviside(aux_pm12_p2, ref)
+ h_aux_mm1_m3 = Heaviside(aux_mm1_m3, ref)
+ h_aux_pm1_p3 = Heaviside(aux_pm1_p3, ref)
+ h_aux_mm1 = Heaviside(aux_mm1, ref)
+
+ scale = M_SQRT3_2D3
+ res = 4 * x2 * aux_m0 * h_p0 * h_aux_m0 - \
+ 4 * x2 * aux_p0 * h_m0 * h_aux_p0 + \
+ x2 * aux_m0_m23 * h_p0 * h_aux_m0_m23 + \
+ x2 * aux_m0_p23 * h_p0 * h_aux_m0_p23 - \
+ x2 * aux_p0_m23 * h_m0 * h_aux_p0_m23 - \
+ x2 * aux_p0_p23 * h_m0 * h_aux_p0_p23 + \
+ x_p0_m1 * aux_m1_p3 * h_p0_m1 * h_aux_m1_p3 - \
+ x_p0_m1 * aux_p1_m3 * h_m0_p1 * h_aux_p1_m3 - \
+ 2 * (x_p0_m12) * aux_m12_m6 * h_p0_m12 * h_aux_m12_m6 - \
+ 2 * (x_p0_m12) * aux_m12_p2 * h_p0_m12 * h_aux_m12_p2 + \
+ 2 * (x_p0_m12) * aux_p12_m2 * h_m0_p12 * h_aux_p12_m2 + \
+ 2 * (x_p0_m12) * aux_p12_p6 * h_m0_p12 * h_aux_p12_p6 - \
+ 2 * (x_p0_p12) * aux_mm12_m2 * h_p0_p12 * h_aux_mm12_m2 - \
+ 2 * (x_p0_p12) * aux_mm12_p6 * h_p0_p12 * h_aux_mm12_p6 + \
+ 2 * (x_p0_p12) * aux_pm12_m6 * h_m0_m12 * h_aux_pm12_m6 + \
+ 2 * (x_p0_p12) * aux_pm12_p2 * h_m0_m12 * h_aux_pm12_p2 + \
+ (x_p0_p1) * (aux_mm1_m3) * h_p0_p1 * h_aux_mm1_m3 - \
+ (x_p0_p1) * (aux_pm1_p3) * h_m0_m1 * h_aux_pm1_p3 + \
+ M_SQRT3 * ((aux_m0) ** 2 * h_p0 * h_aux_m0 +
+ (aux_p0) ** 2 * h_m0 * h_aux_p0 +
+ (aux_m1) ** 2 * h_aux_m1 * h_p0_m1 / 2 +
+ (aux_p1) ** 2 * h_m0_p1 * h_aux_p1 / 2 +
+ (aux_mm1) ** 2 * h_aux_mm1 * h_p0_p1 / 2 +
+ (aux_mm1) ** 2 * h_aux_mm1 * h_m0_m1 / 2 +
+ (aux_m0_m23) ** 2 * h_p0 * h_aux_m0_m23 / 2 +
+ (aux_m0_m3) ** 2 * h_p0 * Heaviside(aux_m0_m3, ref) / 2 +
+ (aux_m0_p3) ** 2 * h_p0 * Heaviside(aux_m0_p3, ref) / 2 +
+ (aux_m0_p23) ** 2 * h_p0 * h_aux_m0_p23 / 2 +
+ (aux_p0_m23) ** 2 * h_m0 * h_aux_p0_m23 / 2 +
+ (aux_p0_m3) ** 2 * h_m0 * Heaviside(aux_p0_m3, ref) / 2 +
+ (aux_p0_p3) ** 2 * h_m0 * Heaviside(aux_p0_p3, ref) / 2 +
+ (aux_p0_p23) ** 2 * h_m0 * h_aux_p0_p23 / 2 -
+ (aux_m12_m2) ** 2 * h_p0_m12 * Heaviside(aux_m12_m2, ref) / 2 -
+ (aux_m12_m6) ** 2 * h_p0_m12 * h_aux_m12_m6 / 2 -
+ (aux_m12_p6) ** 2 * h_p0_m12 * Heaviside(aux_m12_p6, ref) / 2 -
+ (aux_m12_p2) ** 2 * h_p0_m12 * h_aux_m12_p2 / 2 -
+ (aux_p12_m2) ** 2 * h_m0_p12 * h_aux_p12_m2 / 2 -
+ (aux_p12_m6) ** 2 * h_m0_p12 * Heaviside(aux_p12_m6, ref) / 2 -
+ (aux_p12_p6) ** 2 * h_m0_p12 * h_aux_p12_p6 / 2 -
+ (aux_p12_p2) ** 2 * h_m0_p12 * Heaviside(aux_p12_p2, ref) / 2 -
+ (aux_mm12_m2) ** 2 * h_p0_p12 * h_aux_mm12_m2 / 2 -
+ (aux_mm12_m6) ** 2 * h_p0_p12 * Heaviside(aux_mm12_m6, ref) / 2 -
+ (aux_mm12_p6) ** 2 * h_p0_p12 * h_aux_mm12_p6 / 2 -
+ (aux_mm12_p2) ** 2 * h_p0_p12 * Heaviside(aux_mm12_p2, ref) / 2 -
+ (aux_pm12_m2) ** 2 * h_m0_m12 * Heaviside(aux_pm12_m2, ref) / 2 -
+ (aux_pm12_m6) ** 2 * h_m0_m12 * h_aux_pm12_m6 / 2 -
+ (aux_pm12_p6) ** 2 * h_m0_m12 * Heaviside(aux_pm12_p6, ref) / 2 -
+ (aux_pm12_p2) ** 2 * h_m0_m12 * h_aux_pm12_p2 / 2)
+ res = res * scale
+ return res
+
+
+def _hexspline2_t1(x1, x2):
+ """Hexspline of order p=2 in region t1"""
+
+ # This function has been created with sympy
+
+ x1 = np.asanyarray(np.abs(x1))
+ x2 = np.asanyarray(np.abs(x2))
+
+ M_SQRT3_D_3 = M_SQRT3 / 3
+ M_SQRT3_D_2 = M_SQRT3 / 2
+ M_SQRT3_D_6 = M_SQRT3 / 6
+ M_SQRT3_2D3 = 2 * M_SQRT3_D_3
+
+ x_p0_m12 = x2 - 1 / 2
+ x_p0_p12 = x2 + 1 / 2
+
+ x_p0_scaled = M_SQRT3_D_3 * x2
+
+ x_p0_m12_scaled = M_SQRT3_D_3 * x_p0_m12
+ x_p0_p12_scaled = M_SQRT3_D_3 * x_p0_p12
+
+ aux_p12 = x1 + x_p0_m12_scaled
+ aux_p12_p2 = aux_p12 + M_SQRT3_D_2
+ aux_p12_p6 = aux_p12 + M_SQRT3_D_6
+ aux_mm12 = x1 - x_p0_p12_scaled
+ aux_mm12_p2 = aux_mm12 + M_SQRT3_D_2
+ aux_m0 = x1 - x_p0_scaled
+ aux_m0_p23 = aux_m0 + M_SQRT3_2D3
+ aux_m0_p3 = aux_m0 + M_SQRT3_D_3
+
+ scale = M_SQRT3_2D3
+ res = x2 * aux_m0_p23 + \
+ 2 * x_p0_m12 * aux_p12_p6 + \
+ M_SQRT3 * (
+ (aux_m0_p3) ** 2 / 2 +
+ (aux_m0_p23) ** 2 / 2 -
+ (aux_p12_p6) ** 2 / 2 -
+ (aux_p12_p2) ** 2 / 2 -
+ (aux_mm12_p2) ** 2 / 2
+ )
+ res = res * scale
+ return res
+
+
+def _hexspline2_regions(x1, x2):
+ """Hexspline regions of order p=2"""
+ x1 = np.asanyarray(x1)
+ x2 = np.asanyarray(x2)
+ Heaviside = np.heaviside
+
+ M_SQRT3_D_3 = M_SQRT3 / 3
+ M_SQRT3_D_2 = M_SQRT3 / 2
+ M_SQRT3_D_6 = M_SQRT3 / 6
+ M_SQRT3_2D3 = 2 * M_SQRT3_D_3
+ ref = 1 / 2
+
+ x_p0_m1 = x2 - 1
+ x_p0_p1 = x2 + 1
+ x_p0_m12 = x2 - 1 / 2
+ x_p0_p12 = x2 + 1 / 2
+
+ x_p0_scaled = M_SQRT3_D_3 * x2
+ x_p0_m1_scaled = M_SQRT3_D_3 * x_p0_m1
+ x_p0_p1_scaled = M_SQRT3_D_3 * x_p0_p1
+ x_p0_m12_scaled = M_SQRT3_D_3 * x_p0_m12
+ x_p0_p12_scaled = M_SQRT3_D_3 * x_p0_p12
+
+ aux_p1 = x1 + x_p0_m1_scaled
+ aux_p1_m3 = aux_p1 - M_SQRT3_D_3
+
+ aux_m1 = x1 - x_p0_m1_scaled
+ aux_m1_p3 = x1 - x_p0_m1_scaled + M_SQRT3_D_3
+
+ aux_mm1 = x1 - x_p0_p1_scaled
+ aux_mm1_m3 = aux_mm1 - M_SQRT3_D_3
+ aux_pm1 = x1 + M_SQRT3 * (x_p0_p1) / 3
+ aux_pm1_p3 = aux_pm1 + M_SQRT3_D_3
+
+ aux_p12 = x1 + x_p0_m12_scaled
+ aux_p12_m2 = aux_p12 - M_SQRT3_D_2
+ aux_p12_p2 = aux_p12 + M_SQRT3_D_2
+ aux_p12_p6 = aux_p12 + M_SQRT3_D_6
+ aux_p12_m6 = aux_p12 - M_SQRT3_D_6
+
+ aux_m12 = x1 - x_p0_m12_scaled
+ aux_m12_m6 = aux_m12- M_SQRT3_D_6
+ aux_m12_p6 = aux_m12+ M_SQRT3_D_6
+ aux_m12_p2 = aux_m12+ M_SQRT3_D_2
+ aux_m12_m2 = aux_m12- M_SQRT3_D_2
+
+ aux_mm12 = x1 - x_p0_p12_scaled
+ aux_mm12_p6 = aux_mm12 + M_SQRT3_D_6
+ aux_mm12_m6 = aux_mm12 - M_SQRT3_D_6
+ aux_mm12_m2 = aux_mm12 - M_SQRT3_D_2
+ aux_mm12_p2 = aux_mm12 + M_SQRT3_D_2
+
+ aux_pm12 = x1 + x_p0_p12_scaled
+ aux_pm12_m6 = aux_pm12 - M_SQRT3_D_6
+ aux_pm12_p6 = aux_pm12 + M_SQRT3_D_6
+ aux_pm12_p2 = aux_pm12 + M_SQRT3_D_2
+ aux_pm12_m2 = aux_pm12 - M_SQRT3_D_2
+
+ aux_m0 = x1 - x_p0_scaled
+ aux_m0_p23 = aux_m0 + M_SQRT3_2D3
+ aux_m0_m23 = aux_m0 - M_SQRT3_2D3
+ aux_m0_p3 = aux_m0 + M_SQRT3_D_3
+ aux_m0_m3 = aux_m0 - M_SQRT3_D_3
+
+ aux_p0 = x1 + x_p0_scaled
+ aux_p0_m23 = aux_p0 - M_SQRT3_2D3
+ aux_p0_p23 = aux_p0 + M_SQRT3_2D3
+ aux_p0_p3 = aux_p0 + M_SQRT3_D_3
+ aux_p0_m3 = aux_p0 - M_SQRT3_D_3
+
+ r = {}
+ h_p0 = Heaviside(x2, ref)
+ h_p0_m1 = Heaviside(x_p0_m1, ref)
+ h_p0_p1 = Heaviside(x_p0_p1, ref)
+ h_p0_m12 = Heaviside(x_p0_m12, ref)
+ h_p0_p12 = Heaviside(x_p0_p12, ref)
+
+ h_m0 = Heaviside(-x2, ref)
+ h_m0_p12 = Heaviside(1 / 2 - x2, ref)
+ h_m0_m12 = Heaviside(-x2 - 1 / 2, ref)
+ h_m0_p1 = Heaviside(1 - x2, ref)
+ h_m0_m1 = Heaviside(-x2 - 1, ref)
+
+ h_aux_m0 = Heaviside(aux_m0, ref)
+ h_aux_m1 = Heaviside(aux_m1, ref)
+ h_aux_p0 = Heaviside(aux_p0, ref)
+ h_aux_p1 = Heaviside(aux_p1, ref)
+ h_aux_m0_p23 = Heaviside(aux_m0_p23, ref)
+ h_aux_m0_m23 = Heaviside(aux_m0_m23, ref)
+ h_aux_p0_m23 = Heaviside(aux_p0_m23, ref)
+ h_aux_p0_p23 = Heaviside(aux_p0_p23, ref)
+ h_aux_m1_p3 = Heaviside(aux_m1_p3, ref)
+ h_aux_p1_m3 = Heaviside(aux_p1_m3, ref)
+ h_aux_m12_m6 = Heaviside(aux_m12_m6, ref)
+ h_aux_m12_p2 = Heaviside(aux_m12_p2, ref)
+ h_aux_p12_m2 = Heaviside(aux_p12_m2, ref)
+ h_aux_p12_p6 = Heaviside(aux_p12_p6, ref)
+ h_aux_mm12_m2 = Heaviside(aux_mm12_m2, ref)
+ h_aux_mm12_p6 = Heaviside(aux_mm12_p6, ref)
+ h_aux_pm12_m6 = Heaviside(aux_pm12_m6, ref)
+ h_aux_pm12_p2 = Heaviside(aux_pm12_p2, ref)
+ h_aux_mm1_m3 = Heaviside(aux_mm1_m3, ref)
+ h_aux_pm1_p3 = Heaviside(aux_pm1_p3, ref)
+ h_aux_mm1 = Heaviside(aux_mm1, ref)
+
+ h_aux_m0_m3 = Heaviside(aux_m0_m3, ref)
+ h_aux_m0_p3 = Heaviside(aux_m0_p3, ref)
+ h_aux_p0_m3 = Heaviside(aux_p0_m3, ref)
+ h_aux_p0_p3 = Heaviside(aux_p0_p3, ref)
+
+ h_aux_m12_m2 = Heaviside(aux_m12_m2, ref)
+ h_aux_m12_p6 = Heaviside(aux_m12_p6, ref)
+ h_aux_p12_m6 = Heaviside(aux_p12_m6, ref)
+ h_aux_p12_p2 = Heaviside(aux_p12_p2, ref)
+ h_aux_mm12_m6 = Heaviside(aux_mm12_m6, ref)
+ h_aux_mm12_p2 = Heaviside(aux_mm12_p2, ref)
+ h_aux_pm12_m2 = Heaviside(aux_pm12_m2, ref)
+ h_aux_pm12_p6 = Heaviside(aux_pm12_p6, ref)
+
+ inter = locals().copy()
+ for key in inter:
+ if key.startswith('h_'):
+ r[key] = inter[key]
+ return r
+
+
+def hexspline_support(xx, yy, p):
+ """Support of hexspline of order p
+
+    The support is a hexagon of area Omega p^2
+ Omega = M_SQRT3 / 2
+ """
+ return hex_c(xx, yy, rad=p / M_SQRT3, ang=0.0)
+
+
+def hexspline_bbox(p):
+ """Bounding box of hexspline of order p
+
+    The support is a hexagon of area Omega p^2
+ Omega = M_SQRT3 / 2
+ """
+ # (x1, x2), (y1, y2)
+ # TODO: review numbers
+ x1 = p / M_SQRT3
+ y1 = p / 2.0
+ return (-x1, x1), (-y1, y1)
+
+
+def hexspline_gauss(xx, yy, p):
+ """Approximation of the p-order hexspline by a bivariate normal
+
+ https://miplab.epfl.ch/pub/vandeville0202.pdf
+
+ Least-squares spline resampling to a hexagonal lattice
+ Signal Processing:Image Communication17 (2002) 393-408
+
+ Eq B.4
+ """
+ from scipy.stats import multivariate_normal
+ zz = np.array([xx, yy])
+ zz = np.moveaxis(zz, 0, -1)
+ in_support = hexspline_support(xx, yy, p)
+
+ coeff = 5 * M_SQRT3 / 144
+ omega = M_SQRT3 / 2
+ sigma = coeff * np.eye(2)
+ covar = p * sigma / omega
+
+ out = omega * multivariate_normal.pdf(zz, mean=[0, 0], cov=covar)
+ out[~in_support] = 0
+ return out
+
+
+def OsincH_2pi(x, y):
+ t1 = np.cos(-x / (2 * M_SQRT3) + y / 2) - np.cos(x / M_SQRT3)
+ t2 = np.cos(x / (2 * M_SQRT3) + y / 2) - np.cos(x / M_SQRT3)
+ ts = t1 / (x + M_SQRT3 * y) + t2 / (x - M_SQRT3 * y)
+ return 2 * M_SQRT3 * ts / x
+
+
+def sincH(x, y):
+ """Generalization of the sinc function to a hexagonal grid
+
+    The function is 1 at (0, 0) and 0 at all other knots of the grid
+ """
+    # This expression is numerically unstable near (0, 0), where its
+    # limiting value is 1; zeros are nudged to 1e-20 below to avoid 0/0
+ x = np.asanyarray(x)
+ y = np.asanyarray(y)
+
+ xx = 2 * np.pi * np.where(x == 0, 1.0e-20, x)
+ yy = 2 * np.pi * np.where(y == 0, 1.0e-20, y)
+
+ t3 = np.cos(xx / M_SQRT3)
+ t1 = np.cos(-xx / (2 * M_SQRT3) + yy / 2.0) - t3
+ t2 = np.cos(xx / (2 * M_SQRT3) + yy / 2.0) - t3
+ ts = t1 / (xx + M_SQRT3 * yy) + t2 / (xx - M_SQRT3 * yy)
+ return 4 * ts / xx
+
+
+# Convolution, compute kernel
+def rescaling_kernel(p, scale=1):
+ """Rescaling kernel from hexgrid to rectangular grid"""
+ from megaradrp.simulation.convolution import setup_grid
+ from scipy import signal
+ from scipy.interpolate import RectBivariateSpline
+
+ Dx = 0.005
+ Dy = 0.005
+ DA = Dx * Dy
+ # TODO: the support should be computed from the scale and p
+ xsize = ysize = 3.0
+ xx, yy, xs, ys, xl, yl = setup_grid(xsize, ysize, Dx, Dy)
+
+ detR0 = M_SQRT3 / 2
+ detR1 = scale * scale
+
+ # index of bspline
+ n = p - 1
+
+ rect_kernel = signal.bspline(xx / scale, n) * signal.bspline(yy / scale, n) / detR1
+
+ hex1 = hexspline1(xx, yy)
+
+ if p == 1:
+ hex_kernel = hex1
+ elif p == 2:
+ hex2 = signal.fftconvolve(hex1, hex1, mode='same') * DA / detR0
+ hex_kernel = hex2
+ elif p == 3:
+ hex2 = signal.fftconvolve(hex1, hex1, mode='same') * DA / detR0
+ hex3 = signal.fftconvolve(hex2, hex1, mode='same') * DA / detR0
+ hex_kernel = hex3
+ else:
+ raise ValueError('p>3 not implemented')
+
+ kernel = signal.fftconvolve(rect_kernel, hex_kernel, mode='same') * DA
+ rbs = RectBivariateSpline(xs, ys, kernel)
+ return rbs
diff --git a/megaradrp/processing/sky.py b/megaradrp/processing/sky.py
new file mode 100644
index 00000000..bdd285d6
--- /dev/null
+++ b/megaradrp/processing/sky.py
@@ -0,0 +1,88 @@
+#
+# Copyright 2019-2020 Universidad Complutense de Madrid
+#
+# This file is part of Megara DRP
+#
+# SPDX-License-Identifier: GPL-3.0+
+# License-Filename: LICENSE.txt
+#
+
+import logging
+
+import numpy
+
+import megaradrp.instrument.focalplane as fp
+from numina.frame.utils import copy_img
+
+
+def subtract_sky(img, ignored_sky_bundles=None, logger=None):
+ # Sky subtraction
+
+ if logger is None:
+ logger = logging.getLogger(__name__)
+
+ logger.info('obtain fiber information')
+ sky_img = copy_img(img)
+ final_img = copy_img(img)
+ fp_conf = fp.FocalPlaneConf.from_img(sky_img)
+ # Sky fibers
+ skyfibs = fp_conf.sky_fibers(valid_only=True,
+ ignored_bundles=ignored_sky_bundles)
+ logger.debug('sky fibers are: %s', skyfibs)
+ # Create empty sky_data
+ target_data = img[0].data
+
+ target_map = img['WLMAP'].data
+ sky_data = numpy.zeros_like(img[0].data)
+ sky_map = numpy.zeros_like(img['WLMAP'].data)
+ sky_img[0].data = sky_data
+
+ for fibid in skyfibs:
+ rowid = fibid - 1
+ sky_data[rowid] = target_data[rowid]
+ sky_map[rowid] = target_map[rowid]
+ # Sum
+ coldata = sky_data.sum(axis=0)
+ colsum = sky_map.sum(axis=0)
+
+ # Divide only where map is > 0
+ mask = colsum > 0
+ avg_sky = numpy.zeros_like(coldata)
+ avg_sky[mask] = coldata[mask] / colsum[mask]
+
+ # This should be done only on valid fibers
+ logger.info('ignoring invalid fibers: %s', fp_conf.invalid_fibers())
+ for fibid in fp_conf.valid_fibers():
+ rowid = fibid - 1
+ final_img[0].data[rowid, mask] = img[0].data[rowid, mask] - avg_sky[mask]
+ # Update headers
+ #
+ return final_img, img, sky_img
+
+
+def subtract_sky_rss(img, sky_img, ignored_sky_bundles=None, logger=None):
+ """Subtract a sky image from an image"""
+ # Sky subtraction
+
+ if logger is None:
+ logger = logging.getLogger(__name__)
+
+ #logger.info('obtain fiber information')
+ final_img = copy_img(img)
+ # fiberconf_sky = dm.get_fiberconf(sky_img)
+ # fiberconf_target = dm.get_fiberconf(img)
+
+ logger.debug('using WLMAP extension to compute valid regions')
+
+ v_map = img['WLMAP'].data > 0
+ sky_map = numpy.zeros_like(img['WLMAP'].data)
+ sky_data = sky_img[0].data
+ sky_map[:] = v_map[:]
+
+ # This should be done only on valid fibers
+ #logger.info('ignoring invalid fibers: %s', fiberconf_target.invalid_fibers())
+ final_img[0].data[v_map] = img[0].data[v_map] - sky_data[v_map]
+ final_img[0].data[~v_map] = 0.0
+ # Update headers
+ #
+ return final_img, img, sky_img
\ No newline at end of file
diff --git a/megaradrp/processing/tests/test_cube.py b/megaradrp/processing/tests/test_cube.py
new file mode 100644
index 00000000..26b66cea
--- /dev/null
+++ b/megaradrp/processing/tests/test_cube.py
@@ -0,0 +1,56 @@
+
+import pytest
+
+import astropy.wcs
+
+from megaradrp.tests.simpleobj import create_spec_header, create_sky_header
+from megaradrp.processing.wavecalibration import header_add_barycentric_correction
+
+from ..cube import create_cube, merge_wcs, merge_wcs_alt
+
+
+def test_create_cube_raise():
+ with pytest.raises(ValueError):
+ create_cube(None, None, 3)
+
+
+def test_merge_wcs():
+ hdr1 = create_spec_header()
+ hdr1 = header_add_barycentric_correction(hdr1)
+ hdr2 = create_sky_header()
+ res = merge_wcs(hdr2, hdr1)
+ cunit3 = res['CUNIT3']
+ assert cunit3 == ''
+
+
+def test_merge_wcs_2():
+ import astropy.wcs
+ hdr_sky = create_sky_header()
+ hdr_spec = create_spec_header()
+ hdr_spec = header_add_barycentric_correction(hdr_spec)
+ allw = astropy.wcs.find_all_wcs(hdr_spec)
+ out = hdr_spec.copy()
+ for w in allw:
+ ss = w.wcs.alt
+ merge_wcs_alt(hdr_sky, hdr_spec, out, spec_suffix=ss)
+
+ assert True
+
+
+def test_merge2_wcs():
+ hdr_sky = create_sky_header()
+ hdr_spec = create_spec_header()
+ hdr_spec = header_add_barycentric_correction(hdr_spec)
+ wcs_sky = astropy.wcs.WCS(header=hdr_sky)
+ wcs_spec = astropy.wcs.WCS(header=hdr_spec, key='B')
+ wcs3 = wcs_sky.sub([1,2,0])
+ wcs3.wcs.ctype[2] = wcs_spec.wcs.ctype[0]
+ wcs3.wcs.crval[2] = wcs_spec.wcs.crval[0]
+ wcs3.wcs.crpix[2] = wcs_spec.wcs.crpix[0]
+ wcs3.wcs.cdelt[2] = wcs_spec.wcs.cdelt[0]
+ wcs3.wcs.cunit[2] = wcs_spec.wcs.cunit[0]
+ wcs3.wcs.specsys = wcs_spec.wcs.specsys
+ wcs3.wcs.ssysobs = wcs_spec.wcs.ssysobs
+ wcs3.wcs.velosys = wcs_spec.wcs.velosys
+ hdr3 = wcs3.to_header(key='B')
+ assert True
\ No newline at end of file
diff --git a/megaradrp/processing/tests/test_hexspline.py b/megaradrp/processing/tests/test_hexspline.py
new file mode 100644
index 00000000..d19bf73e
--- /dev/null
+++ b/megaradrp/processing/tests/test_hexspline.py
@@ -0,0 +1,22 @@
+import pytest
+import numpy as np
+import math
+
+from ..hexspline import rescaling_kernel, hexspline2
+
+@pytest.mark.parametrize("p", [1, 2])
+@pytest.mark.parametrize("scale", [0.5, 0.8, 1])
+def test_rescaling_kernel_normalization(p, scale):
+ rbs = rescaling_kernel(p, scale=scale)
+ expected_area = math.sqrt(3) / 2
+ area = rbs.integral(-3.0, 3.0, -3.0, 3.0)
+ assert np.allclose(area, expected_area, rtol=1e-2)
+
+
+def test_hexspline2():
+ x = [0.45, -0.2, 1, 0, 0.68, 1.2]
+ y = [-0.8, -0.3, 0.5, 0, 0.2, 0.5]
+ expected_res = [5.60769515e-02, 5.78341925e-01, 6.40987562e-16, 1.00000000e+00,
+ 2.11842907e-01, 7.69185075e-16]
+ res = hexspline2(x, y)
+ assert np.allclose(res, expected_res, rtol=1e-2)
\ No newline at end of file
diff --git a/megaradrp/processing/tests/test_sky.py b/megaradrp/processing/tests/test_sky.py
new file mode 100644
index 00000000..269db7ed
--- /dev/null
+++ b/megaradrp/processing/tests/test_sky.py
@@ -0,0 +1,32 @@
+import numpy
+
+from megaradrp.datamodel import create_default_fiber_header
+from ..sky import subtract_sky_rss
+
+
+def create_rss(value, wlmap):
+ import astropy.io.fits as fits
+ data1 = value + numpy.zeros((623, 4300), dtype='float32')
+ hdu = fits.PrimaryHDU(data1)
+ hdrf = create_default_fiber_header('LCB')
+ fibers = fits.ImageHDU(header=hdrf, name='FIBERS')
+ rss_map = fits.ImageHDU(wlmap, name='WLMAP')
+ return fits.HDUList([hdu, fibers, rss_map])
+
+
+def test_subtract_sky_rss():
+
+ wlmap = numpy.zeros((623, 4300), dtype='float32')
+ wlmap[:,350:4105] = 1.0
+ wlmap[622,:] = 0
+ img1 = create_rss(1000, wlmap)
+ img2 = create_rss(400, wlmap)
+
+ final_img, img, sky_img = subtract_sky_rss(img1, img2)
+ assert img is img1
+ # In final image, regions outside WLMAP must be at zero
+ assert final_img[0].data[:, 100:200].min() == 0
+ assert final_img[0].data[:, 100:200].max() == 0
+
+ assert final_img[0].data[622, :].max() == 0
+ assert final_img[0].data[622, :].min() == 0
diff --git a/megaradrp/processing/tests/test_wcalib.py b/megaradrp/processing/tests/test_wcalib.py
new file mode 100644
index 00000000..e8e393df
--- /dev/null
+++ b/megaradrp/processing/tests/test_wcalib.py
@@ -0,0 +1,36 @@
+import pytest
+import astropy.io.fits as fits
+
+from megaradrp.tests.simpleobj import create_spec_header
+from ..wavecalibration import header_add_barycentric_correction
+
+
+def test_add_barycentric_missing1():
+ hdr = create_spec_header()
+ del hdr['RADEG']
+
+ with pytest.raises(KeyError):
+ header_add_barycentric_correction(hdr, key='b')
+
+
+def test_add_barycentric_missing2():
+ hdr = create_spec_header()
+ del hdr['DATE-OBS']
+
+ with pytest.raises(KeyError):
+ header_add_barycentric_correction(hdr, key='b')
+
+
+def test_add_barycentric_missing3():
+ hdr = fits.Header()
+ hdr['DATE-OBS'] = '2017-08-23T21:38:30.55'
+ # GTC
+ hdr['OBSGEO-X'] = 5327285.0921
+ hdr['OBSGEO-Y'] = -1718777.1125
+ hdr['OBSGEO-Z'] = 3051786.7327
+
+ hdr['RADEG'] = 285.481037748898
+ hdr['DECDEG'] = 42.4882140636786
+
+ with pytest.raises(TypeError):
+ header_add_barycentric_correction(hdr, key='b')
diff --git a/megaradrp/processing/wavecalibration.py b/megaradrp/processing/wavecalibration.py
index cbf4bc3f..77f7cef3 100644
--- a/megaradrp/processing/wavecalibration.py
+++ b/megaradrp/processing/wavecalibration.py
@@ -19,6 +19,7 @@
import astropy.wcs
import astropy.io.fits as fits
+import numina.datamodel as dm
import numina.array.utils as utils
from numina.frame.utils import copy_img
from numina.processing import Corrector
@@ -48,29 +49,6 @@ def create_internal_wcs_(self):
return w
-def get_imgid(img, prefix=True):
- hdr = img[0].header
- base = "{}"
- if 'UUID' in hdr:
- pre = 'uuid:{}'
- value = hdr['UUID']
- elif 'DATE-OBS' in hdr:
- pre = 'dateobs:{}'
- value = hdr['DATE-OBS']
- elif 'checksum' in hdr:
- pre = 'checksum:{}'
- value = hdr['checksum']
- elif 'filename' in hdr:
- pre = 'file:{}'
- value = hdr['filename']
- else:
- raise ValueError('no method to identity image')
- if prefix:
- return pre.format(value)
- else:
- return base.format(value)
-
-
class WavelengthCalibrator(Corrector):
"""A Node that applies wavelength calibration."""
@@ -115,7 +93,7 @@ def calibrate_wl_rss_megara(rss, solutionwl, dtype='float32', span=0, inplace=Fa
A Row stacked Spectra MEGARA image, WL calibrated
"""
- imgid = get_imgid(rss)
+ imgid = dm.get_imgid(rss)
_logger.debug('wavelength calibration in image %s', imgid)
_logger.debug('with wavecalib %s', solutionwl.calibid)
_logger.debug('offsets are %s', solutionwl.global_offset.coef)
@@ -176,7 +154,7 @@ def calibrate_wl_rss(rss, solutionwl, npix, targetwcs, dtype='float32', span=0,
# This is a new HDUList
rss = copy_img(rss)
- imgid = get_imgid(rss)
+ imgid = dm.get_imgid(rss)
_logger.debug('wavelength calibration in image %s', imgid)
_logger.debug('with wavecalib %s', solutionwl.calibid)
_logger.debug('offsets are %s', solutionwl.global_offset.coef)
@@ -195,7 +173,10 @@ def calibrate_wl_rss(rss, solutionwl, npix, targetwcs, dtype='float32', span=0,
hdr = rss[0].header
_logger.debug('Add WCS headers')
rss_add_wcs(hdr, targetwcs.crval, targetwcs.cdelt, targetwcs.crpix)
-
+ try:
+ header_add_barycentric_correction(hdr, key='B')
+ except KeyError as error:
+ _logger.warning('Missing key %s, cannot add barycentric correction', error)
_logger.debug('Add calibration headers')
hdr['NUM-WAV'] = solutionwl.calibid
hdr['history'] = 'Wavelength calibration with {}'.format(solutionwl.calibid)
@@ -272,6 +253,69 @@ def create_internal_wcs_(wlr0, delt, crpix):
return w
+def header_add_barycentric_correction(hdr, key='B', out=None):
+ """Add WCS keywords with barycentric correction
+
+ Raises
+ ------
+ KeyError
+ If a required keyword is missing
+ TypeError
+ If the header does not contain a spectral axis
+ """
+ from astropy.coordinates import SkyCoord, EarthLocation
+ import astropy.time
+ import astropy.constants as cons
+
+ # Header must have DATE-OBS
+ if 'DATE-OBS' not in hdr:
+ raise KeyError("Keyword 'DATE-OBS' not found.")
+ # Header must contain a primary WCS
+ # Header must contain RADEG and DECDEG
+
+ if 'OBSGEO-X' not in hdr:
+ warnings.warn('OBSGEO- keywords not defined, using default values for GTC', RuntimeWarning)
+ # Geocentric coordinates of GTC
+ hdr['OBSGEO-X'] = 5327285.0921
+ hdr['OBSGEO-Y'] = -1718777.1125
+ hdr['OBSGEO-Z'] = 3051786.7327
+
+ # Get main WCS
+ wcs0 = astropy.wcs.WCS(hdr)
+ if wcs0.wcs.spec == -1:
+ # We don't have a spec axis
+ raise TypeError('Header does not contain spectral axis')
+ gtc = EarthLocation.from_geocentric(wcs0.wcs.obsgeo[0], wcs0.wcs.obsgeo[1], wcs0.wcs.obsgeo[2], unit='m')
+ date_obs = astropy.time.Time(wcs0.wcs.dateobs, format='fits')
+ # if frame='fk5', we need to pass the epoch and equinox
+ sc = SkyCoord(ra=hdr['RADEG'], dec=hdr['DECDEG'], unit='deg')
+ rv = sc.radial_velocity_correction(obstime=date_obs, location=gtc)
+ factor = (1 + rv / cons.c).to('').value
+
+ if out is None:
+ out = hdr
+
+ out['WCSNAME{}'.format(key)] = 'Barycentric correction'
+ # out['CNAME1{}'.format(key)] = 'AxisV'
+ out['CTYPE1{}'.format(key)] = hdr['CTYPE1']
+ out['CRPIX1{}'.format(key)] = hdr['CRPIX1']
+ out['CRVAL1{}'.format(key)] = hdr['CRVAL1'] * factor
+ out['CDELT1{}'.format(key)] = hdr['CDELT1'] * factor
+ out['CUNIT1{}'.format(key)] = hdr['CUNIT1']
+
+ for keyword in ['CRPIX2', 'CRVAL2', 'CDELT2', 'CTYPE2']:
+ try:
+ out['{}{}'.format(keyword, key)] = hdr['{}'.format(keyword)]
+ except KeyError:
+ # Ignore non-existing key
+ pass
+
+ out['VELOSYS{}'.format(key)] = rv.to('m / s').value
+ out['SPECSYS{}'.format(key)] = 'BARYCENT'
+ out['SSYSOBS{}'.format(key)] = 'TOPOCENT'
+ return out
+
+
def resample_rss_flux(arr, solutionwl, npix, finalwcs, span=0, fill=0):
"""Resample array according to a wavelength calibration solution
diff --git a/megaradrp/processing/wcs.py b/megaradrp/processing/wcs.py
index 21319843..416aefe5 100644
--- a/megaradrp/processing/wcs.py
+++ b/megaradrp/processing/wcs.py
@@ -11,10 +11,10 @@
import numpy
-from megaradrp.instrument import MEGARA_IAA
+import megaradrp.instrument.constants as cons
-def compute_pa_from_ipa(ipa, iaa=MEGARA_IAA):
+def compute_pa_from_ipa(ipa, iaa=cons.MEGARA_IAA.value):
"""Recompute the PA from IPA
Parameters
diff --git a/megaradrp/processing/weights.py b/megaradrp/processing/weights.py
deleted file mode 100644
index 9cac7c0a..00000000
--- a/megaradrp/processing/weights.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#
-# Copyright 2011-2018 Universidad Complutense de Madrid
-#
-# This file is part of Megara DRP
-#
-# SPDX-License-Identifier: GPL-3.0+
-# License-Filename: LICENSE.txt
-#
-
-import logging
-import multiprocessing as mp
-from astropy.io import fits
-
-import numpy as np
-from numina.processing import Corrector
-
-_logger = logging.getLogger('numina.processing')
-
-
-class WeightsCorrector(Corrector):
- '''A Node that corrects from twilight.'''
-
- def __init__(self, master_weights, datamodel=None, mark=True,
- tagger=None, dtype='float32'):
- #tagger = TagFits('NUM-MFF', 'MEGARA master_weights correction')
-
- super(WeightsCorrector, self).__init__(datamodel=datamodel, dtype=dtype)
-
- self.master_weights = master_weights
- self.processes = mp.cpu_count() - 2
- self.SIZE = 4096
-
- def decompress(self):
- '''
- :param tar_name: name of the tar file
- :return: None
- '''
-
- name = self.master_weights.fileobj.name.split('.tar')[0]
-
- aux = self.master_weights.extractall(name + '/')
- return name
-
- def run(self, img):
- '''
- :param img: reduced image
- :param tar_file: tar file to be extracted
- :return: list of extracted weights
- '''
- img = img[0].data
- _logger.debug('correct from weights in image ')
-
- path = self.decompress()
-
- _logger.info('decompress done')
- _logger.info('Starting: _load_files_paralell')
- pool = mp.Pool(processes=self.processes)
- results = [pool.apply_async(_load_files_paralell,
- args=(ite, path)) for ite in
- range(self.SIZE)]
- results = [p.get() for p in results]
- # return results
-
- _logger.info('Starting: extract_w_paralell')
-
- pool2 = mp.Pool(processes=self.processes)
- extracted_w = [pool2.apply_async(extract_w_paralell,
- args=(img[:, ite], results[ite])) for
- ite in range(self.SIZE)]
- extracted_w = [p.get() for p in extracted_w]
-
- _logger.info('extracted')
-
- hdu = fits.PrimaryHDU(np.array(extracted_w).T)
-
- return fits.HDUList([hdu])
-
-
-def extract_w_paralell(img, mlist):
- '''
- :param img:
- :param mlist: one element of the csr_matrix
- :return: result of lsqr
- '''
- from scipy.sparse.linalg import lsqr
- x = lsqr(mlist, img)
- return x[0]
-
-
-def _load_files_paralell(col, path):
- '''
- :param col: name of the fits file. It is a counter
- :param path: path where *.npz are
- :return: csr_matrix
- '''
- from scipy.sparse import csr_matrix
-
- filename = '%s/%s.npz' % (path, col)
- loader = np.load(filename)
- return csr_matrix(
- (loader['data'], loader['indices'], loader['indptr']),
- shape=loader['shape'])
diff --git a/megaradrp/products/__init__.py b/megaradrp/products/__init__.py
index 2e3c68d0..82a7ca25 100644
--- a/megaradrp/products/__init__.py
+++ b/megaradrp/products/__init__.py
@@ -1,7 +1,7 @@
# FIXME: workaround
-from megaradrp.types import MasterBias
+from megaradrp.ntypes import MasterBias
from .tracemap import TraceMap, GeometricTrace
from .wavecalibration import WavelengthCalibration
diff --git a/megaradrp/products/aperture.py b/megaradrp/products/aperture.py
new file mode 100644
index 00000000..fcc8465a
--- /dev/null
+++ b/megaradrp/products/aperture.py
@@ -0,0 +1,35 @@
+#
+# Copyright 2019 Universidad Complutense de Madrid
+#
+# This file is part of Megara DRP
+#
+# SPDX-License-Identifier: GPL-3.0+
+# License-Filename: LICENSE.txt
+#
+
+"""Products of the Megara Pipeline"""
+
+
+class GeometricAperture(object):
+ def __init__(self, fibid, boxid, start, stop):
+ self.fibid = fibid
+ self.boxid = boxid
+ self.start = start
+ self.stop = stop
+
+ @property
+ def valid(self):
+ return self.is_valid()
+
+ def aper_center(self):
+ raise NotImplementedError
+
+ def is_valid(self):
+ return True
+
+ def __getstate__(self):
+ state = self.__dict__.copy()
+ return state
+
+ def __setstate__(self, state):
+ self.__dict__ = state
diff --git a/megaradrp/products/modelmap.py b/megaradrp/products/modelmap.py
index c49be798..56e5a381 100644
--- a/megaradrp/products/modelmap.py
+++ b/megaradrp/products/modelmap.py
@@ -1,5 +1,5 @@
#
-# Copyright 2017 Universidad Complutense de Madrid
+# Copyright 2017-2019 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -14,27 +14,28 @@
from numina.util.convertfunc import json_serial_function, convert_function
-from megaradrp.products.structured import BaseStructuredCalibration
+from .structured import BaseStructuredCalibration
+from .aperture import GeometricAperture
from .traces import to_ds9_reg as to_ds9_reg_function
-class GeometricModel(object):
+class GeometricModel(GeometricAperture):
def __init__(self, fibid, boxid, start, stop, model):
- self.fibid = fibid
- self.boxid = boxid
- self.start = start
- self.stop = stop
+ super(GeometricModel, self).__init__(fibid, boxid, start, stop)
self.model = model
@property
def valid(self):
+ return self.is_valid()
+
+ def is_valid(self):
if self.model:
return True
else:
return False
def __getstate__(self):
- state = self.__dict__.copy()
+ state = super(GeometricModel, self).__getstate__()
# del state['model']
params = state['model']['params']
@@ -47,7 +48,7 @@ def __getstate__(self):
return state
def __setstate__(self, state):
- self.__dict__ = state
+ super(GeometricModel, self).__setstate__(state)
self._set_model(state['model'])
def _set_model(self, model):
@@ -62,6 +63,9 @@ def polynomial(self):
# FIXME: this is a workaround
return self.model['params']['mean']
+ def aper_center(self):
+ return self.model['params']['mean']
+
class ModelMap(BaseStructuredCalibration):
diff --git a/megaradrp/products/structured.py b/megaradrp/products/structured.py
index 63cf22ff..5c5080d9 100644
--- a/megaradrp/products/structured.py
+++ b/megaradrp/products/structured.py
@@ -14,9 +14,11 @@
import numina.core.tagexpr as tagexpr
import megaradrp.datamodel
-
+from megaradrp.datatype import MegaraDataType
class BaseStructuredCalibration(structured.BaseStructuredCalibration):
+ DATATYPE = MegaraDataType.STRUCT_PROCESSED
+
def __init__(self, instrument='unknown'):
datamodel = megaradrp.datamodel.MegaraDataModel()
super(BaseStructuredCalibration, self).__init__(instrument, datamodel)
@@ -39,3 +41,18 @@ def __getstate__(self):
st[key] = self.__dict__[key]
return st
+
+ def validate(self, obj):
+ """Validate objects with the TRACE_MAP schema"""
+ import json
+ from numina.util.jsonencoder import ExtEncoder
+ import megaradrp.validators as valid
+
+ super(BaseStructuredCalibration, self).validate(obj)
+
+ checker = valid.check_as_datatype(self.DATATYPE)
+ # We have to convert obj to a dictionary
+ # FIXME: Dumping to a string and reloading. This can be done better
+ res = json.dumps(obj.__getstate__(), cls=ExtEncoder)
+ serialized = json.loads(res)
+ return checker(serialized)
\ No newline at end of file
diff --git a/megaradrp/products/tests/test_tracemap.py b/megaradrp/products/tests/test_tracemap.py
index ad606785..f414a499 100644
--- a/megaradrp/products/tests/test_tracemap.py
+++ b/megaradrp/products/tests/test_tracemap.py
@@ -16,6 +16,7 @@
import numina.types.qc
import numina.types.structured as structured
+from megaradrp.datatype import MegaraDataType
import megaradrp.products.tracemap as tm
@@ -27,6 +28,8 @@ def create_test_tracemap():
data.tags = tags
data.uuid = uuid
data.total_fibers = 623
+ data.expected_range = [2, 4092]
+ data.ref_column = 2001
meta_info = tm.TraceMap.create_meta_info()
meta_info['instrument_name'] = instrument
meta_info['creation_date'] = data.meta_info['creation_date']
@@ -41,7 +44,8 @@ def create_test_tracemap():
type_fqn='megaradrp.products.tracemap.TraceMap',
boxes_positions=[],
type=data.name(),
- ref_column=2000,
+ ref_column=2001,
+ expected_range=[2, 4092],
global_offset=[0.0],
quality_control=numina.types.qc.QC.UNKNOWN
)
@@ -166,6 +170,7 @@ def test_load_traceMap():
assert (my_open_file.tags == state['tags'])
assert (my_open_file.uuid == state['uuid'])
assert (my_open_file.contents == state['contents'])
+ assert (my_open_file.DATATYPE == MegaraDataType.TRACE_MAP)
def test_dump_traceMap(benchmark=None):
diff --git a/megaradrp/products/tracemap.py b/megaradrp/products/tracemap.py
index 51ea077d..d7db9797 100644
--- a/megaradrp/products/tracemap.py
+++ b/megaradrp/products/tracemap.py
@@ -1,5 +1,5 @@
#
-# Copyright 2011-2017 Universidad Complutense de Madrid
+# Copyright 2011-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -9,19 +9,18 @@
"""Products of the Megara Pipeline"""
-import numpy
import numpy.polynomial.polynomial as nppol
+from megaradrp.datatype import MegaraDataType
from .structured import BaseStructuredCalibration
+from .aperture import GeometricAperture
from .traces import to_ds9_reg as to_ds9_reg_function
-class GeometricTrace(object):
+class GeometricTrace(GeometricAperture):
+ """Representation of fiber trace on the image"""
def __init__(self, fibid, boxid, start, stop, fitparms=None):
- self.fibid = fibid
- self.boxid = boxid
- self.start = start
- self.stop = stop
+ super(GeometricTrace, self).__init__(fibid, boxid, start, stop)
self.fitparms = fitparms if fitparms is not None else []
self.polynomial = None
# Update polynomial
@@ -29,18 +28,22 @@ def __init__(self, fibid, boxid, start, stop, fitparms=None):
@property
def valid(self):
+ return self.is_valid()
+
+ def is_valid(self):
if self.fitparms:
return True
else:
return False
def __getstate__(self):
- state = self.__dict__.copy()
+ state = super(GeometricTrace, self).__getstate__()
del state['polynomial']
return state
def __setstate__(self, state):
- self.__dict__ = state
+ super(GeometricTrace, self).__setstate__(state)
+
self._set_polynomial(state['fitparms'])
def _set_polynomial(self, fitparms):
@@ -49,18 +52,22 @@ def _set_polynomial(self, fitparms):
else:
self.polynomial = nppol.Polynomial([0.0])
+ def aper_center(self):
+ return self.polynomial
-class TraceMap(BaseStructuredCalibration):
+class TraceMap(BaseStructuredCalibration):
+ """Trace map calibration product"""
+ DATATYPE = MegaraDataType.TRACE_MAP
__tags__ = ['insmode', 'vph']
- """Trace map calibration product"""
- def __init__(self, instrument='unknown'):
+ def __init__(self, instrument='MEGARA'):
super(TraceMap, self).__init__(instrument)
self.contents = []
self.boxes_positions = []
self.global_offset = nppol.Polynomial([0.0])
self.ref_column = 2000
+ self.expected_range = [4, 4092]
#
def __getstate__(self):
@@ -69,14 +76,17 @@ def __getstate__(self):
st['boxes_positions'] = self.boxes_positions
st['global_offset'] = self.global_offset.coef
st['ref_column'] = self.ref_column
+ st['expected_range'] = self.expected_range
return st
def __setstate__(self, state):
super(TraceMap, self).__setstate__(state)
self.contents = [GeometricTrace(**trace) for trace in state['contents']]
+ # fibers in missing fibers and error_fitting are invalid
self.boxes_positions = state.get('boxes_positions', [])
self.global_offset = nppol.Polynomial(state.get('global_offset', [0.0]))
self.ref_column = state.get('ref_column', 2000)
+ self.expected_range = state.get('expected_range', [4, 4092])
return self
def to_ds9_reg(self, ds9reg, rawimage=False, numpix=100, fibid_at=0):
diff --git a/megaradrp/recipes/auxiliary/acquisitionlcb.py b/megaradrp/recipes/auxiliary/acquisitionlcb.py
index fe854648..c7212bd4 100644
--- a/megaradrp/recipes/auxiliary/acquisitionlcb.py
+++ b/megaradrp/recipes/auxiliary/acquisitionlcb.py
@@ -1,5 +1,5 @@
#
-# Copyright 2011-2019 Universidad Complutense de Madrid
+# Copyright 2011-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -19,8 +19,9 @@
from numina.core.validator import range_validator
from numina.constants import FWHM_G
+from megaradrp.instrument.focalplane import FocalPlaneConf
from megaradrp.recipes.scientific.base import ImageRecipe
-from megaradrp.types import ProcessedRSS, ProcessedFrame
+from megaradrp.ntypes import ProcessedRSS, ProcessedImage
class AcquireLCBRecipe(ImageRecipe):
@@ -29,7 +30,7 @@ class AcquireLCBRecipe(ImageRecipe):
This recipe processes a set of acquisition images
obtained in **LCB Acquisition** mode and returns
the offset and rotation required to center the
- fiducial object in its reference positions.
+ fiducial object in its reference positions.
See Also
--------
@@ -48,7 +49,7 @@ class AcquireLCBRecipe(ImageRecipe):
`reduced_image` of the recipe result.
The apertures in the 2D image are extracted, using the information in
- `master_traces` and resampled according to the wavelength calibration in
+ `master_apertures` and resampled according to the wavelength calibration in
`master_wlcalib`. Then is divided by the `master_fiberflat`.
The resulting RSS is saved as an intermediate
result named 'reduced_rss.fits'. This RSS is also returned in the field
@@ -58,9 +59,9 @@ class AcquireLCBRecipe(ImageRecipe):
in the fibers configuration. The RSS with sky subtracted is returned ini the
field `final_rss` of the recipe result.
- Then, the centroid of the fiducial object nearest to the center of the field
+ Then, the centroid of the fiducial object nearest to the center of the field
is computed. The offset needed to center
- the fiducial object in the center of the LCB is returned.
+ the fiducial object in the center of the LCB is returned.
"""
@@ -74,7 +75,7 @@ class AcquireLCBRecipe(ImageRecipe):
nelem=2
)
- reduced_image = Result(ProcessedFrame)
+ reduced_image = Result(ProcessedImage)
reduced_rss = Result(ProcessedRSS)
final_rss = Result(ProcessedRSS)
offset = Result(list)
@@ -93,16 +94,19 @@ def run(self, rinput):
isb = rinput.ignored_sky_bundles
if isb:
self.logger.info('sky bundles ignored: %s', isb)
- final, origin, sky = self.run_sky_subtraction(reduced1d,
- ignored_sky_bundles=isb)
+ final, origin, sky = self.run_sky_subtraction(
+ reduced1d,
+ sky_rss=rinput.sky_rss,
+ ignored_sky_bundles=isb
+ )
self.logger.info('end sky subtraction')
else:
final = reduced1d
origin = final
sky = final
- fiberconf = self.datamodel.get_fiberconf(final)
- self.logger.debug("LCB configuration is %s", fiberconf.conf_id)
+ fp_conf = FocalPlaneConf.from_img(final)
+ self.logger.debug("LCB configuration is %s", fp_conf.conf_id)
rssdata = final[0].data
@@ -118,13 +122,13 @@ def run(self, rinput):
flux_per_cell_all = rssdata[:, cut1:cut2].mean(axis=1)
max_cell = flux_per_cell_all.argmax() + 1
- max_fiber_ = fiberconf.fibers[max_cell]
+ max_fiber_ = fp_conf.fibers[max_cell]
self.logger.info("maximum flux in spaxel %d -- %s", max_cell, max_fiber_.name)
# Extend points with the brightest spaxel
points.append((max_fiber_.x, max_fiber_.y))
- fibers = fiberconf.conected_fibers(valid_only=True)
+ fibers = fp_conf.connected_fibers(valid_only=True)
grid_coords = []
for fiber in fibers:
diff --git a/megaradrp/recipes/auxiliary/acquisitionmos.py b/megaradrp/recipes/auxiliary/acquisitionmos.py
index f586bf54..8fd67246 100644
--- a/megaradrp/recipes/auxiliary/acquisitionmos.py
+++ b/megaradrp/recipes/auxiliary/acquisitionmos.py
@@ -1,5 +1,5 @@
#
-# Copyright 2011-2019 Universidad Complutense de Madrid
+# Copyright 2011-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -17,9 +17,9 @@
from numina.core import Result, Parameter
from numina.core.qc import QC
-from megaradrp.datamodel import TargetType
+from megaradrp.instrument.focalplane import TargetType, FocalPlaneConf
from megaradrp.recipes.scientific.base import ImageRecipe
-from megaradrp.types import ProcessedRSS, ProcessedFrame
+from megaradrp.ntypes import ProcessedRSS, ProcessedFrame
from megaradrp.utils import add_collapsed_mos_extension
@@ -49,7 +49,7 @@ class AcquireMOSRecipe(ImageRecipe):
`reduced_image` of the recipe result.
The apertures in the 2D image are extracted, using the information in
- `master_traces` and resampled according to the wavelength calibration in
+ `master_apertures` and resampled according to the wavelength calibration in
`master_wlcalib`. Then is divided by the `master_fiberflat`.
The resulting RSS is saved as an intermediate
result named 'reduced_rss.fits'. This RSS is also returned in the field
@@ -90,19 +90,22 @@ def run(self, rinput):
isb = rinput.ignored_sky_bundles
if isb:
self.logger.info('sky bundles ignored: %s', isb)
- final, origin, sky = self.run_sky_subtraction(reduced1d,
- ignored_sky_bundles=isb)
+ final, origin, sky = self.run_sky_subtraction(
+ reduced1d,
+ sky_rss=rinput.sky_rss,
+ ignored_sky_bundles=isb
+ )
self.logger.info('end sky subtraction')
else:
final = reduced1d
origin = final
sky = final
- fiberconf = self.datamodel.get_fiberconf(final)
+ fp_conf = FocalPlaneConf.from_img(final)
cut1, cut2 = rinput.extraction_region
- self.logger.debug("MOS configuration is %s", fiberconf.conf_id)
+ self.logger.debug("MOS configuration is %s", fp_conf.conf_id)
rssdata = final[0].data
scale, funit = self.datamodel.fiber_scale_unit(final, unit=True)
self.logger.debug('unit is %s', funit)
@@ -111,7 +114,7 @@ def run(self, rinput):
p1 = []
q1 = []
temp = []
- for key, bundle in fiberconf.bundles.items():
+ for key, bundle in fp_conf.bundles.items():
if bundle.target_type == TargetType.REFERENCE:
self.logger.debug("%s %s %s", key, bundle.target_name, bundle.target_type)
sorted_fibers = [bundle.fibers[key] for key in sorted(bundle.fibers)]
diff --git a/megaradrp/recipes/auxiliary/focusspec.py b/megaradrp/recipes/auxiliary/focusspec.py
index a59f14eb..e8d45b3e 100644
--- a/megaradrp/recipes/auxiliary/focusspec.py
+++ b/megaradrp/recipes/auxiliary/focusspec.py
@@ -1,5 +1,5 @@
#
-# Copyright 2016-2019 Universidad Complutense de Madrid
+# Copyright 2016-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -29,7 +29,7 @@
from numina.array.peaks.peakdet import find_peaks_indexes, refine_peaks
from megaradrp.core.recipe import MegaraBaseRecipe
-from megaradrp.types import FocusWavelength, ProcessedFrame
+from megaradrp.ntypes import FocusWavelength, ProcessedFrame
import megaradrp.requirements as reqs
from megaradrp.processing.combine import basic_processing_with_combination_frames
from megaradrp.processing.aperture import ApertureExtractor
@@ -88,7 +88,7 @@ class FocusSpectrographRecipe(MegaraBaseRecipe):
master_bias = reqs.MasterBiasRequirement()
master_dark = reqs.MasterDarkRequirement()
master_bpm = reqs.MasterBPMRequirement()
- master_traces = reqs.MasterAperturesRequirement()
+ master_apertures = reqs.MasterAperturesRequirement(alias='master_traces')
extraction_offset = Parameter([0.0], 'Offset traces for extraction', accept_scalar=True)
master_wlcalib = reqs.WavelengthCalibrationRequirement()
@@ -163,7 +163,7 @@ def run(self, rinput):
try:
img = basic_processing_with_combination_frames(frames, flow, method=combine.median, errors=False)
calibrator_aper = ApertureExtractor(
- rinput.master_traces,
+ rinput.master_apertures,
self.datamodel,
offset=rinput.extraction_offset
)
@@ -173,7 +173,7 @@ def run(self, rinput):
self.save_intermediate_img(img1d, 'focus1d-%s.fits' % (focus,))
self.logger.info('find lines and compute FWHM')
- lines_rss_fwhm = self.run_on_image(img1d, rinput.master_traces,
+ lines_rss_fwhm = self.run_on_image(img1d, rinput.master_apertures,
flux_limit,
valid_traces=valid_traces,
times_sigma=rinput.tsigma
diff --git a/megaradrp/recipes/auxiliary/focustel.py b/megaradrp/recipes/auxiliary/focustel.py
index d11fe66f..91d94d6b 100644
--- a/megaradrp/recipes/auxiliary/focustel.py
+++ b/megaradrp/recipes/auxiliary/focustel.py
@@ -1,5 +1,5 @@
#
-# Copyright 2016-2018 Universidad Complutense de Madrid
+# Copyright 2016-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -17,6 +17,7 @@
from numina.core.requirements import ObservationResultRequirement
from numina.exceptions import RecipeError
+from megaradrp.instrument.focalplane import FocalPlaneConf
from megaradrp.recipes.scientific.base import ImageRecipe
import megaradrp.requirements as reqs
from megaradrp.processing.combine import basic_processing_with_combination_frames
@@ -59,7 +60,7 @@ class FocusTelescopeRecipe(ImageRecipe):
master_bias = reqs.MasterBiasRequirement()
master_dark = reqs.MasterDarkRequirement()
master_bpm = reqs.MasterBPMRequirement()
- master_traces = reqs.MasterAperturesRequirement()
+ master_apertures = reqs.MasterAperturesRequirement(alias='master_traces')
extraction_offset = Parameter([0.0], 'Offset traces for extraction', accept_scalar=True)
master_wlcalib = reqs.WavelengthCalibrationRequirement()
position = Requirement(list, "Position of the reference object", default=(0, 0))
@@ -105,7 +106,7 @@ def run(self, rinput):
# 1D, extraction, Wl calibration, Flat fielding
_, img1d = self.run_reduction_1d(
img,
- rinput.master_traces,
+ rinput.master_apertures,
rinput.master_wlcalib,
rinput.master_fiberflat,
offset=rinput.extraction_offset
@@ -143,13 +144,13 @@ def run_on_image(self, img, coors):
from scipy.spatial import KDTree
- fiberconf = self.datamodel.get_fiberconf(img)
- self.logger.debug("LCB configuration is %s", fiberconf.conf_id)
+ fp_conf = FocalPlaneConf.from_img(img)
+ self.logger.debug("LCB configuration is %s", fp_conf.conf_id)
rssdata = img[0].data
cut1 = 1000
cut2 = 3000
points = [(0, 0)] # Center of fiber 313
- fibers = fiberconf.conected_fibers(valid_only=True)
+ fibers = fp_conf.connected_fibers(valid_only=True)
grid_coords = []
for fiber in fibers:
grid_coords.append((fiber.x, fiber.y))
diff --git a/megaradrp/recipes/calibration/arc.py b/megaradrp/recipes/calibration/arc.py
index 78b6e02a..a443df77 100644
--- a/megaradrp/recipes/calibration/arc.py
+++ b/megaradrp/recipes/calibration/arc.py
@@ -37,7 +37,7 @@
from numina.util.flow import SerialFlow
from numina.array import combine
-from megaradrp.types import ProcessedFrame, ProcessedRSS
+from megaradrp.ntypes import ProcessedFrame, ProcessedRSS
from megaradrp.processing.combine import basic_processing_with_combination
from megaradrp.processing.aperture import ApertureExtractor
from megaradrp.processing.fiberflat import Splitter, FlipLR
@@ -388,8 +388,8 @@ def calibrate_wl(self, rss, lines_catalog, poldeg, tracemap, nlines,
peak, fwhm = self.calc_fwhm_of_line(row, peak_int,
lwidth=20)
except Exception as error:
- self.logger.error("%s", error)
- self.logger.error('error in feature %s', feature)
+ self.logger.warning("%s", error)
+ self.logger.warning('error in feature %s', feature)
# workaround
peak = row[peak_int]
fwhm = 0.0
@@ -401,9 +401,8 @@ def calibrate_wl(self, rss, lines_catalog, poldeg, tracemap, nlines,
initial_data_wlcalib.contents.append(new)
except (ValueError, TypeError, IndexError) as error:
- self.logger.error("%s", error)
- self.logger.error('error in row %d, fibid %d', idx, fibid)
- traceback.print_exc()
+ self.logger.warning("%s", error)
+ self.logger.warning('problem in row %d, fibid %d', idx, fibid)
initial_data_wlcalib.error_fitting.append(fibid)
error_contador += 1
@@ -453,8 +452,7 @@ def calibrate_wl(self, rss, lines_catalog, poldeg, tracemap, nlines,
# previous results stored in data_wlcalib
list_poly_vs_fiber = self.model_coeff_vs_fiber(
initial_data_wlcalib, poldeg_initial,
- times_sigma_reject=5,
- debugplot=0)
+ times_sigma_reject=5)
# recompute data_wlcalib from scratch
missing_fib = 0
error_contador = 0
@@ -639,8 +637,7 @@ def generate_fwhm_image(self, solutions):
return (final_image)
def model_coeff_vs_fiber(self, data_wlcalib, poldeg,
- times_sigma_reject=5,
- debugplot=0):
+ times_sigma_reject=5):
"""Model polynomial coefficients vs. fiber number.
For each polynomial coefficient, a smooth polynomial dependence
@@ -648,6 +645,15 @@ def model_coeff_vs_fiber(self, data_wlcalib, poldeg,
fibers which coefficients depart from that smooth variation.
"""
+ if self.intermediate_results:
+ from numina.array.display.matplotlib_qt import plt
+ from matplotlib.backends.backend_pdf import PdfPages
+ pdf = PdfPages('wavecal_refine_iter1.pdf')
+ local_debugplot = 11
+ else:
+ pdf = None
+ local_debugplot = 0
+
list_fibid = []
list_coeffs = []
for item in (data_wlcalib.contents):
@@ -658,6 +664,9 @@ def model_coeff_vs_fiber(self, data_wlcalib, poldeg,
list_coeffs.append(item.solution.coeff)
# determine bad fits from each independent polynomial coefficient
+ # (bad fits correspond to unexpected coefficient values for any of
+ # the coefficients; i.e., the number of bad fits increases as we
+ # examine different coefficients)
poldeg_coeff_vs_fiber = 5
reject_all = None # avoid PyCharm warning
fibid = numpy.array(list_fibid)
@@ -669,7 +678,7 @@ def model_coeff_vs_fiber(self, data_wlcalib, poldeg,
deg=poldeg_coeff_vs_fiber,
times_sigma_reject=times_sigma_reject,
)
- if abs(debugplot) % 10 != 0:
+ if pdf is not None:
polfit_residuals(
x=fibid,
y=coeff,
@@ -678,17 +687,22 @@ def model_coeff_vs_fiber(self, data_wlcalib, poldeg,
xlabel='fibid',
ylabel='coeff a_' + str(i),
title='Identifying bad fits',
- debugplot=debugplot
+ show=False,
+ debugplot=local_debugplot
)
+ pdf.savefig()
+ plt.close()
+
if i == 0:
+ # initialize bad fits
reject_all = numpy.copy(reject)
- if abs(debugplot) >= 10:
- print('coeff a_' + str(i) + ': nreject=', sum(reject_all))
else:
# add new bad fits
reject_all = numpy.logical_or(reject_all, reject)
- if abs(debugplot) >= 10:
- print('coeff a_' + str(i) + ': nreject=', sum(reject_all))
+ dumlabel = 'coeff a_' + str(i) + ': nreject=' + \
+ str(sum(reject_all))
+ self.logger.info(dumlabel)
+ self.logger.info(fibid[reject_all])
# determine new fits excluding all fibers with bad fits
list_poly_vs_fiber = []
@@ -702,11 +716,19 @@ def model_coeff_vs_fiber(self, data_wlcalib, poldeg,
xlabel='fibid',
ylabel='coeff a_' + str(i),
title='Computing filtered fits',
- debugplot=debugplot
+ show=False,
+ debugplot=local_debugplot
)
+ if pdf is not None:
+ pdf.savefig()
+ plt.close()
list_poly_vs_fiber.append(poly)
- if abs(debugplot) >= 10:
- print("list_poly_vs_fiber:\n", list_poly_vs_fiber)
+ self.logger.info("list_poly_vs_fiber:")
+ for i in range(poldeg + 1):
+ self.logger.info(list_poly_vs_fiber[i])
+
+ if pdf is not None:
+ pdf.close()
return list_poly_vs_fiber
diff --git a/megaradrp/recipes/calibration/base.py b/megaradrp/recipes/calibration/base.py
index a8f5423c..11ee8c6e 100644
--- a/megaradrp/recipes/calibration/base.py
+++ b/megaradrp/recipes/calibration/base.py
@@ -17,7 +17,7 @@
from megaradrp.core.recipe import MegaraBaseRecipe
from megaradrp.requirements import MasterBiasRequirement
-from megaradrp.types import MasterFiberFlat
+from megaradrp.ntypes import MasterFiberFlat
_logger = logging.getLogger('numina.recipes.megara')
diff --git a/megaradrp/recipes/calibration/bias.py b/megaradrp/recipes/calibration/bias.py
index 33dddb02..c29e2169 100644
--- a/megaradrp/recipes/calibration/bias.py
+++ b/megaradrp/recipes/calibration/bias.py
@@ -7,12 +7,12 @@
# License-Filename: LICENSE.txt
#
-from numina.core import Result
+from numina.core import Result, Parameter
from numina.array import combine
from megaradrp.processing.combine import basic_processing_with_combination
from megaradrp.core.recipe import MegaraBaseRecipe
-from megaradrp.types import MasterBias
+from megaradrp.ntypes import MasterBias
from megaradrp.requirements import MasterBPMRequirement
@@ -34,6 +34,16 @@ class BiasRecipe(MegaraBaseRecipe):
megaradrp.types.MasterBias: description of the MasterBias product
"""
+ method = Parameter(
+ 'median',
+ description='Combination method',
+ choices=['mean', 'median', 'sigmaclip']
+ )
+ method_kwargs = Parameter(
+ dict(),
+ description='Arguments for combination method',
+ optional=True
+ )
master_bpm = MasterBPMRequirement()
master_bias = Result(MasterBias)
@@ -56,7 +66,15 @@ def run(self, rinput):
errors = False
if not errors:
self.logger.info('not computing errors')
- hdulist = basic_processing_with_combination(rinput, flow, method=combine.median, errors=errors)
+
+ fmethod = getattr(combine, rinput.method)
+
+ hdulist = basic_processing_with_combination(
+ rinput, flow,
+ method=fmethod,
+ method_kwargs=rinput.method_kwargs,
+ errors=errors
+ )
hdr = hdulist[0].header
self.set_base_headers(hdr)
result = self.create_result(master_bias=hdulist)
@@ -67,4 +85,5 @@ def set_base_headers(self, hdr):
"""Set metadata in FITS headers."""
hdr = super(BiasRecipe, self).set_base_headers(hdr)
hdr['NUMTYPE'] = ('MasterBias', 'Product type')
+ hdr['IMGTYPE'] = ('MASTER_BIAS', 'Product type')
return hdr
diff --git a/megaradrp/recipes/calibration/bpm.py b/megaradrp/recipes/calibration/bpm.py
index 4e795281..abfc7c9b 100644
--- a/megaradrp/recipes/calibration/bpm.py
+++ b/megaradrp/recipes/calibration/bpm.py
@@ -20,7 +20,7 @@
from megaradrp.processing.combine import basic_processing_with_combination_frames
from megaradrp.core.recipe import MegaraBaseRecipe
-from megaradrp.types import MasterBPM
+from megaradrp.ntypes import MasterBPM
import megaradrp.requirements as reqs
diff --git a/megaradrp/recipes/calibration/dark.py b/megaradrp/recipes/calibration/dark.py
index 0c4f5d4a..6127e08f 100644
--- a/megaradrp/recipes/calibration/dark.py
+++ b/megaradrp/recipes/calibration/dark.py
@@ -14,7 +14,7 @@
from megaradrp.core.recipe import MegaraBaseRecipe
from megaradrp.requirements import MasterBiasRequirement
-from megaradrp.types import MasterDark
+from megaradrp.ntypes import MasterDark
from megaradrp.processing.combine import basic_processing_with_combination
diff --git a/megaradrp/recipes/calibration/flat.py b/megaradrp/recipes/calibration/flat.py
index c96e3a4b..9a144461 100644
--- a/megaradrp/recipes/calibration/flat.py
+++ b/megaradrp/recipes/calibration/flat.py
@@ -1,5 +1,5 @@
#
-# Copyright 2011-2019 Universidad Complutense de Madrid
+# Copyright 2011-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -16,10 +16,12 @@
import matplotlib.pyplot as plt
from numina.core import Result, Parameter
import numina.exceptions
+
from megaradrp.core.recipe import MegaraBaseRecipe
-from megaradrp.types import MasterFiberFlat
+from megaradrp.instrument.focalplane import FocalPlaneConf
+from megaradrp.ntypes import MasterFiberFlat
import megaradrp.requirements as reqs
-from megaradrp.types import ProcessedRSS, ProcessedFrame
+from megaradrp.ntypes import ProcessedRSS, ProcessedFrame
# Flat 2D
from megaradrp.processing.combine import basic_processing_with_combination
@@ -68,7 +70,7 @@ class FiberFlatRecipe(MegaraBaseRecipe):
`reduced_image` of the recipe result.
The apertures in the 2D image are extracted, using the information in
- `master_traces` and resampled accoding to the wavelength calibration in
+ `master_apertures` and resampled according to the wavelength calibration in
`master_wlcalib`. The resulting RSS is saved as an intermediate
result named 'reduced_rss.fits'. This RSS is also returned in the field
`reduced_rss` of the recipe result.
@@ -81,11 +83,21 @@ class FiberFlatRecipe(MegaraBaseRecipe):
"""
# Requirements
+ method = Parameter(
+ 'median',
+ description='Combination method',
+ choices=['mean', 'median', 'sigmaclip']
+ )
+ method_kwargs = Parameter(
+ dict(),
+ description='Arguments for combination method',
+ optional=True
+ )
master_bias = reqs.MasterBiasRequirement()
master_dark = reqs.MasterDarkRequirement()
master_bpm = reqs.MasterBPMRequirement()
master_slitflat = reqs.MasterSlitFlatRequirement()
- master_traces = reqs.MasterAperturesRequirement()
+ master_apertures = reqs.MasterAperturesRequirement(alias='master_traces')
smoothing_window = Parameter(31, 'Window for smoothing (must be odd)',
validator=_smoothing_window_check
)
@@ -99,23 +111,25 @@ class FiberFlatRecipe(MegaraBaseRecipe):
def process_flat2d(self, rinput):
flow = self.init_filters(rinput, rinput.obresult.configuration)
- final_image = basic_processing_with_combination(rinput, flow, method=combine.median)
+ fmethod = getattr(combine, rinput.method)
+ final_image = basic_processing_with_combination(
+ rinput, flow, method=fmethod, method_kwargs=rinput.method_kwargs,
+ )
hdr = final_image[0].header
self.set_base_headers(hdr)
return final_image
- def obtain_fiber_flat(self, rss_wl, wlcalib, col1=1900, col2=2100, window=31, degree=3):
+ def obtain_fiber_flat(self, rss_wl, col1=1900, col2=2100, window=31, degree=3):
from scipy.signal import savgol_filter
from scipy.interpolate import UnivariateSpline
- # Bad fibers, join:
- bad_fibers = wlcalib.missing_fibers
- bad_fibers.extend(wlcalib.error_fitting)
- # print(bad_fibers)
+ # Bad fibers
+ fp_conf = FocalPlaneConf.from_img(rss_wl)
+ bad_fibers = fp_conf.invalid_fibers()
bad_idxs = [fibid - 1 for fibid in bad_fibers]
# print(bad_idxs)
- good_idxs_mask = numpy.ones((wlcalib.total_fibers,), dtype='bool')
+ good_idxs_mask = numpy.ones((fp_conf.nfibers,), dtype='bool')
good_idxs_mask[bad_idxs] = False
# Collapse all fiber spectrum
@@ -133,16 +147,17 @@ def obtain_fiber_flat(self, rss_wl, wlcalib, col1=1900, col2=2100, window=31, de
data_good = data0[valid_mask] / col_good_mean[:, numpy.newaxis]
data_good[numpy.isnan(data_good)] = 0.0
- # Crappy way
# This extension was created by WLcalibrator
wlmap = rss_wl['WLMAP'].data
mm = numpy.sum(wlmap, axis=0)
+ # The information is also in the keywords
+ # FIBxxxS1, FIBxxxS2
# skip 0 in divisions
mask_noinfo = mm < 1
mm[mask_noinfo] = 1
# Filter collapse to smooth it
collapse = numpy.sum(data_good, axis=0) / mm
- # Smooting works bad very near the border (overshooting)
+ # Smoothing works bad very near the border (overshooting)
collapse_smooth = savgol_filter(collapse, window, degree)
collapse_smooth[mask_noinfo] = 1.0
@@ -197,7 +212,7 @@ def run(self, rinput):
self.save_intermediate_img(img, 'reduced_image.fits')
splitter1 = Splitter()
calibrator_aper = ApertureExtractor(
- rinput.master_traces,
+ rinput.master_apertures,
self.datamodel,
offset=rinput.extraction_offset
)
@@ -218,7 +233,7 @@ def run(self, rinput):
# Obtain flat field
self.logger.info('Normalize flat field')
- rss_wl2 = self.obtain_fiber_flat(rss_wl, rinput.master_wlcalib, window=rinput.smoothing_window)
+ rss_wl2 = self.obtain_fiber_flat(rss_wl, window=rinput.smoothing_window)
rss_wl2[0].header = self.set_base_headers(rss_wl2[0].header)
result = self.create_result(
master_fiberflat=rss_wl2,
diff --git a/megaradrp/recipes/calibration/lcbstdstar.py b/megaradrp/recipes/calibration/lcbstdstar.py
index cebd50d6..952a3aa1 100644
--- a/megaradrp/recipes/calibration/lcbstdstar.py
+++ b/megaradrp/recipes/calibration/lcbstdstar.py
@@ -1,5 +1,5 @@
#
-# Copyright 2011-2019 Universidad Complutense de Madrid
+# Copyright 2011-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -10,21 +10,26 @@
"""LCB Standard Star Image Recipe for Megara"""
-from scipy.interpolate import interp1d
+from astropy import constants as const
import astropy.io.fits as fits
import astropy.units as u
-from astropy import constants as const
+import astropy.wcs
+
+from scipy.interpolate import interp1d
+from numina.array.numsplines import AdaptiveLSQUnivariateSpline
from numina.core import Result, Parameter
from numina.core.requirements import Requirement
from numina.core.validator import range_validator
from numina.types.array import ArrayType
+from megaradrp.instrument.focalplane import FocalPlaneConf
from megaradrp.processing.extractobj import extract_star, generate_sensitivity
+from megaradrp.processing.extractobj import mix_values, compute_broadening
from megaradrp.recipes.scientific.base import ImageRecipe
-from megaradrp.types import ProcessedRSS, ProcessedFrame, ProcessedSpectrum
-from megaradrp.types import ReferenceSpectrumTable, ReferenceExtinctionTable
-from megaradrp.types import MasterSensitivity
+from megaradrp.ntypes import ProcessedRSS, ProcessedFrame, ProcessedSpectrum
+from megaradrp.ntypes import ReferenceSpectrumTable, ReferenceExtinctionTable
+from megaradrp.ntypes import MasterSensitivity
class LCBStandardRecipe(ImageRecipe):
@@ -51,7 +56,7 @@ class LCBStandardRecipe(ImageRecipe):
`reduced_image` of the recipe result.
The apertures in the 2D image are extracted, using the information in
- `master_traces` and resampled according to the wavelength calibration in
+ `master_apertures` and resampled according to the wavelength calibration in
`master_wlcalib`. Then is divided by the `master_fiberflat`.
The resulting RSS is saved as an intermediate
result named 'reduced_rss.fits'. This RSS is also returned in the field
@@ -71,6 +76,16 @@ class LCBStandardRecipe(ImageRecipe):
reference_spectrum = Requirement(ReferenceSpectrumTable, "Spectrum of reference star")
reference_spectrum_velocity = Parameter(0.0, 'Radial velocity (km/s) of reference spectrum')
reference_extinction = Requirement(ReferenceExtinctionTable, "Reference extinction")
+ degrade_resolution_target = Parameter('object', 'Spectrum with higher resolution',
+ choices=['object']
+ )
+ # TODO: Implement the possibility of the reference having higher resolution
+ # degrade_resolution_target = Parameter('object', 'Spectrum with higher resolution',
+ # choices=['object', 'reference']
+ # )
+ degrade_resolution_method = Parameter('fixed', 'Method to degrade the resolution',
+ choices=['none', 'fixed', 'auto']
+ )
sigma_resolution = Parameter(20.0, 'sigma Gaussian filter to degrade resolution ')
reduced_image = Result(ProcessedFrame)
@@ -79,31 +94,34 @@ class LCBStandardRecipe(ImageRecipe):
sky_rss = Result(ProcessedRSS)
star_spectrum = Result(ProcessedSpectrum)
master_sensitivity = Result(MasterSensitivity)
- fiber_ids = Result(ArrayType)
+ sensitivity_raw = Result(ProcessedSpectrum)
+ fiber_ids = Result(ArrayType(fmt='%d'))
+ sigma = Result(float)
+
+ def set_base_headers(self, hdr):
+ """Set metadata in FITS headers."""
+ hdr = super(LCBStandardRecipe, self).set_base_headers(hdr)
+ hdr['NUMTYPE'] = ('MASTER_SENSITIVITY', 'Product type')
+ hdr['IMGTYPE'] = ('MASTER_SENSITIVITY', 'Product type')
+ return hdr
def run(self, rinput):
self.logger.info('starting LCBStandardRecipe reduction')
# Create InstrumentModel
- ins1 = rinput.obresult.configuration
+ # ins1 = rinput.obresult.configuration
#
reduced2d, rss_data = super(LCBStandardRecipe, self).base_run(rinput)
- tags = rinput.obresult.tags
- #print(ins1.get('detector.scan'))
- #print(ins1.get('pseudoslit.boxes', **tags))
- #print(ins1.get('pseudoslit.boxes_positions', **tags))
+ # tags = rinput.obresult.tags
ins2 = rinput.obresult.profile
- #print(ins2.is_configured)
ins2.configure_with_image(rss_data)
- #print(ins2.is_configured)
- #print(ins2.get_property('detector.scan'))
- #print(ins2.get_property('pseudoslit.boxes'))
- #print(ins2.get_property('pseudoslit.boxes_positions'))
- print(tags)
- print(ins2.children['pseudoslit']._internal_state)
self.logger.info('start sky subtraction')
- final, origin, sky = self.run_sky_subtraction(rss_data, rinput.ignored_sky_bundles)
+ final, origin, sky = self.run_sky_subtraction(
+ rss_data,
+ sky_rss=rinput.sky_rss,
+ ignored_sky_bundles=rinput.ignored_sky_bundles
+ )
self.logger.info('end sky subtraction')
# 1 + 6 for first ring
@@ -115,9 +133,9 @@ def run(self, rinput):
npoints = 1 + 3 * rinput.nrings * (rinput.nrings +1)
self.logger.debug('adding %d fibers', npoints)
- fiberconf = self.datamodel.get_fiberconf(final)
+ fp_conf = FocalPlaneConf.from_img(final)
spectra_pack = extract_star(final, rinput.position, npoints,
- fiberconf, logger=self.logger)
+ fp_conf, logger=self.logger)
spectrum, colids, wl_cover1, wl_cover2 = spectra_pack
star_spectrum = fits.PrimaryHDU(spectrum, header=final[0].header)
@@ -131,8 +149,46 @@ def run(self, rinput):
rinput.reference_extinction[:, 1])
fiber_ids = [colid + 1 for colid in colids]
- sigma = rinput.sigma_resolution
- sens = generate_sensitivity(final, spectrum, star_interp, extinc_interp, wl_cover1, wl_cover2, sigma)
+
+ wcsl = astropy.wcs.WCS(final[0].header)
+ wl_aa, response_m, response_r = mix_values(wcsl, spectrum, star_interp)
+ if rinput.degrade_resolution_method == 'none':
+ sigma = 0
+ self.logger.info('no broadening')
+ elif rinput.degrade_resolution_method == 'fixed':
+ sigma = rinput.sigma_resolution
+ self.logger.info('fixed sigma=%3.0f', sigma)
+ elif rinput.degrade_resolution_method == 'auto':
+ self.logger.info('compute auto broadening')
+ offset_broad, sigma_broad = compute_broadening(
+ response_r.copy(), response_m.copy(), sigmalist=range(1, 101),
+ remove_mean=False, frac_cosbell=0.10, zero_padding=50,
+ fminmax=(0.003, 0.3), naround_zero=25, nfit_peak=21
+ )
+ sigma = sigma_broad
+ self.logger.info('computed sigma=%3.0f', sigma)
+ else:
+ msg = "'degrade_resolution_method' has value {}".format(rinput.degrade_resolution_method)
+ raise ValueError(msg)
+
+ sens_raw = generate_sensitivity(final, spectrum, star_interp, extinc_interp, wl_cover1, wl_cover2, sigma)
+
+ # Compute smoothed version
+ self.logger.info('compute smoothed sensitivity')
+
+ sens = sens_raw.copy()
+ i_knots = 3
+ self.logger.debug('using adaptive spline with t=%d interior knots', i_knots)
+ spl = AdaptiveLSQUnivariateSpline(x=wl_aa.value, y=sens_raw.data, t=i_knots)
+ sens.data = spl(wl_aa.value)
+
+ if self.intermediate_results:
+ import matplotlib.pyplot as plt
+ plt.plot(wl_aa, sens_raw.data, 'b')
+ plt.plot(wl_aa, sens.data, 'r')
+ plt.savefig('smoothed.png')
+ plt.close()
+
self.logger.info('end LCBStandardRecipe reduction')
return self.create_result(
@@ -142,5 +198,7 @@ def run(self, rinput):
sky_rss=sky,
star_spectrum=star_spectrum,
master_sensitivity=sens,
- fiber_ids=fiber_ids
+ sensitivity_raw=sens_raw,
+ fiber_ids=fiber_ids,
+ sigma=sigma
)
diff --git a/megaradrp/recipes/calibration/modelmap.py b/megaradrp/recipes/calibration/modelmap.py
index 4b24aecc..2c4e6c4d 100644
--- a/megaradrp/recipes/calibration/modelmap.py
+++ b/megaradrp/recipes/calibration/modelmap.py
@@ -1,5 +1,5 @@
#
-# Copyright 2015-2019 Universidad Complutense de Madrid
+# Copyright 2015-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -25,10 +25,11 @@
from numina.modeling.gaussbox import GaussBox, gauss_box_model
from numina.frame.utils import copy_img
+from megaradrp.instrument.focalplane import FocalPlaneConf
from megaradrp.products.modelmap import ModelMap
from megaradrp.products.modelmap import GeometricModel
from megaradrp.processing.aperture import ApertureExtractor
-from megaradrp.types import ProcessedImage, ProcessedRSS
+from megaradrp.ntypes import ProcessedImage, ProcessedRSS
from megaradrp.processing.combine import basic_processing_with_combination
from megaradrp.core.recipe import MegaraBaseRecipe
import megaradrp.requirements as reqs
@@ -138,8 +139,8 @@ def run(self, rinput):
self.logger.debug('update metadata in model')
model_map.update_metadata(self)
- fiberconf = self.datamodel.get_fiberconf(reduced)
- model_map.total_fibers = fiberconf.nfibers
+ fp_conf = FocalPlaneConf.from_img(reduced)
+ model_map.total_fibers = fp_conf.nfibers
model_map.missing_fibers = rinput.master_traces.missing_fibers
model_map.tags = self.extract_tags_from_ref(reduced, model_map.tag_names(), base=obresult.labels)
# model_map.boxes_positions = box_borders
diff --git a/megaradrp/recipes/calibration/mosstdstar.py b/megaradrp/recipes/calibration/mosstdstar.py
index fb9a43b3..fc6dcc0c 100644
--- a/megaradrp/recipes/calibration/mosstdstar.py
+++ b/megaradrp/recipes/calibration/mosstdstar.py
@@ -1,5 +1,5 @@
#
-# Copyright 2011-2019 Universidad Complutense de Madrid
+# Copyright 2011-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -13,17 +13,21 @@
from scipy.interpolate import interp1d
import astropy.io.fits as fits
import astropy.units as u
+import astropy.wcs
from astropy import constants as const
+from numina.array.numsplines import AdaptiveLSQUnivariateSpline
from numina.core import Result, Parameter
from numina.core.requirements import Requirement
from numina.types.array import ArrayType
+from megaradrp.instrument.focalplane import FocalPlaneConf
from megaradrp.processing.extractobj import extract_star, generate_sensitivity
+from megaradrp.processing.extractobj import mix_values, compute_broadening
from megaradrp.recipes.scientific.base import ImageRecipe
-from megaradrp.types import ProcessedRSS, ProcessedFrame, ProcessedSpectrum
-from megaradrp.types import ReferenceSpectrumTable, ReferenceExtinctionTable
-from megaradrp.types import MasterSensitivity
+from megaradrp.ntypes import ProcessedRSS, ProcessedFrame, ProcessedSpectrum
+from megaradrp.ntypes import ReferenceSpectrumTable, ReferenceExtinctionTable
+from megaradrp.ntypes import MasterSensitivity
class MOSStandardRecipe(ImageRecipe):
@@ -50,7 +54,7 @@ class MOSStandardRecipe(ImageRecipe):
`reduced_image` of the recipe result.
The apertures in the 2D image are extracted, using the information in
- `master_traces` and resampled according to the wavelength calibration in
+ `master_apertures` and resampled according to the wavelength calibration in
`master_wlcalib`. Then is divided by the `master_fiberflat`.
The resulting RSS is saved as an intermediate
result named 'reduced_rss.fits'. This RSS is also returned in the field
@@ -69,6 +73,16 @@ class MOSStandardRecipe(ImageRecipe):
reference_spectrum = Requirement(ReferenceSpectrumTable, "Spectrum of reference star")
reference_spectrum_velocity = Parameter(0.0, 'Radial velocity of reference spectrum')
reference_extinction = Requirement(ReferenceExtinctionTable, "Reference extinction")
+ degrade_resolution_target = Parameter('object', 'Spectrum with higher resolution',
+ choices=['object']
+ )
+ # TODO: Implement the possibility of the reference having higher resolution
+ # degrade_resolution_target = Parameter('object', 'Spectrum with higher resolution',
+ # choices=['object', 'reference']
+ # )
+ degrade_resolution_method = Parameter('fixed', 'Method to degrade the resolution',
+ choices=['none', 'fixed', 'auto']
+ )
sigma_resolution = Parameter(20.0, 'sigma Gaussian filter to degrade resolution ')
reduced_image = Result(ProcessedFrame)
@@ -77,7 +91,9 @@ class MOSStandardRecipe(ImageRecipe):
sky_rss = Result(ProcessedRSS)
star_spectrum = Result(ProcessedSpectrum)
master_sensitivity = Result(MasterSensitivity)
+ sensitivity_raw = Result(ProcessedSpectrum)
fiber_ids = Result(ArrayType)
+ sigma = Result(float)
def run(self, rinput):
@@ -86,7 +102,11 @@ def run(self, rinput):
reduced2d, rss_data = super(MOSStandardRecipe, self).base_run(rinput)
self.logger.info('start sky subtraction')
- final, origin, sky = self.run_sky_subtraction(rss_data, rinput.ignored_sky_bundles)
+ final, origin, sky = self.run_sky_subtraction(
+ rss_data,
+ sky_rss=rinput.sky_rss,
+ ignored_sky_bundles=rinput.ignored_sky_bundles
+ )
self.logger.info('end sky subtraction')
# 1 + 6 for first ring
@@ -98,11 +118,11 @@ def run(self, rinput):
npoints = 7
self.logger.debug('adding %d fibers', npoints)
- fiberconf = self.datamodel.get_fiberconf(final)
+ fp_conf = FocalPlaneConf.from_img(final)
spectra_pack = extract_star(final, rinput.position, npoints,
- fiberconf, logger=self.logger)
+ fp_conf, logger=self.logger)
- spectrum, colids, cover1, cover2 = spectra_pack
+ spectrum, colids, wl_cover1, wl_cover2 = spectra_pack
star_spectrum = fits.PrimaryHDU(spectrum, header=final[0].header)
rad_vel = rinput.reference_spectrum_velocity * u.km / u.s
@@ -114,8 +134,46 @@ def run(self, rinput):
rinput.reference_extinction[:, 1])
fiber_ids = [colid + 1 for colid in colids]
- sigma = rinput.sigma_resolution
- sens = generate_sensitivity(final, spectrum, star_interp, extinc_interp, cover1, cover2, sigma)
+
+ wcsl = astropy.wcs.WCS(final[0].header)
+ wl_aa, response_m, response_r = mix_values(wcsl, spectrum, star_interp)
+ if rinput.degrade_resolution_method == 'none':
+ sigma = 0
+ self.logger.info('no broadening')
+ elif rinput.degrade_resolution_method == 'fixed':
+ sigma = rinput.sigma_resolution
+ self.logger.info('fixed sigma=%3.0f', sigma)
+ elif rinput.degrade_resolution_method == 'auto':
+ self.logger.info('compute auto broadening')
+ offset_broad, sigma_broad = compute_broadening(
+ response_r.copy(), response_m.copy(), sigmalist=range(1, 101),
+ remove_mean=False, frac_cosbell=0.10, zero_padding=50,
+ fminmax=(0.003, 0.3), naround_zero=25, nfit_peak=21
+ )
+ sigma = sigma_broad
+ self.logger.info('computed sigma=%3.0f', sigma)
+ else:
+ msg = "'degrade_resolution_method' has value {}".format(rinput.degrade_resolution_method)
+ raise ValueError(msg)
+
+ sens_raw = generate_sensitivity(final, spectrum, star_interp, extinc_interp, wl_cover1, wl_cover2, sigma)
+
+ # Compute smoothed version
+ self.logger.info('compute smoothed sensitivity')
+
+ sens = sens_raw.copy()
+ i_knots = 3
+ self.logger.debug('using adaptive spline with t=%d interior knots', i_knots)
+ spl = AdaptiveLSQUnivariateSpline(x=wl_aa.value, y=sens_raw.data, t=i_knots)
+ sens.data = spl(wl_aa.value)
+
+ if self.intermediate_results:
+ import matplotlib.pyplot as plt
+ plt.plot(wl_aa, sens_raw.data, 'b')
+ plt.plot(wl_aa, sens.data, 'r')
+ plt.savefig('smoothed.png')
+ plt.close()
+
self.logger.info('end MOSStandardRecipe reduction')
return self.create_result(
@@ -125,5 +183,7 @@ def run(self, rinput):
sky_rss=sky,
star_spectrum=star_spectrum,
master_sensitivity=sens,
- fiber_ids=fiber_ids
+ sensitivity_raw=sens_raw,
+ fiber_ids=fiber_ids,
+ sigma=sigma
)
diff --git a/megaradrp/recipes/calibration/slitflat.py b/megaradrp/recipes/calibration/slitflat.py
index 0714baff..4a5cc840 100644
--- a/megaradrp/recipes/calibration/slitflat.py
+++ b/megaradrp/recipes/calibration/slitflat.py
@@ -1,5 +1,5 @@
#
-# Copyright 2011-2019 Universidad Complutense de Madrid
+# Copyright 2011-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -17,10 +17,10 @@
from numina.array import combine
from numina.core import Result, Parameter
-from megaradrp.types import ProcessedFrame
+from megaradrp.ntypes import ProcessedFrame
from megaradrp.processing.combine import basic_processing_with_combination
from megaradrp.core.recipe import MegaraBaseRecipe
-from megaradrp.types import MasterSlitFlat
+from megaradrp.ntypes import MasterSlitFlat
import megaradrp.requirements as reqs
import megaradrp.core.correctors as cor
@@ -42,11 +42,6 @@ class SlitFlatRecipe(MegaraBaseRecipe):
reduced_image = Result(ProcessedFrame)
master_slitflat = Result(MasterSlitFlat)
- def get_filters(self):
- return [cor.get_corrector_overscan, cor.get_corrector_trimming,
- cor.get_corrector_bpm, cor.get_corrector_bias,
- cor.get_corrector_dark, cor.get_corrector_gain]
-
def run(self, rinput):
from scipy.signal import savgol_filter
diff --git a/megaradrp/recipes/calibration/trace.py b/megaradrp/recipes/calibration/trace.py
index 19b64520..5fdfa9ea 100644
--- a/megaradrp/recipes/calibration/trace.py
+++ b/megaradrp/recipes/calibration/trace.py
@@ -1,5 +1,5 @@
#
-# Copyright 2011-2019 Universidad Complutense de Madrid
+# Copyright 2011-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -12,18 +12,18 @@
from __future__ import division, print_function
import logging
-from datetime import datetime
+import warnings
import numpy
import numpy.polynomial.polynomial as nppol
from numina.array.peaks.peakdet import refine_peaks
-from numina.array.trace.traces import trace
+from numina.array.trace.traces import trace, tracing_limits
from numina.core import Result, Parameter
import matplotlib.pyplot as plt
import numina.types.qc as qc
from numina.array import combine
-import numina.core.validator
+from numina.array.wavecalib.crosscorrelation import cosinebell
from skimage.filters import threshold_otsu
from skimage.feature import peak_local_max
from scipy.ndimage.filters import minimum_filter
@@ -33,12 +33,13 @@
from megaradrp.processing.combine import basic_processing_with_combination
from megaradrp.products import TraceMap
from megaradrp.products.tracemap import GeometricTrace
-from megaradrp.types import ProcessedImage, ProcessedRSS
+from megaradrp.ntypes import ProcessedImage, ProcessedRSS
from megaradrp.core.recipe import MegaraBaseRecipe
import megaradrp.requirements as reqs
import megaradrp.products
import megaradrp.processing.fibermatch as fibermatch
from megaradrp.instrument import vph_thr
+from megaradrp.instrument.focalplane import FocalPlaneConf
class TraceMapRecipe(MegaraBaseRecipe):
@@ -104,12 +105,50 @@ class TraceMapRecipe(MegaraBaseRecipe):
def run_qc(self, recipe_input, recipe_result):
"""Run quality control checks"""
self.logger.info('start trace recipe QC')
- recipe_result.qc = qc.QC.GOOD
- recipe_result.master_traces.quality_control = qc.QC.GOOD
+
+ self.check_qc_tracemap(recipe_result.master_traces)
+
+ recipe_result.qc = recipe_result.master_traces.quality_control
+ self.logger.info('Result QC is %s', recipe_result.qc)
self.logger.info('end trace recipe QC')
return recipe_result
- @numina.core.validator.validate
+ def check_qc_tracemap(self, tracemap):
+ # check full range in all fibers
+ if tracemap.tags['insmode'] == 'LCB':
+ nfibers = 623
+ ignored_ids = [623]
+ else:
+ nfibers = 644
+ ignored_ids = [635]
+
+ cost_ids = nfibers * [0]
+
+ start_min, end_max = tracemap.expected_range
+
+ for trace in tracemap.contents:
+ idx = trace.fibid - 1
+ if trace.fibid in ignored_ids:
+ continue
+ if trace.start > start_min:
+ cost_ids[idx] += 1
+ msg = 'In fiber {}, trace start > {}'.format(trace.fibid, start_min)
+ warnings.warn(msg)
+ if trace.stop < end_max:
+ cost_ids[idx] += 1
+ msg = 'In fiber {}, trace end < {}'.format(trace.fibid, end_max)
+ warnings.warn(msg)
+
+ total = sum(cost_ids)
+
+ if total > 300:
+ tracemap.quality_control = qc.QC.BAD
+ elif total > 0:
+ tracemap.quality_control = qc.QC.PARTIAL
+ else:
+ tracemap.quality_control = qc.QC.GOOD
+
+ #@numina.core.validator.validate
def run(self, rinput):
"""Execute the recipe.
@@ -142,7 +181,7 @@ def run(self, rinput):
self.save_intermediate_img(reduced, 'reduced_image.fits')
- #insconf = obresult.configuration
+ # insconf = obresult.configuration
insconf = obresult.profile
boxes = insconf.get_property('pseudoslit.boxes')
@@ -166,22 +205,43 @@ def run(self, rinput):
self.logger.info('rel threshold not defined for %s, using %4.2f', current_vph, threshold)
final = megaradrp.products.TraceMap(instrument=obresult.instrument)
- fiberconf = self.datamodel.get_fiberconf(reduced)
- final.total_fibers = fiberconf.nfibers
+ fp_conf = FocalPlaneConf.from_img(reduced)
+ # As of 2019-10-15, headers do not contain information
+ # about inactive fibers, i.e. all have active=True
+ # inactive_fibers = fp_conf.inactive_fibers()
+ # We do it manually
+ if fp_conf.name == 'LCB':
+ inactive_fibers = [623]
+ else:
+ inactive_fibers = [635]
+
+ final.total_fibers = fp_conf.nfibers
final.tags = self.extract_tags_from_ref(reduced, final.tag_names(), base=obresult.labels)
final.boxes_positions = box_borders
final.ref_column = cstart
+ # Searching for peaks
+ # step of
+ step = 2
+ # number of the columns to add
+ hs = 3
+ # Expected range of computed traces
+ xx_start, xx_end = tracing_limits(reduced[0].shape[1], cstart, step, hs)
+ final.expected_range = [xx_start, xx_end]
+
final.update_metadata(self)
final.update_metadata_origin(obresult_meta)
# Temperature in Celsius with 2 decimals
final.tags['temp'] = round(obresult_meta['info'][0]['temp'] - 273.15, 2)
- contents, error_fitting = self.search_traces(
+ contents, error_fitting, missing_fibers = self.search_traces(
reduced,
boxes,
box_borders,
+ inactive_fibers=inactive_fibers,
cstart=cstart,
+ step=step,
+ hs=hs,
threshold=threshold,
poldeg=rinput.polynomial_degree,
debug_plot=debug_plot
@@ -189,7 +249,7 @@ def run(self, rinput):
final.contents = contents
final.error_fitting = error_fitting
-
+ final.missing_fibers = missing_fibers
# Perform extraction with own traces
calibrator_aper = ApertureExtractor(final, self.datamodel)
reduced_copy = copy_img(reduced)
@@ -206,7 +266,7 @@ def run(self, rinput):
self.logger.info('end trace spectra recipe')
return self.create_result(reduced_image=reduced,
- reduced_rss = reduced_rss,
+ reduced_rss=reduced_rss,
master_traces=final)
def obtain_boxes_from_image(self, reduced, expected, npeaks, cstart=2000):
@@ -257,10 +317,6 @@ def obtain_boxes_from_image(self, reduced, expected, npeaks, cstart=2000):
plt.scatter(expected, nidxs - expected)
plt.show()
-
- print("expected", expected)
- print("nidx", nidxs)
-
return nidxs, col
def refine_boxes_from_image(self, reduced, expected, cstart=2000, nsearch=20):
@@ -295,17 +351,17 @@ def refine_boxes_from_image(self, reduced, expected, cstart=2000, nsearch=20):
refined = expected[:]
for ibox, box in enumerate(expected):
- iargmax = final[box - nsearch: box + nsearch +1].argmax()
+ iargmax = final[box - nsearch: box + nsearch + 1].argmax()
refined[ibox] = iargmax + box - nsearch
return refined, cstart
- def search_traces(self, reduced, boxes, box_borders, cstart=2000,
- threshold=0.3, poldeg=5, step=2, debug_plot=0):
+ def search_traces(self, reduced, boxes, box_borders, inactive_fibers=None, cstart=2000,
+ threshold=0.3, poldeg=5, step=2, hs=3, debug_plot=0):
data = reduced[0].data
-
- hs = 3
+ if inactive_fibers is None:
+ inactive_fibers = []
tol = 1.63
self.logger.info('search for traces')
@@ -338,6 +394,7 @@ def search_traces(self, reduced, boxes, box_borders, cstart=2000,
contents = []
error_fitting = []
+ missing_fibers = []
self.logger.info('trace peaks from references')
for dtrace in central_peaks:
# FIXME, for traces, the background must be local
@@ -345,33 +402,46 @@ def search_traces(self, reduced, boxes, box_borders, cstart=2000,
local_trace_background = 300 # background
self.logger.debug('trace fiber %d', dtrace.fibid)
- if dtrace.start:
- mm = trace(image2, x=cstart, y=dtrace.start[1], step=step,
- hs=hs, background=local_trace_background, maxdis=maxdis)
-
- if debug_plot:
- plt.plot(mm[:, 0], mm[:, 1], '.')
- plt.savefig('trace-xy-{:03d}.png'.format(dtrace.fibid))
- plt.close()
- plt.plot(mm[:, 0], mm[:, 2], '.')
- plt.savefig('trace-xz-{:03d}.png'.format(dtrace.fibid))
- plt.close()
- if len(mm) < poldeg + 1:
- self.logger.warning('in fibid %d, only %d points to fit pol of degree %d',
- dtrace.fibid, len(mm), poldeg)
- pfit = numpy.array([])
+ conf_ok = dtrace.fibid not in inactive_fibers
+ peak_ok = dtrace.start is not None
+
+ pfit = numpy.array([])
+ start = cstart
+ stop = cstart
+
+ if peak_ok:
+ if not conf_ok:
+ error_fitting.append(dtrace.fibid)
+ self.logger.warning('found fibid %d, expected to be missing', dtrace.fibid)
else:
- pfit = nppol.polyfit(mm[:, 0], mm[:, 1], deg=poldeg)
- start = mm[0, 0]
- stop = mm[-1, 0]
+ mm = trace(image2, x=cstart, y=dtrace.start[1], step=step,
+ hs=hs, background=local_trace_background, maxdis=maxdis)
+
+ if debug_plot:
+ plt.plot(mm[:, 0], mm[:, 1], '.')
+ plt.savefig('trace-xy-{:03d}.png'.format(dtrace.fibid))
+ plt.close()
+ plt.plot(mm[:, 0], mm[:, 2], '.')
+ plt.savefig('trace-xz-{:03d}.png'.format(dtrace.fibid))
+ plt.close()
+ if len(mm) < poldeg + 1:
+ self.logger.warning('in fibid %d, only %d points to fit pol of degree %d',
+ dtrace.fibid, len(mm), poldeg)
+ pfit = numpy.array([])
+ else:
+ pfit = nppol.polyfit(mm[:, 0], mm[:, 1], deg=poldeg)
+
+ start = mm[0, 0]
+ stop = mm[-1, 0]
+ self.logger.debug('trace start %d stop %d', int(start), int(stop))
else:
- pfit = numpy.array([])
- start = cstart
- stop = cstart
- error_fitting.append(dtrace.fibid)
-
- self.logger.debug('trace start %d stop %d', int(start), int(stop))
+ if conf_ok:
+ self.logger.warning('error tracing fibid %d', dtrace.fibid)
+ error_fitting.append(dtrace.fibid)
+ else:
+ self.logger.debug('expected missing fibid %d', dtrace.fibid)
+ missing_fibers.append(dtrace.fibid)
this_trace = GeometricTrace(
fibid=dtrace.fibid,
@@ -382,7 +452,7 @@ def search_traces(self, reduced, boxes, box_borders, cstart=2000,
)
contents.append(this_trace)
- return contents, error_fitting
+ return contents, error_fitting, missing_fibers
def estimate_background(image, center, hs, boxref):
@@ -396,10 +466,6 @@ def estimate_background(image, center, hs, boxref):
return threshold_otsu(colcut)
-# FIXME: need a better place for this
-# Moved from megaradrp.trace
-
-
class FiberTraceInfo(object):
def __init__(self, fibid, boxid):
self.boxid = boxid
@@ -431,7 +497,7 @@ def init_traces(image, center, hs, boxes, box_borders, tol=1.5, threshold=0.37,
nfibers_max = nfibers - len(mfibers)
sfibers = box.get('skipcount', [])
_logger.debug('pseudoslit box: %s, id: %d', box['name'], boxid)
- _logger.debug('nfibers: %d, missing: %s',nfibers, mfibers)
+ _logger.debug('nfibers: %d, missing: %s', nfibers, mfibers)
counted_fibers += nfibers
b1 = int(box_borders[boxid])
@@ -505,8 +571,8 @@ def init_traces(image, center, hs, boxes, box_borders, tol=1.5, threshold=0.37,
borders, scale=measured_scale)
for fibid, match in fibermatch.iter_best_solution(fiber_model,
- matched_peaks,
- pos_solutions):
+ matched_peaks,
+ pos_solutions):
fti = FiberTraceInfo(fibid, boxid)
if match is not None:
fti.start = (center, peaks_y[match, 1], peaks_y[match, 2])
@@ -518,14 +584,3 @@ def init_traces(image, center, hs, boxes, box_borders, tol=1.5, threshold=0.37,
for m1, n2 in boxes_with_missing_fibers:
_logger.debug('missing %d fibers in box %s', n2, m1)
return fiber_traces
-
-
-def cosinebell(n, fraction=0.10):
- """"Cosine bell mask"""
- mask = numpy.ones(n)
- nmasked = int(fraction*n)
- for i in range(nmasked):
- f = 0.5 * (1 - numpy.cos(numpy.pi * float(i) / float(nmasked)))
- mask[i] = f
- mask[n-i-1] = f
- return mask
\ No newline at end of file
diff --git a/megaradrp/recipes/calibration/twilight.py b/megaradrp/recipes/calibration/twilight.py
index 0cc737f7..69c40109 100644
--- a/megaradrp/recipes/calibration/twilight.py
+++ b/megaradrp/recipes/calibration/twilight.py
@@ -22,8 +22,8 @@
import megaradrp.requirements as reqs
from megaradrp.core.recipe import MegaraBaseRecipe
-from megaradrp.types import MasterTwilightFlat
-from megaradrp.types import ProcessedRSS, ProcessedFrame
+from megaradrp.ntypes import MasterTwilightFlat
+from megaradrp.ntypes import ProcessedRSS, ProcessedFrame
# Flat 2D
from megaradrp.processing.combine import basic_processing_with_combination
from numina.array import combine
@@ -58,7 +58,7 @@ class RecipeInput(recipeio.RecipeInput):
master_dark = reqs.MasterDarkRequirement()
master_bpm = reqs.MasterBPMRequirement()
master_slitflat = reqs.MasterSlitFlatRequirement()
- master_traces = reqs.MasterAperturesRequirement()
+ master_apertures = reqs.MasterAperturesRequirement(alias='master_traces')
extraction_offset = Parameter([0.0], 'Offset traces for extraction', accept_scalar=True)
normalize_region = Parameter([1900, 2100], 'Region used to normalize the flat-field',
validator=pixel_2d_check)
@@ -103,7 +103,7 @@ class TwilightFiberFlatRecipe(MegaraBaseRecipe):
`reduced_image` of the recipe result.
The apertures in the 2D image are extracted, using the information in
- `master_traces` and resampled according to the wavelength calibration in
+ `master_apertures` and resampled according to the wavelength calibration in
`master_wlcalib`. Then is divided by the `master_fiberflat`.
The resulting RSS is saved as an intermediate
result named 'reduced_rss.fits'. This RSS is also returned in the field
@@ -148,7 +148,7 @@ def run(self, rinput):
self.save_intermediate_img(reduced_image, 'reduced_image.fits')
reduced_rss = self.run_reduction_1d(img,
- rinput.master_traces,
+ rinput.master_apertures,
rinput.master_wlcalib,
rinput.master_fiberflat,
offset=rinput.extraction_offset
diff --git a/megaradrp/recipes/combined/extinctionstar.py b/megaradrp/recipes/combined/extinctionstar.py
index dd7e7a2c..b189f74f 100644
--- a/megaradrp/recipes/combined/extinctionstar.py
+++ b/megaradrp/recipes/combined/extinctionstar.py
@@ -13,7 +13,7 @@
from numina.core import Result, Requirement
from numina.types.datatype import ListOfType
-import megaradrp.types as typs
+import megaradrp.ntypes as typs
from megaradrp.core.recipe import MegaraBaseRecipe
diff --git a/megaradrp/recipes/combined/sensstar.py b/megaradrp/recipes/combined/sensstar.py
index 207fb3e4..3ee56d57 100644
--- a/megaradrp/recipes/combined/sensstar.py
+++ b/megaradrp/recipes/combined/sensstar.py
@@ -14,7 +14,7 @@
from numina.types.datatype import ListOfType
from numina.core.requirements import ObservationResultRequirement
-import megaradrp.types as typs
+import megaradrp.ntypes as typs
from megaradrp.core.recipe import MegaraBaseRecipe
diff --git a/megaradrp/recipes/scientific/base.py b/megaradrp/recipes/scientific/base.py
index a63104b9..8cf7cd49 100644
--- a/megaradrp/recipes/scientific/base.py
+++ b/megaradrp/recipes/scientific/base.py
@@ -1,5 +1,5 @@
#
-# Copyright 2011-2018 Universidad Complutense de Madrid
+# Copyright 2011-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -25,14 +25,24 @@
from megaradrp.processing.fiberflat import FlipLR, FiberFlatCorrector
from megaradrp.processing.twilight import TwilightCorrector
from megaradrp.processing.extractobj import compute_centroid, compute_dar
-from megaradrp.processing.extractobj import subtract_sky
+from megaradrp.processing.sky import subtract_sky, subtract_sky_rss
class ImageRecipe(MegaraBaseRecipe):
"""Base Image."""
- # Requirements
+ # Requirements
obresult = ObservationResultRequirement()
+ method = Parameter(
+ 'median',
+ description='Combination method',
+ choices=['mean', 'median', 'sigmaclip']
+ )
+ method_kwargs = Parameter(
+ dict(),
+ description='Arguments for combination method',
+ optional=True
+ )
master_bias = reqs.MasterBiasRequirement()
master_dark = reqs.MasterDarkRequirement()
master_bpm = reqs.MasterBPMRequirement()
@@ -40,18 +50,26 @@ class ImageRecipe(MegaraBaseRecipe):
master_wlcalib = reqs.WavelengthCalibrationRequirement()
master_fiberflat = reqs.MasterFiberFlatRequirement()
master_twilight = reqs.MasterTwilightRequirement()
- master_traces = reqs.MasterAperturesRequirement()
+ master_apertures = reqs.MasterAperturesRequirement(alias='master_traces')
+ sky_rss = reqs.SkyRSSRequirement(optional=True)
extraction_offset = Parameter([0.0], 'Offset traces for extraction', accept_scalar=True)
ignored_sky_bundles = Parameter([], 'Ignore these sky bundles')
master_sensitivity = reqs.SensitivityRequirement()
reference_extinction = reqs.ReferenceExtinction()
relative_threshold = Parameter(0.3, 'Threshold for peak detection')
+ diffuse_light_image = reqs.DiffuseLightRequirement()
def base_run(self, rinput):
# 2D reduction
flow1 = self.init_filters(rinput, rinput.obresult.configuration)
- img = basic_processing_with_combination(rinput, flow1, method=combine.median)
+ fmethod = getattr(combine, rinput.method)
+
+ img = basic_processing_with_combination(
+ rinput, flow1,
+ method=fmethod,
+ method_kwargs=rinput.method_kwargs
+ )
hdr = img[0].header
self.set_base_headers(hdr)
@@ -61,7 +79,7 @@ def base_run(self, rinput):
# 1D, extraction, Wl calibration, Flat fielding
reduced_rss = self.run_reduction_1d(img,
- rinput.master_traces, rinput.master_wlcalib,
+ rinput.master_apertures, rinput.master_wlcalib,
rinput.master_fiberflat, rinput.master_twilight,
offset=rinput.extraction_offset
)
@@ -82,22 +100,31 @@ def run_reduction_1d(self, img, tracemap, wlcalib, fiberflat, twflat=None, offse
flow2 = SerialFlow(correctors)
- reduced_rss = flow2(img)
+ reduced_rss = flow2(img)
return reduced_rss
- def run_sky_subtraction(self, img, ignored_sky_bundles=None):
- return subtract_sky(img,
- self.datamodel,
- ignored_sky_bundles=ignored_sky_bundles,
- logger=self.logger
- )
-
def compute_dar(self, img):
import numpy.polynomial.polynomial as pol
- wl, xdar, ydar = compute_dar(img, self.datamodel, logger=self.logger)
+ wl, xdar, ydar = compute_dar(img, logger=self.logger)
print('DAR, x:', pol.polyfit(wl, xdar, deg=3))
print('DAR: y:', pol.polyfit(wl, ydar, deg=3))
def centroid(self, rssdata, fiberconf, c1, c2, point):
return compute_centroid(rssdata, fiberconf, c1, c2, point, logger=self.logger)
+
+ def run_sky_subtraction(self, img, sky_rss=None, ignored_sky_bundles=None):
+
+ if sky_rss is None:
+ self.logger.info('compute sky from SKY bundles')
+ if ignored_sky_bundles:
+ self.logger.info('sky bundles ignored: %s', ignored_sky_bundles)
+ return subtract_sky(img,
+ ignored_sky_bundles=ignored_sky_bundles,
+ logger=self.logger
+ )
+ else:
+ self.logger.info('use sky RSS image')
+ return subtract_sky_rss(img, sky_img=sky_rss,
+ logger=self.logger
+ )
\ No newline at end of file
diff --git a/megaradrp/recipes/scientific/lcb.py b/megaradrp/recipes/scientific/lcb.py
index 19ed1109..fbe3f680 100644
--- a/megaradrp/recipes/scientific/lcb.py
+++ b/megaradrp/recipes/scientific/lcb.py
@@ -1,5 +1,5 @@
#
-# Copyright 2011-2019 Universidad Complutense de Madrid
+# Copyright 2011-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -17,7 +17,7 @@
from numina.core import Result
from megaradrp.recipes.scientific.base import ImageRecipe
-from megaradrp.types import ProcessedRSS, ProcessedFrame
+from megaradrp.ntypes import ProcessedRSS, ProcessedFrame
from megaradrp.processing.fluxcalib import FluxCalibration
@@ -45,7 +45,7 @@ class LCBImageRecipe(ImageRecipe):
`reduced_image` of the recipe result.
The apertures in the 2D image are extracted, using the information in
- `master_traces` and resampled according to the wavelength calibration in
+ `master_apertures` and resampled according to the wavelength calibration in
`master_wlcalib`. Then is divided by the `master_fiberflat`.
The resulting RSS is saved as an intermediate
result named 'reduced_rss.fits'. This RSS is also returned in the field
@@ -77,7 +77,11 @@ def run(self, rinput):
isb = rinput.ignored_sky_bundles
if isb:
self.logger.info('sky bundles ignored: %s', isb)
- final, origin, sky = self.run_sky_subtraction(rss_data, ignored_sky_bundles=isb)
+ final, origin, sky = self.run_sky_subtraction(
+ rss_data,
+ sky_rss=rinput.sky_rss,
+ ignored_sky_bundles=isb
+ )
self.logger.info('end sky subtraction')
# Flux calibration
if rinput.master_sensitivity is not None:
diff --git a/megaradrp/recipes/scientific/lcbfastmapping.py b/megaradrp/recipes/scientific/lcbfastmapping.py
index b8035256..a5abbebd 100644
--- a/megaradrp/recipes/scientific/lcbfastmapping.py
+++ b/megaradrp/recipes/scientific/lcbfastmapping.py
@@ -12,7 +12,7 @@
from numina.core import Product, ObservationResult
-from megaradrp.types import ProcessedMultiRSS
+from megaradrp.ntypes import ProcessedMultiRSS
from megaradrp.core.recipe import MegaraBaseRecipe
from megaradrp.processing.multirss import generate_multi_rss
diff --git a/megaradrp/recipes/scientific/mos.py b/megaradrp/recipes/scientific/mos.py
index 1523e8a3..68b0601a 100644
--- a/megaradrp/recipes/scientific/mos.py
+++ b/megaradrp/recipes/scientific/mos.py
@@ -1,5 +1,5 @@
#
-# Copyright 2011-2019 Universidad Complutense de Madrid
+# Copyright 2011-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -18,7 +18,7 @@
from megaradrp.processing.fluxcalib import FluxCalibration
from megaradrp.utils import add_collapsed_mos_extension
-from megaradrp.types import ProcessedRSS, ProcessedFrame
+from megaradrp.ntypes import ProcessedRSS, ProcessedFrame
from .base import ImageRecipe
@@ -46,7 +46,7 @@ class MOSImageRecipe(ImageRecipe):
`reduced_image` of the recipe result.
The apertures in the 2D image are extracted, using the information in
- `master_traces` and resampled according to the wavelength calibration in
+ `master_apertures` and resampled according to the wavelength calibration in
`master_wlcalib`. Then is divided by the `master_fiberflat`.
The resulting RSS is saved as an intermediate
result named 'reduced_rss.fits'. This RSS is also returned in the field
@@ -77,7 +77,11 @@ def run(self, rinput):
isb = rinput.ignored_sky_bundles
if isb:
self.logger.info('sky bundles ignored: %s', isb)
- final, origin, sky = self.run_sky_subtraction(rss_data, ignored_sky_bundles=isb)
+ final, origin, sky = self.run_sky_subtraction(
+ rss_data,
+ sky_rss=rinput.sky_rss,
+ ignored_sky_bundles=isb
+ )
self.logger.info('end sky subtraction')
# Flux calibration
if rinput.master_sensitivity is not None:
diff --git a/megaradrp/requirements.py b/megaradrp/requirements.py
index 6d61787b..cc2afb23 100644
--- a/megaradrp/requirements.py
+++ b/megaradrp/requirements.py
@@ -1,5 +1,5 @@
#
-# Copyright 2015-2017 Universidad Complutense de Madrid
+# Copyright 2015-2019 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -12,48 +12,55 @@
from numina.core import Requirement
from numina.types.multitype import MultiType
-import megaradrp.types
+import megaradrp.ntypes
import megaradrp.products
import megaradrp.products.modelmap
class MasterBiasRequirement(Requirement):
+ """Requirement for Master Bias images"""
def __init__(self, optional=False):
- super(MasterBiasRequirement,
- self).__init__(megaradrp.types.MasterBias,
- 'Master BIAS image',
- optional=optional
- )
+ super(MasterBiasRequirement, self).__init__(
+ megaradrp.ntypes.MasterBias,
+ 'Master BIAS image',
+ optional=optional,
+ validation=True
+ )
class MasterBPMRequirement(Requirement):
def __init__(self, optional=True):
- super(MasterBPMRequirement,
- self).__init__(megaradrp.types.MasterBPM,
- 'Master Bad Pixel Mask',
- optional=optional
- )
+ super(MasterBPMRequirement, self).__init__(
+ megaradrp.ntypes.MasterBPM,
+ 'Master Bad Pixel Mask',
+ optional=optional,
+ validation=False
+ )
class MasterDarkRequirement(Requirement):
def __init__(self, optional=True):
- super(MasterDarkRequirement,
- self).__init__(megaradrp.types.MasterDark, 'Master DARK image',
- optional=optional)
+ super(MasterDarkRequirement, self).__init__(
+ megaradrp.ntypes.MasterDark,
+ 'Master DARK image',
+ optional=optional,
+ validation=True
+ )
class MasterFiberFlatRequirement(Requirement):
def __init__(self):
- super(MasterFiberFlatRequirement,
- self).__init__(megaradrp.types.MasterFiberFlat,
- 'Master fiber flat calibration'
- )
+ super(MasterFiberFlatRequirement, self).__init__(
+ megaradrp.ntypes.MasterFiberFlat,
+ 'Master fiber flat calibration',
+ validation=True
+ )
class MasterSlitFlatRequirement(Requirement):
def __init__(self, optional=True):
super(MasterSlitFlatRequirement,
- self).__init__(megaradrp.types.MasterSlitFlat,
+ self).__init__(megaradrp.ntypes.MasterSlitFlat,
'Master slit flat calibration',
optional=optional
)
@@ -62,7 +69,7 @@ def __init__(self, optional=True):
class MasterTwilightRequirement(Requirement):
def __init__(self, optional=True):
super(MasterTwilightRequirement,
- self).__init__(megaradrp.types.MasterTwilightFlat,
+ self).__init__(megaradrp.ntypes.MasterTwilightFlat,
'Master twlight flat calibration',
optional=optional
)
@@ -70,32 +77,51 @@ def __init__(self, optional=True):
class MasterTraceMapRequirement(Requirement):
def __init__(self):
- super(MasterTraceMapRequirement,
- self).__init__(megaradrp.products.TraceMap, 'Trace information of the Apertures')
+ super(MasterTraceMapRequirement, self).__init__(
+ megaradrp.products.TraceMap,
+ 'Trace information of the Apertures',
+ validation=True
+ )
class MasterAperturesRequirement(Requirement):
- def __init__(self):
+ def __init__(self, alias=None):
super(MasterAperturesRequirement, self).__init__(MultiType(
megaradrp.products.modelmap.ModelMap,
- megaradrp.products.TraceMap), 'Apertures information for extraction')
+ megaradrp.products.TraceMap),
+ 'Apertures information for extraction',
+ validation=True,
+ alias=alias
+ )
class WavelengthCalibrationRequirement(Requirement):
def __init__(self):
- super(WavelengthCalibrationRequirement,
- self).__init__(megaradrp.products.WavelengthCalibration, 'Wavelength calibration table')
+ super(WavelengthCalibrationRequirement, self).__init__(
+ megaradrp.products.WavelengthCalibration,
+ 'Wavelength calibration table',
+ validation=True
+ )
class LinesCatalogRequirement(Requirement):
def __init__(self):
- super(LinesCatalogRequirement, self).__init__(megaradrp.types.MegaraLinesCatalog, 'Catalog of lines')
+ super(LinesCatalogRequirement, self).__init__(megaradrp.ntypes.MegaraLinesCatalog, 'Catalog of lines')
+
+
+class SkyRSSRequirement(Requirement):
+ def __init__(self, optional=True):
+ super(SkyRSSRequirement, self).__init__(
+ megaradrp.ntypes.SkyRSS,
+ 'Row Stacked Spectra of the sky',
+ optional=optional
+ )
class SensitivityRequirement(Requirement):
def __init__(self, optional=True):
super(SensitivityRequirement,
- self).__init__(megaradrp.types.MasterSensitivity,
+ self).__init__(megaradrp.ntypes.MasterSensitivity,
'Master sensitivity for flux calibration',
optional=optional
)
@@ -104,7 +130,16 @@ def __init__(self, optional=True):
class ReferenceExtinction(Requirement):
def __init__(self, optional=True):
super(ReferenceExtinction,
- self).__init__(megaradrp.types.ReferenceExtinctionTable,
+ self).__init__(megaradrp.ntypes.ReferenceExtinctionTable,
"Reference extinction",
optional=optional
- )
\ No newline at end of file
+ )
+
+
+class DiffuseLightRequirement(Requirement):
+ def __init__(self, optional=True):
+ super(DiffuseLightRequirement,
+ self).__init__(megaradrp.ntypes.DiffuseLightCorrection,
+ 'Diffuse light correction image',
+ optional=optional
+ )
diff --git a/megaradrp/schemas/__init__.py b/megaradrp/schemas/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/megaradrp/schemas/baseimage.json b/megaradrp/schemas/baseimage.json
new file mode 100644
index 00000000..7ca38207
--- /dev/null
+++ b/megaradrp/schemas/baseimage.json
@@ -0,0 +1,520 @@
+{
+ "$id": "https://guaix.ucm.es/megara/schemas/baseimage.json",
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Schema for MEGARA headers",
+ "description": "Describe MEGARA headers",
+ "anyOf": [
+ {"$ref": "#/definitions/one_ext_hdul"},
+ {"$ref": "#/definitions/two_ext_hdul"},
+ {"$ref": "#/definitions/four_ext_hdul"},
+ {"$ref": "#/definitions/proc_rss_wl_hdul"},
+ {"$ref": "#/definitions/proc_image_hdul"}
+ ],
+ "definitions": {
+ "structure_hdu": {
+ "type": "object",
+ "properties": {
+ "values": {
+ "type": "object"
+ },
+ "comments": {
+ "type": "object"
+ },
+ "ordering": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ },
+ "required": [
+ "values"
+ ]
+ },
+ "primary_hdu": {
+ "allOf": [
+ {"$ref": "#/definitions/structure_hdu"},
+ {
+ "type": "object",
+ "properties": {
+ "values": {
+ "allOf": [
+ {"$ref": "#/definitions/primary_hdu_values"},
+ {
+ "oneOf": [
+ {
+ "$ref": "#/definitions/raw_hdu_values"
+ },
+ {
+ "$ref": "#/definitions/rss_hdu_values"
+ },
+ {
+ "$ref": "#/definitions/proc_hdu_values"
+ },
+ {
+ "$ref": "#/definitions/spec_hdu_values"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "primary_hdu_values": {
+ "type": "object",
+ "properties": {
+ "OBSMODE": {"type": "string"},
+ "DATE-OBS": {"$ref": "#/definitions/datetime"},
+ "INSTRUME": {"type": "string", "const": "MEGARA"},
+ "BUNIT": {"type": "string", "default": "ADU"},
+ "DETECTOR": {"type": "string", "default": "CCD2231-84-0-E74"},
+ "EXPTIME": {"type": "number", "minimum": 0},
+ "DARKTIME": {"type": "number", "minimum": 0},
+ "READMODE": {"type": "string", "enum": ["NORMAL", "MIRROR"]},
+ "RSPEED": {"type": "string", "enum": ["SLOW", "FAST", "ENG"]},
+ "GAIN1": {"type": "number", "minimum": 0, "default": 1.73},
+ "GAIN2": {"type": "number", "minimum": 0, "default": 1.6},
+ "RDNOISE1": {"type": "number", "minimum": 0, "default": 3.4},
+ "RDNOISE2": {"type": "number", "minimum": 0, "default": 3.4},
+ "IPA": {"type": "number"},
+ "ARIMASS": {"type": "number"},
+ "ARIMASS1": {"type": "number"},
+ "ARIMASS2": {"type": "number"},
+ "AMSTART": {"type": "number"},
+ "AMEND": {"type": "number"},
+ "RA": {"type": "string"},
+ "DEC": {"type": "string"},
+ "RADEG": {"type": "number"},
+ "DECDEG": {"type": "number"},
+ "AZIMUTH": {"type": "number"},
+ "ELEVAT": {"type": "number"},
+ "ROTANG": {"type": "number"},
+ "LST": {"type": "string"},
+ "MDJ-OBS": {"type": "number"},
+ "SETPNT": {"type": "number"},
+ "CCDTEMP0": {"type": "number", "minimum": 0},
+ "CCDTEMP1": {"type": "number", "minimum": 0},
+ "CCDTEMP2": {"type": "number", "minimum": 0},
+ "CCDTEMP3": {"type": "number", "minimum": 0},
+ "SENTEMP0": {"type": "number"},
+ "SENTEMP1": {"type": "number"},
+ "SENTEMP2": {"type": "number"},
+ "SENTEMP3": {"type": "number"},
+ "SENTEMP4": {"type": "number"},
+ "SENTEMP5": {"type": "number"},
+ "SENTEMP6": {"type": "number"},
+ "INSMODE": {"type": "string", "enum": ["LCB", "MOS"]},
+ "VPHWHPOS": {"type": "string", "enum": [
+ "VPH1", "VPH2", "VPH3", "VPH4", "VPH5","VPH6", "VPH7","VPH8","VPH9","VPH10",
+ "VPH11"
+ ]
+ },
+ "VPH": {"type": "string"},
+ "FOCUS": {"type": "number", "minimum": 0},
+ "SLITN": {"type": "number"},
+ "OSFILTER": {"type": "string",
+ "enum": ["BLUE", "RED", " RED"]
+ },
+ "COVER": {"type": "string",
+ "enum": ["STOPPED", "BOTH_OPEN"]
+ },
+ "LAMPI1S": {"type": "integer",
+ "enum": [0,1]
+ },
+ "LAMPI1I": {"type": "number", "minimum": 0, "maximum": 100},
+ "LAMPI2S": {"type": "integer",
+ "enum": [0,1]
+ },
+ "LAMPI2I": {"type": "number", "minimum": 0, "maximum": 100},
+ "LAMPS1S": {"type": "integer",
+ "enum": [0,1]
+ },
+ "LAMPS2S": {"type": "integer",
+ "enum": [0,1]
+ },
+ "LAMPS3S": {"type": "integer",
+ "enum": [0,1]
+ },
+ "LAMPS4S": {"type": "integer",
+ "enum": [0,1]
+ },
+ "LAMPS5S": {"type": "integer", "enum": [0,1]},
+ "LAMPMIR": {"type": "string", "enum": ["WORK", "PARK", "UNDEFINED"],
+ "description": "Status of the ICM mirror"
+ },
+ "SPECLAMP": {"type": "string", "enum": ["NONE", "ThNe", "ThAr", "ThArNe", "UNKNOWN"]},
+ "OBJECT": {"type": "string"},
+ "OBSTYPE": {"type": "string",
+ "enum": ["AUXILIARY", "CALIBRATION", "SCIENTIFIC", "ENGINEERING", "OBJECT"],
+ "description": "OBJECT is not in our document"
+ },
+ "ORIGIN": {"type": "string"},
+ "INSCONF": {"$ref": "#/definitions/uuid"},
+ "UUID": {"$ref": "#/definitions/uuid"},
+ "BLCKUUID": {"$ref": "#/definitions/uuid"}
+ },
+ "required": ["OBSMODE", "DATE-OBS", "INSTRUME", "UUID", "INSCONF", "EXPTIME",
+ "VPH"]
+ },
+ "raw_hdu_values": {
+ "type": "object",
+ "properties": {
+ "NAXIS": {"const": 2},
+ "NAXIS1": {"const": 4196},
+ "NAXIS2": {"const": 4212}
+ }
+ },
+ "proc_hdu_values": {
+ "type": "object",
+ "properties": {
+ "NAXIS": {"const": 2},
+ "NAXIS1": {"const": 4096},
+ "NAXIS2": {"const": 4112}
+ }
+ },
+ "spec_hdu_values": {
+ "type": "object",
+ "properties": {
+ "NAXIS": {"const": 1},
+ "NAXIS1": {"const": 4300}
+ }
+ },
+ "rss_naxis2": {
+ "type": "object",
+ "allOf": [
+ {
+ "if": {
+ "properties": {
+ "INSMODE": {
+ "const": "LCB"
+ }
+ }
+ },
+ "then": {"properties": {"NAXIS2": {"const": 623}}}
+ },
+ {
+ "if": {
+ "properties": {
+ "INSMODE": {
+ "const": "MOS"
+ }
+ }
+ },
+ "then": {"properties": {"NAXIS2": {"const": 644}}}
+ }
+ ]
+ },
+ "rss_hdu_values": {
+ "allOf": [
+ {
+ "type": "object",
+ "properties": {
+ "NAXIS": {"const": 2},
+ "NAXIS1": {"enum": [4300, 4096]}
+ }
+ },
+ {
+ "$ref": "#/definitions/rss_naxis2"
+ }
+ ]
+ },
+ "one_ext_hdul": {
+ "description": "A MEGARA image, 1 HDU",
+ "type": "array",
+ "additionalItems": false,
+ "minItems": 1,
+ "maxItems": 1,
+ "items": [
+ {
+ "$ref": "#/definitions/primary_hdu"
+ }
+ ]
+ },
+ "two_ext_hdul": {
+ "type": "array",
+ "description": "A MEGARA image, 2 HDUs",
+ "additionalItems": false,
+ "minItems": 2,
+ "maxItems": 4,
+ "items": [
+ {
+ "$ref": "#/definitions/primary_hdu"
+ },
+ {
+ "$ref": "#/definitions/fibers_hdu"
+ },
+ {
+ },
+ {
+ }
+ ]
+ },
+ "four_ext_hdul": {
+ "type": "array",
+ "description": "A MEGARA image, 4 HDUs",
+ "additionalItems": false,
+ "minItems": 4,
+ "maxItems": 4,
+ "items": [
+ {
+ "$ref": "#/definitions/primary_hdu"
+ },
+ {
+ "$ref": "#/definitions/fibers_hdu"
+ },
+ {
+ "$ref": "#/definitions/variance_hdu"
+ },
+ {
+ "$ref": "#/definitions/map_hdu"
+ }
+ ]
+ },
+ "proc_rss_wl_hdul": {
+ "type": "array",
+ "description": "A MEGARA image, 3 HDUs",
+ "additionalItems": false,
+ "minItems": 3,
+ "maxItems": 3,
+ "items": [
+ {
+ "$ref": "#/definitions/primary_hdu"
+ },
+ {
+ "$ref": "#/definitions/fibers_hdu"
+ },
+ {
+ "$ref": "#/definitions/wlmap_hdu"
+ }
+ ]
+ },
+ "proc_image_hdul": {
+ "type": "array",
+ "description": "A MEGARA image, 3 HDUs",
+ "additionalItems": false,
+ "minItems": 2,
+ "items": [
+ {
+ "$ref": "#/definitions/primary_hdu"
+ },
+ {
+ "$ref": "#/definitions/fibers_hdu"
+ },
+ {
+ "$ref": "#/definitions/variance_hdu"
+ },
+ {
+ "$ref": "#/definitions/map_hdu"
+ }
+ ]
+ },
+ "spec1d_hdul": {
+ "type": "array",
+ "description": "A MEGARA spectrum, 1 HDUs",
+ "additionalItems": false,
+ "minItems": 1,
+ "items": [
+ {
+ "type": "object"
+ }
+ ]
+ },
+ "fibers_hdu": {
+ "allOf": [
+ {"$ref": "#/definitions/structure_hdu"},
+ {
+ "type": "object",
+ "properties": {
+ "values": {
+ "allOf": [
+ {"$ref": "#/definitions/fibers_hdu_values"},
+ {
+ "oneOf": [
+ {
+ "$ref": "#/definitions/fibers_lcb_hdu_values"
+ },
+ {
+ "$ref": "#/definitions/fibers_mos_hdu_values"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "fibers_hdu_values": {
+ "type": "object",
+ "properties": {
+ "EXTNAME": {"type": "string", "const": "FIBERS"},
+ "NFIBERS": {"type": "integer", "enum": [623, 644]},
+ "NBUNDLES": {"type": "integer", "enum": [9, 92]},
+ "INSMODE": {"type": "string", "enum": ["LCB", "MOS"]},
+ "CONFID": {"$ref": "#/definitions/uuid"},
+ "CONFNAME": {"type": "string"}
+ },
+ "patternProperties": {
+ "^FIB[0123456][0-9][0-9]_N$": {"type": "string",
+ "description": "this is matching more keywords, like FIB699_N"
+ },
+ "^FIB[0123456][0-9][0-9]_X$": {"type": "number"},
+ "^FIB[0123456][0-9][0-9]_Y$": {"type": "number"},
+ "^FIB[0123456][0-9][0-9]_B$": {"type": "integer"},
+ "^FIB[0123456][0-9][0-9]_D$": {"type": "number"},
+ "^FIB[0123456][0-9][0-9]_R$": {"type": "number"},
+ "^FIB[0123456][0-9][0-9]_A$": {"type": "boolean"},
+ "^BUN0[0-9][0-9]_E$": {"type": "boolean"},
+ "^BUN0[0-9][0-9]_X$": {"type": "number"},
+ "^BUN0[0-9][0-9]_Y$": {"type": "number"},
+ "^BUN0[0-9][0-9]_O$": {"type": "number"},
+ "^BUN0[0-9][0-9]_P$": {"type": "integer"},
+ "^BUN0[0-9][0-9]_I$": {"type": "string"},
+ "^BUN0[0-9][0-9]_T$": {"type": "string"}
+ },
+ "required": ["EXTNAME","NFIBERS", "NBUNDLES", "INSMODE", "CONFID",
+ "FIB001_X", "FIB002_Y", "FIB623_X", "FIB623_Y"]
+ },
+ "fibers_lcb_hdu_values": {
+ "type": "object",
+ "properties": {
+ "NFIBERS": {"type": "integer", "const": 623},
+ "NBUNDLES": {"type": "integer", "const": 9},
+ "INSMODE": {"type": "string", "const": "LCB"},
+ "CONFID": {"$ref": "#/definitions/uuid"}
+ },
+ "required": ["FIB623_X", "FIB623_Y", "BUN000_X", "BUN093_X"]
+ },
+ "fibers_mos_hdu_values": {
+ "type": "object",
+ "properties": {
+ "NFIBERS": {"type": "integer", "const": 644},
+ "NBUNDLES": {"type": "integer"},
+ "INSMODE": {"type": "string", "const": "MOS"},
+ "CONFID": {"$ref": "#/definitions/uuid"}
+ },
+ "required": ["FIB644_X", "FIB644_Y", "BUN001_X", "BUN092_X"]
+ },
+ "wlmap_hdu": {
+ "allOf": [
+ {"$ref": "#/definitions/structure_hdu"},
+ {
+ "type": "object",
+ "properties": {
+ "values": {
+ "allOf": [
+ {"$ref": "#/definitions/wlmap_hdu_values"},
+ {
+ "oneOf": [
+ {
+ "$ref": "#/definitions/wlmap_lcb_hdu_values"
+ },
+ {
+ "$ref": "#/definitions/wlmap_mos_hdu_values"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "wlmap_hdu_values": {
+ "type": "object",
+ "properties": {
+ "EXTNAME": {"type": "string", "const": "WLMAP"},
+ "NAXIS1": {"type": "integer", "enum": [4300]},
+ "NAXIS2": {"type": "integer", "enum": [623, 644]}
+ },
+ "required": ["EXTNAME"]
+ },
+ "wlmap_lcb_hdu_values": {
+ "type": "object",
+ "properties": {
+ "NAXIS2": {"type": "integer", "const": 623}
+ }
+ },
+ "wlmap_mos_hdu_values": {
+ "type": "object",
+ "properties": {
+ "NAXIS2": {"type": "integer", "const": 644}
+ }
+ },
+ "variance_hdu": {
+ "allOf": [
+ {"$ref": "#/definitions/structure_hdu"},
+ {
+ "type": "object",
+ "properties": {
+ "values": {
+ "allOf": [
+ {"$ref": "#/definitions/variance_hdu_values"}
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "variance_hdu_values": {
+ "type": "object",
+ "properties": {
+ "EXTNAME": {"type": "string", "const": "VARIANCE"}
+ },
+ "required": ["EXTNAME"]
+ },
+ "map_hdu": {
+ "allOf": [
+ {"$ref": "#/definitions/structure_hdu"},
+ {
+ "type": "object",
+ "properties": {
+ "values": {
+ "allOf": [
+ {"$ref": "#/definitions/map_hdu_values"}
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "map_hdu_values": {
+ "type": "object",
+ "properties": {
+ "EXTNAME": {"type": "string", "const": "MAP"}
+ },
+ "required": ["EXTNAME"]
+ },
+ "sensitivity_values": {
+ "description": "Headers of master sensitivity image",
+ "type": "object",
+ "properties": {
+ "BUNIT": {"const": "ELECTRON"},
+ "TUNIT": {"const": "Jy"},
+ "PIXLIMF1": {"type": "integer", "minimum":1, "maximum": 4300},
+ "PIXLIMF2": {"type": "integer", "minimum":1, "maximum": 4300},
+ "PIXLIMR1": {"type": "integer", "minimum":1, "maximum": 4300},
+ "PIXLIMR2": {"type": "integer", "minimum":1, "maximum": 4300},
+ "PIXLIMM1": {"type": "integer", "minimum":1, "maximum": 4300},
+ "PIXLIMM2": {"type": "integer", "minimum":1, "maximum": 4300},
+ "WAVLIMF1": {"type": "number", "minimum":0},
+ "WAVLIMF2": {"type": "number", "minimum":0},
+ "WAVLIMR1": {"type": "number", "minimum":0},
+ "WAVLIMR2": {"type": "number", "minimum":0},
+ "WAVLIMM1": {"type": "number", "minimum":0},
+ "WAVLIMM2": {"type": "number", "minimum":0}
+ },
+ "required": ["BUNIT", "TUNIT", "PIXLIMF1", "PIXLIMF2", "PIXLIMR1", "PIXLIMR2", "PIXLIMM1", "PIXLIMM2"]
+ },
+ "uuid": {
+ "type": "string",
+ "pattern": "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$"
+ },
+ "datetime": {
+ "type": "string",
+ "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\\.[0-9]{1,4})?$"
+ }
+ }
+}
\ No newline at end of file
diff --git a/megaradrp/schemas/basestruct.json b/megaradrp/schemas/basestruct.json
new file mode 100644
index 00000000..6e3e09ec
--- /dev/null
+++ b/megaradrp/schemas/basestruct.json
@@ -0,0 +1,108 @@
+{
+ "$id": "https://guaix.ucm.es/megara/schemas/basestruct.json",
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Schema for MEGARA structured results",
+ "description": "Describe MEGARA structured results",
+ "oneOf": [
+ {
+ "$ref": "#/definitions/trace_map"
+ },
+ {
+ "$ref": "#/definitions/model_map"
+ },
+ {
+ "$ref": "#/definitions/wave_calib"
+ }
+ ],
+ "definitions": {
+ "trace_map": {
+ "allOf": [
+ {
+ "$ref": "#/definitions/base_struct"
+ },
+ {
+ "type": "object",
+ "properties": {
+ "type_fqn": {
+ "type": "string",
+ "const": "megaradrp.products.tracemap.TraceMap"
+ }
+ }
+ }
+ ]
+ },
+ "model_map": {
+ "allOf": [
+ {
+ "$ref": "#/definitions/base_struct"
+ },
+ {
+ "type": "object",
+ "properties": {
+ "type_fqn": {
+ "type": "string",
+ "const": "megaradrp.products.modelmap.ModelMap"
+ }
+ }
+ }
+ ]
+ },
+ "wave_calib": {
+ "allOf": [
+ {
+ "$ref": "#/definitions/base_struct"
+ },
+ {
+ "type": "object",
+ "properties": {
+ "type_fqn": {
+ "type": "string",
+ "const": "megaradrp.products.wavecalibration.WavelengthCalibration"
+ }
+ }
+ }
+ ]
+ },
+ "base_struct": {
+ "type": "object",
+ "properties": {
+ "instrument": {
+ "type": "string"
+ },
+ "uuid": {
+ "$ref": "#/definitions/uuid"
+ },
+ "type": {
+ "type": "string"
+ },
+ "type_fqn": {
+ "type": "string"
+ },
+ "contents": {
+ "type": "array"
+ },
+ "quality_control": {
+ "type": "string"
+ },
+ "tags": {
+ "type": "object"
+ },
+ "meta_info": {
+ "$ref": "#/definitions/meta_info"
+ }
+ },
+ "required": ["instrument", "type_fqn"]
+ },
+ "meta_info": {
+ "type": "object"
+ },
+ "uuid": {
+ "type": "string",
+ "pattern": "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$"
+ },
+ "datetime": {
+ "type": "string",
+ "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\\.[0-9]{1,4})?$"
+ }
+ }
+}
\ No newline at end of file
diff --git a/megaradrp/tests/simpleobj.py b/megaradrp/tests/simpleobj.py
index 7f504643..9f486df7 100644
--- a/megaradrp/tests/simpleobj.py
+++ b/megaradrp/tests/simpleobj.py
@@ -1,9 +1,12 @@
+import math
+
+import astropy.wcs
+import astropy.io.fits as fits
+
def create_simple_hdul():
"""Create a simple image for testing"""
- import astropy.io.fits as fits
-
prim = fits.PrimaryHDU()
prim.header['instrume'] = 'MEGARA'
prim.header['VPH'] = 'LR-B'
@@ -13,4 +16,45 @@ def create_simple_hdul():
fibers = fits.ImageHDU(name='FIBERS')
fibers.header['CONFID'] = 'a908e9d2-7599-41f3-9e9d-91042df01da5'
simple_img = fits.HDUList([prim, fibers])
- return simple_img
\ No newline at end of file
+ return simple_img
+
+
+def generate_sky_wcs():
+ wcsl = astropy.wcs.WCS(naxis=2)
+
+ wcsl.wcs.crpix = [512, 512]
+ wcsl.wcs.crval = [9.0000, 32.0000]
+ wcsl.wcs.cdelt = [0.01, 0.01]
+ wcsl.wcs.ctype = ['RA---TAN', 'DEC--TAN']
+ ang = math.pi / 3.0
+ wcsl.wcs.pc = [[math.cos(ang), -math.sin(ang)], [math.sin(ang), math.cos(ang)]]
+ return wcsl
+
+
+def create_sky_header():
+ wcsl = generate_sky_wcs()
+ return wcsl.to_header()
+
+
+def create_spec_header():
+ hdr = fits.Header()
+ hdr['DATE-OBS'] = '2017-08-23T21:38:30.55'
+ # GTC
+ hdr['OBSGEO-X'] = 5327285.0921
+ hdr['OBSGEO-Y'] = -1718777.1125
+ hdr['OBSGEO-Z'] = 3051786.7327
+
+ hdr['RADEG'] = 285.481037748898
+ hdr['DECDEG'] = 42.4882140636786
+
+ hdr['CTYPE1'] = 'AWAV'
+ hdr['CRPIX1'] = 1
+ hdr['CRVAL1'] = 362.0
+ hdr['CDELT1'] = 1.86
+ hdr['CUNIT1'] = 'nm'
+
+ hdr['CRPIX2'] = 0
+ hdr['CRVAL2'] = 0
+ hdr['CDELT2'] = 1
+ hdr['CTYPE2'] = ''
+ return hdr
\ No newline at end of file
diff --git a/megaradrp/tests/test_datamodel.py b/megaradrp/tests/test_datamodel.py
index cf49596c..ffd2280f 100644
--- a/megaradrp/tests/test_datamodel.py
+++ b/megaradrp/tests/test_datamodel.py
@@ -12,7 +12,8 @@
import astropy.table
import pytest
-from ..datamodel import MegaraDataModel, FibersConf
+from ..datamodel import MegaraDataModel
+from megaradrp.instrument.focalplane import FocalPlaneConf
def create_empty_img(insmode):
@@ -46,7 +47,7 @@ def test_fiberconf_1(name, confid, nbundles, nfibers):
conf = datamodel.get_fiberconf(img)
- assert isinstance(conf, FibersConf)
+ assert isinstance(conf, FocalPlaneConf)
# Default values from file
assert conf.name == name
assert conf.conf_id == confid
diff --git a/megaradrp/tests/test_drp.py b/megaradrp/tests/test_drp.py
index 488c772e..9ab9e4aa 100644
--- a/megaradrp/tests/test_drp.py
+++ b/megaradrp/tests/test_drp.py
@@ -68,16 +68,16 @@ def test_recipes_have_tags(current_drp):
'MegaraSlitFlat': {'master_bpm': 205, 'master_bias': 105},
'MegaraTraceMap': {'master_bias': 105, 'master_bpm': 205},
'MegaraModelMap': {'master_bpm': 205, 'master_bias': 105, 'master_slitflat': 1, 'master_traces': 11},
- 'MegaraFiberFlatImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_traces': 11, 'master_wlcalib': 21},
- 'MegaraTwilightFlatImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_traces': 11, 'master_wlcalib': 21, 'master_fiberflat': 49},
- 'MegaraFocusSpectrograph': {'master_bias': 105, 'master_bpm': 205, 'master_traces': 11, 'master_wlcalib': 21},
- 'MegaraFocusTelescope': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_wlcalib': 21, 'master_fiberflat': 49, 'master_twilight': 59, 'master_traces': 11},
- 'MegaraLcbAcquisition': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_wlcalib': 21, 'master_fiberflat': 49, 'master_twilight': 59, 'master_traces': 11},
- 'MegaraLcbImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_wlcalib': 21, 'master_fiberflat': 49, 'master_twilight': 59, 'master_traces': 11},
- 'MegaraLcbStdStar': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_wlcalib': 21, 'master_fiberflat': 49, 'master_twilight': 59, 'master_traces': 11},
- 'MegaraMosAcquisition': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_wlcalib': 21, 'master_fiberflat': 49, 'master_twilight': 59, 'master_traces': 11},
- 'MegaraMosImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_wlcalib': 21, 'master_fiberflat': 49, 'master_twilight': 59, 'master_traces': 11},
- 'MegaraMosStdStar': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_wlcalib': 21, 'master_fiberflat': 49, 'master_twilight': 59, 'master_traces': 11},
+ 'MegaraFiberFlatImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_apertures': 11, 'master_wlcalib': 21},
+ 'MegaraTwilightFlatImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_apertures': 11, 'master_wlcalib': 21, 'master_fiberflat': 49},
+ 'MegaraFocusSpectrograph': {'master_bias': 105, 'master_bpm': 205, 'master_apertures': 11, 'master_wlcalib': 21},
+ 'MegaraFocusTelescope': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_wlcalib': 21, 'master_fiberflat': 49, 'master_twilight': 59, 'master_apertures': 11},
+ 'MegaraLcbAcquisition': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_wlcalib': 21, 'master_fiberflat': 49, 'master_twilight': 59, 'master_apertures': 11},
+ 'MegaraLcbImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_wlcalib': 21, 'master_fiberflat': 49, 'master_twilight': 59, 'master_apertures': 11},
+ 'MegaraLcbStdStar': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_wlcalib': 21, 'master_fiberflat': 49, 'master_twilight': 59, 'master_apertures': 11},
+ 'MegaraMosAcquisition': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_wlcalib': 21, 'master_fiberflat': 49, 'master_twilight': 59, 'master_apertures': 11},
+ 'MegaraMosImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_wlcalib': 21, 'master_fiberflat': 49, 'master_twilight': 59, 'master_apertures': 11},
+ 'MegaraMosStdStar': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 1, 'master_wlcalib': 21, 'master_fiberflat': 49, 'master_twilight': 59, 'master_apertures': 11},
'MegaraExtinctionStar': {},
'MegaraSensitivityStar': {},
}
@@ -93,16 +93,16 @@ def test_recipes_have_tags(current_drp):
'MegaraSlitFlat': {'master_bpm': 205, 'master_bias': 105},
'MegaraTraceMap': {'master_bias': 105, 'master_bpm': 205},
'MegaraModelMap': {'master_bpm': 205, 'master_bias': 105, 'master_slitflat': 2, 'master_traces': 12},
- 'MegaraFiberFlatImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_traces': 12, 'master_wlcalib': 22},
- 'MegaraTwilightFlatImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_traces': 12, 'master_wlcalib': 22, 'master_fiberflat': 42},
- 'MegaraFocusSpectrograph': {'master_bias': 105, 'master_bpm': 205, 'master_traces': 12, 'master_wlcalib': 22},
- 'MegaraFocusTelescope': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_wlcalib': 22, 'master_fiberflat': 42, 'master_twilight': 52, 'master_traces': 12},
- 'MegaraLcbAcquisition': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_wlcalib': 22, 'master_fiberflat': 42, 'master_twilight': 52, 'master_traces': 12},
- 'MegaraLcbImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_wlcalib': 22, 'master_fiberflat': 42, 'master_twilight': 52, 'master_traces': 12},
- 'MegaraLcbStdStar': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_wlcalib': 22, 'master_fiberflat': 42, 'master_twilight': 52, 'master_traces': 12},
- 'MegaraMosAcquisition': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_wlcalib': 22, 'master_fiberflat': 42, 'master_twilight': 52, 'master_traces': 12},
- 'MegaraMosImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_wlcalib': 22, 'master_fiberflat': 42, 'master_twilight': 52, 'master_traces': 12},
- 'MegaraMosStdStar': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_wlcalib': 22, 'master_fiberflat': 42, 'master_twilight': 52, 'master_traces': 12},
+ 'MegaraFiberFlatImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_apertures': 12, 'master_wlcalib': 22},
+ 'MegaraTwilightFlatImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_apertures': 12, 'master_wlcalib': 22, 'master_fiberflat': 42},
+ 'MegaraFocusSpectrograph': {'master_bias': 105, 'master_bpm': 205, 'master_apertures': 12, 'master_wlcalib': 22},
+ 'MegaraFocusTelescope': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_wlcalib': 22, 'master_fiberflat': 42, 'master_twilight': 52, 'master_apertures': 12},
+ 'MegaraLcbAcquisition': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_wlcalib': 22, 'master_fiberflat': 42, 'master_twilight': 52, 'master_apertures': 12},
+ 'MegaraLcbImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_wlcalib': 22, 'master_fiberflat': 42, 'master_twilight': 52, 'master_apertures': 12},
+ 'MegaraLcbStdStar': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_wlcalib': 22, 'master_fiberflat': 42, 'master_twilight': 52, 'master_apertures': 12},
+ 'MegaraMosAcquisition': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_wlcalib': 22, 'master_fiberflat': 42, 'master_twilight': 52, 'master_apertures': 12},
+ 'MegaraMosImage': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_wlcalib': 22, 'master_fiberflat': 42, 'master_twilight': 52, 'master_apertures': 12},
+ 'MegaraMosStdStar': {'master_bias': 105, 'master_bpm': 205, 'master_slitflat': 2, 'master_wlcalib': 22, 'master_fiberflat': 42, 'master_twilight': 52, 'master_apertures': 12},
'MegaraExtinctionStar': {},
'MegaraSensitivityStar': {},
}
@@ -201,5 +201,4 @@ def test_recipes_extract_tags(current_drp, ob_repo, results):
pass
except KeyError:
pass
-
assert results[key] == match_per_recipe
diff --git a/megaradrp/tests/test_types.py b/megaradrp/tests/test_ntypes.py
similarity index 98%
rename from megaradrp/tests/test_types.py
rename to megaradrp/tests/test_ntypes.py
index 5d74f70d..78fc9947 100644
--- a/megaradrp/tests/test_types.py
+++ b/megaradrp/tests/test_ntypes.py
@@ -5,7 +5,7 @@
import numina.core
import numina.dal
from numina.exceptions import NoResultFound
-from ..types import MasterFiberFlat
+from ..ntypes import MasterFiberFlat
from ..products.tracemap import TraceMap
diff --git a/megaradrp/tests/test_requirements.py b/megaradrp/tests/test_requirements.py
new file mode 100644
index 00000000..9b16f74e
--- /dev/null
+++ b/megaradrp/tests/test_requirements.py
@@ -0,0 +1,8 @@
+
+from numina.core import Requirement
+from ..requirements import DiffuseLightRequirement
+
+
+def test_requires_df_l():
+ req = DiffuseLightRequirement()
+ assert isinstance(req, Requirement)
\ No newline at end of file
diff --git a/megaradrp/tools/heal_traces.py b/megaradrp/tools/heal_traces.py
new file mode 100644
index 00000000..be7c5cb8
--- /dev/null
+++ b/megaradrp/tools/heal_traces.py
@@ -0,0 +1,507 @@
+from __future__ import division
+from __future__ import print_function
+
+import argparse
+from copy import deepcopy
+import json
+import numpy as np
+from numpy.polynomial import Polynomial
+import sys
+from uuid import uuid4
+
+import numina.instrument.assembly as asb
+from numina.array.display.polfit_residuals import polfit_residuals
+from numina.array.display.ximshow import ximshow_file
+from numina.array.display.pause_debugplot import pause_debugplot
+
+
+def assign_boxes_to_fibers(pseudo_slit_config, insmode):
+ """Read boxes in configuration file and assign values to fibid
+
+ Parameters
+ ----------
+ pseudo_slit_config : dict
+ Contains the association of fibers and boxes
+ insmode : string
+ Value of the INSMODE keyword: 'LCB' or 'MOS'.
+
+ Returns
+ -------
+ fibid_with_box : list of strings
+ List with string label that contains both the fibid and the
+ box name.
+
+ """
+ fibid_with_box = []
+ n1 = 1
+ list_to_print = []
+ for dumbox in pseudo_slit_config:
+ nfibers = dumbox['nfibers']
+ name = dumbox['name']
+ n2 = n1 + nfibers
+ fibid_with_box += \
+ ["{} [{}]".format(val1, val2)
+ for val1, val2 in zip(range(n1, n2), [name] * nfibers)]
+ dumstr ='Box {:>2}, fibers {:3d} - {:3d}'.format(name, n1, n2 - 1)
+ list_to_print.append(dumstr)
+ n1 = n2
+ print('\n* Fiber description for INSMODE={}'.format(insmode))
+ for dumstr in reversed(list_to_print):
+ print(dumstr)
+ print('---------------------------------')
+
+ return fibid_with_box
+
+
+def plot_trace(ax, coeff, xmin, xmax, ix_offset,
+ rawimage, fibids, fiblabel, colour):
+ if xmin == xmax == 0:
+ num = 4096
+ xp = np.linspace(start=1, stop=4096, num=num)
+ else:
+ num = int(float(xmax - xmin + 1) + 0.5)
+ xp = np.linspace(start=xmin, stop=xmax, num=num)
+ ypol = Polynomial(coeff)
+ yp = ypol(xp)
+ if rawimage:
+ lcut = (yp > 2056.5)
+ yp[lcut] += 100
+ ax.plot(xp + ix_offset, yp + 1, color=colour, linestyle='dotted')
+ if fibids:
+ if xmin == xmax == 0:
+ xmidpoint = 2048
+ else:
+ xmidpoint = (xmin+xmax)/2
+ ax.text(xmidpoint, yp[int(num / 2)], fiblabel, fontsize=6,
+ bbox=dict(boxstyle="round,pad=0.1", fc="white", ec="grey", ),
+ color=colour, fontweight='bold', backgroundcolor='white',
+ ha='center')
+
+
+def main(args=None):
+ # parse command-line options
+ parser = argparse.ArgumentParser(
+ description="description: heal traces"
+ )
+ # positional parameters
+ parser.add_argument("fits_file",
+ help="FITS image containing the spectra",
+ type=argparse.FileType('r'))
+ parser.add_argument("traces_file",
+ help="JSON file with fiber traces",
+ type=argparse.FileType('r'))
+ # optional parameters
+ parser.add_argument("--rawimage",
+ help="FITS file is a RAW image (otherwise trimmed "
+ "image is assumed)",
+ action="store_true")
+ parser.add_argument("--global_offset",
+ help="Global offset polynomial coefficients "
+ "(+upwards, -downwards)")
+ parser.add_argument("--fibids",
+ help="Display fiber identification number",
+ action="store_true")
+ parser.add_argument("--verbose",
+ help="Enhance verbosity",
+ action="store_true")
+ parser.add_argument("--healing",
+ help="JSON healing file to improve traces",
+ type=argparse.FileType('r'))
+ parser.add_argument("--updated_traces",
+ help="JSON file with modified fiber traces",
+ type=argparse.FileType('w'))
+ parser.add_argument("--z1z2",
+ help="tuple z1,z2, minmax or None (use zscale)")
+ parser.add_argument("--bbox",
+ help="bounding box tuple: nc1,nc2,ns1,ns2")
+ parser.add_argument("--keystitle",
+ help="tuple of FITS keywords.format: " +
+ "key1,key2,...keyn.'format'")
+ parser.add_argument("--geometry",
+ help="tuple x,y,dx,dy",
+ default="0,0,640,480")
+ parser.add_argument("--pdffile",
+                        help="output PDF file name",
+ type=argparse.FileType('w'))
+ parser.add_argument("--echo",
+ help="Display full command line",
+ action="store_true")
+
+ args = parser.parse_args(args=args)
+
+ if args.echo:
+ print('\033[1m\033[31m% ' + ' '.join(sys.argv) + '\033[0m\n')
+
+ # global_offset in command line
+ if args.global_offset is None:
+ args_global_offset = [0.0]
+ else:
+ args_global_offset = [float(dum) for dum in
+ str(args.global_offset).split(",")]
+
+ # read pdffile
+ if args.pdffile is not None:
+ from matplotlib.backends.backend_pdf import PdfPages
+ pdf = PdfPages(args.pdffile.name)
+ else:
+ pdf = None
+
+ ax = ximshow_file(args.fits_file.name,
+ args_cbar_orientation='vertical',
+ args_z1z2=args.z1z2,
+ args_bbox=args.bbox,
+ args_keystitle=args.keystitle,
+ args_geometry=args.geometry,
+ pdf=pdf,
+ show=False)
+
+ # trace offsets for RAW images
+ if args.rawimage:
+ ix_offset = 51
+ else:
+ ix_offset = 1
+
+ # read and display traces from JSON file
+ bigdict = json.loads(open(args.traces_file.name).read())
+
+ # Load metadata from the traces
+ meta_info = bigdict['meta_info']
+
+ origin = meta_info['origin']
+ insconf_uuid = origin['insconf_uuid']
+ date_obs = origin.get('date_obs')
+
+ tags = bigdict['tags']
+ insmode = tags['insmode']
+
+ # create instrument model
+ pkg_paths = ['megaradrp.instrument.configs']
+ store = asb.load_paths_store(pkg_paths)
+
+ insmodel = asb.assembly_instrument(store, insconf_uuid, date_obs, by_key='uuid')
+
+ pseudo_slit_config = insmodel.get_value('pseudoslit.boxes', **tags)
+
+ fibid_with_box = assign_boxes_to_fibers(pseudo_slit_config, insmode)
+ total_fibers = bigdict['total_fibers']
+ if total_fibers != len(fibid_with_box):
+ raise ValueError('Mismatch between number of fibers and '
+ 'expected number from account from boxes')
+ if 'global_offset' in bigdict.keys():
+ global_offset = bigdict['global_offset']
+ if args_global_offset != [0.0] and global_offset != [0.0]:
+ raise ValueError('global_offset != 0 argument cannot be employed '
+ 'when global_offset != 0 in JSON file')
+ elif args_global_offset != [0.0]:
+ global_offset = args_global_offset
+ else:
+ global_offset = args_global_offset
+ print('>>> Using global_offset:', global_offset)
+ pol_global_offset = np.polynomial.Polynomial(global_offset)
+ if 'ref_column' in bigdict.keys():
+ ref_column = bigdict['ref_column']
+ else:
+ ref_column = 2000
+ for fiberdict in bigdict['contents']:
+ fibid = fiberdict['fibid']
+ fiblabel = fibid_with_box[fibid - 1]
+ start = fiberdict['start']
+ stop = fiberdict['stop']
+ coeff = np.array(fiberdict['fitparms'])
+ # skip fibers without trace
+ if len(coeff) > 0:
+ pol_trace = np.polynomial.Polynomial(coeff)
+ y_at_ref_column = pol_trace(ref_column)
+ correction = pol_global_offset(y_at_ref_column)
+ coeff[0] += correction
+ # update values in bigdict (JSON structure)
+ bigdict['contents'][fibid-1]['fitparms'] = coeff.tolist()
+ plot_trace(ax, coeff, start, stop, ix_offset, args.rawimage,
+ args.fibids, fiblabel, colour='blue')
+ else:
+ print('Warning ---> Missing fiber:', fibid_with_box[fibid - 1])
+
+ # if present, read healing JSON file
+ if args.healing is not None:
+ healdict = json.loads(open(args.healing.name).read())
+ list_operations = healdict['operations']
+ for operation in list_operations:
+
+ if operation['description'] == 'vertical_shift_in_pixels':
+ if 'fibid_list' in operation.keys():
+ fibid_list = operation['fibid_list']
+ else:
+ fibid_ini = operation['fibid_ini']
+ fibid_end = operation['fibid_end']
+ fibid_list = range(fibid_ini, fibid_end + 1)
+ for fibid in fibid_list:
+ if fibid < 1 or fibid > total_fibers:
+ raise ValueError('fibid number outside valid range')
+ fiblabel = fibid_with_box[fibid - 1]
+ coeff = np.array(
+ bigdict['contents'][fibid - 1]['fitparms']
+ )
+ if len(coeff) > 0:
+ if args.verbose:
+ print('(vertical_shift_in_pixels) fibid:',
+ fiblabel)
+ vshift = operation['vshift']
+ coeff[0] += vshift
+ bigdict['contents'][fibid - 1]['fitparms'] = \
+ coeff.tolist()
+ start = bigdict['contents'][fibid - 1]['start']
+ stop = bigdict['contents'][fibid - 1]['stop']
+ plot_trace(ax, coeff, start, stop, ix_offset,
+ args.rawimage, True, fiblabel,
+ colour='green')
+ else:
+ print('(vertical_shift_in_pixels SKIPPED) fibid:',
+ fiblabel)
+
+ elif operation['description'] == 'duplicate_trace':
+ fibid_original = operation['fibid_original']
+ if fibid_original < 1 or fibid_original > total_fibers:
+ raise ValueError(
+ 'fibid_original number outside valid range'
+ )
+ fibid_duplicated = operation['fibid_duplicated']
+ if fibid_duplicated < 1 or fibid_duplicated > total_fibers:
+ raise ValueError(
+ 'fibid_duplicated number outside valid range'
+ )
+ fiblabel_original = fibid_with_box[fibid_original - 1]
+ fiblabel_duplicated = fibid_with_box[fibid_duplicated - 1]
+ coeff = np.array(
+ bigdict['contents'][fibid_original - 1]['fitparms']
+ )
+ if len(coeff) > 0:
+ if args.verbose:
+ print('(duplicated_trace) fibids:',
+ fiblabel_original, '-->', fiblabel_duplicated)
+ vshift = operation['vshift']
+ coeff[0] += vshift
+ bigdict['contents'][fibid_duplicated - 1]['fitparms'] = \
+ coeff.tolist()
+ start = bigdict['contents'][fibid_original - 1]['start']
+ stop = bigdict['contents'][fibid_original - 1]['stop']
+ bigdict['contents'][fibid_duplicated - 1]['start'] = start
+ bigdict['contents'][fibid_duplicated - 1]['stop'] = stop
+ plot_trace(ax, coeff, start, stop, ix_offset,
+ args.rawimage, True, fiblabel_duplicated,
+ colour='green')
+ else:
+ print('(duplicated_trace SKIPPED) fibids:',
+ fiblabel_original, '-->', fiblabel_duplicated)
+
+ elif operation['description'] == 'extrapolation':
+ if 'fibid_list' in operation.keys():
+ fibid_list = operation['fibid_list']
+ else:
+ fibid_ini = operation['fibid_ini']
+ fibid_end = operation['fibid_end']
+ fibid_list = range(fibid_ini, fibid_end + 1)
+ for fibid in fibid_list:
+ if fibid < 1 or fibid > total_fibers:
+ raise ValueError('fibid number outside valid range')
+ fiblabel = fibid_with_box[fibid - 1]
+ coeff = np.array(
+ bigdict['contents'][fibid - 1]['fitparms']
+ )
+ if len(coeff) > 0:
+ if args.verbose:
+ print('(extrapolation) fibid:', fiblabel)
+ # update values in bigdict (JSON structure)
+ start = operation['start']
+ stop = operation['stop']
+ start_orig = bigdict['contents'][fibid - 1]['start']
+ stop_orig = bigdict['contents'][fibid - 1]['stop']
+ bigdict['contents'][fibid - 1]['start'] = start
+ bigdict['contents'][fibid - 1]['stop'] = stop
+ if start < start_orig:
+ plot_trace(ax, coeff, start, start_orig,
+ ix_offset,
+ args.rawimage, True, fiblabel,
+ colour='green')
+ if stop_orig < stop:
+ plot_trace(ax, coeff, stop_orig, stop,
+ ix_offset,
+ args.rawimage, True, fiblabel,
+ colour='green')
+ if start_orig <= start <= stop <= stop_orig:
+ plot_trace(ax, coeff, start, stop,
+ ix_offset,
+ args.rawimage, True, fiblabel,
+ colour='green')
+ else:
+ print('(extrapolation SKIPPED) fibid:', fiblabel)
+
+ elif operation['description'] == 'fit_through_user_points':
+ fibid = operation['fibid']
+ fiblabel = fibid_with_box[fibid - 1]
+ if args.verbose:
+ print('(fit through user points) fibid:', fiblabel)
+ poldeg = operation['poldeg']
+ start = operation['start']
+ stop = operation['stop']
+ xfit = []
+ yfit = []
+ for userpoint in operation['user_points']:
+ # assume x, y coordinates in JSON file are given in
+ # image coordinates, starting at (1,1) in the lower
+ # left corner
+ xdum = userpoint['x'] - 1 # use np.array coordinates
+ ydum = userpoint['y'] - 1 # use np.array coordinates
+ xfit.append(xdum)
+ yfit.append(ydum)
+ xfit = np.array(xfit)
+ yfit = np.array(yfit)
+ if len(xfit) <= poldeg:
+ raise ValueError('Insufficient number of points to fit'
+ ' polynomial')
+ poly, residum = polfit_residuals(xfit, yfit, poldeg)
+ coeff = poly.coef
+ plot_trace(ax, coeff, start, stop, ix_offset,
+ args.rawimage, args.fibids, fiblabel,
+ colour='green')
+ bigdict['contents'][fibid - 1]['start'] = start
+ bigdict['contents'][fibid - 1]['stop'] = stop
+ bigdict['contents'][fibid - 1]['fitparms'] = coeff.tolist()
+
+ elif operation['description'] == \
+ 'extrapolation_through_user_points':
+ fibid = operation['fibid']
+ fiblabel = fibid_with_box[fibid - 1]
+ if args.verbose:
+ print('(extrapolation_through_user_points):', fiblabel)
+ start_reuse = operation['start_reuse']
+ stop_reuse = operation['stop_reuse']
+ resampling = operation['resampling']
+ poldeg = operation['poldeg']
+ start = operation['start']
+ stop = operation['stop']
+ coeff = bigdict['contents'][fibid - 1]['fitparms']
+ xfit = np.linspace(start_reuse, stop_reuse, num=resampling)
+ poly = np.polynomial.Polynomial(coeff)
+ yfit = poly(xfit)
+ for userpoint in operation['user_points']:
+ # assume x, y coordinates in JSON file are given in
+ # image coordinates, starting at (1,1) in the lower
+ # left corner
+ xdum = userpoint['x'] - 1 # use np.array coordinates
+ ydum = userpoint['y'] - 1 # use np.array coordinates
+ xfit = np.concatenate((xfit, np.array([xdum])))
+ yfit = np.concatenate((yfit, np.array([ydum])))
+ poly, residum = polfit_residuals(xfit, yfit, poldeg)
+ coeff = poly.coef
+ if start < start_reuse:
+ plot_trace(ax, coeff, start, start_reuse, ix_offset,
+ args.rawimage, args.fibids, fiblabel,
+ colour='green')
+ if stop_reuse < stop:
+ plot_trace(ax, coeff, stop_reuse, stop, ix_offset,
+ args.rawimage, args.fibids, fiblabel,
+ colour='green')
+ bigdict['contents'][fibid - 1]['start'] = start
+ bigdict['contents'][fibid - 1]['stop'] = stop
+ bigdict['contents'][fibid - 1]['fitparms'] = coeff.tolist()
+
+ elif operation['description'] == 'sandwich':
+ fibid = operation['fibid']
+ fiblabel = fibid_with_box[fibid - 1]
+ if args.verbose:
+ print('(sandwich) fibid:', fiblabel)
+ fraction = operation['fraction']
+ nf1, nf2 = operation['neighbours']
+ start = operation['start']
+ stop = operation['stop']
+ tmpf1 = bigdict['contents'][nf1 - 1]
+ tmpf2 = bigdict['contents'][nf2 - 1]
+ if nf1 != tmpf1['fibid'] or nf2 != tmpf2['fibid']:
+ raise ValueError(
+ "Unexpected fiber numbers in neighbours"
+ )
+ coefff1 = np.array(tmpf1['fitparms'])
+ coefff2 = np.array(tmpf2['fitparms'])
+ coeff = coefff1 + fraction * (coefff2 - coefff1)
+ plot_trace(ax, coeff, start, stop, ix_offset,
+ args.rawimage, args.fibids,
+ fiblabel, colour='green')
+ # update values in bigdict (JSON structure)
+ bigdict['contents'][fibid - 1]['start'] = start
+ bigdict['contents'][fibid - 1]['stop'] = stop
+ bigdict['contents'][fibid - 1][
+ 'fitparms'] = coeff.tolist()
+ if fibid in bigdict['error_fitting']:
+ bigdict['error_fitting'].remove(fibid)
+
+ elif operation['description'] == 'renumber_fibids_within_box':
+ fibid_ini = operation['fibid_ini']
+ fibid_end = operation['fibid_end']
+ box_ini = fibid_with_box[fibid_ini - 1][4:]
+ box_end = fibid_with_box[fibid_end - 1][4:]
+ if box_ini != box_end:
+ print('ERROR: box_ini={}, box_end={}'.format(box_ini,
+ box_end))
+ raise ValueError('fibid_ini and fibid_end correspond to '
+ 'different fiber boxes')
+ fibid_shift = operation['fibid_shift']
+ if fibid_shift in [-1, 1]:
+ if fibid_shift == -1:
+ i_start = fibid_ini
+ i_stop = fibid_end + 1
+ i_step = 1
+ else:
+ i_start = fibid_end
+ i_stop = fibid_ini - 1
+ i_step = -1
+ for fibid in range(i_start, i_stop, i_step):
+ fiblabel_ori = fibid_with_box[fibid - 1]
+ fiblabel_new = fibid_with_box[fibid - 1 + fibid_shift]
+ if args.verbose:
+ print('(renumber_fibids) fibid:',
+ fiblabel_ori, '-->', fiblabel_new)
+ bigdict['contents'][fibid -1 + fibid_shift] = \
+ deepcopy(bigdict['contents'][fibid -1])
+ bigdict['contents'][fibid -1 + fibid_shift]['fibid'] += \
+ fibid_shift
+ # display updated trace
+ coeff = \
+ bigdict['contents'][fibid -1 + fibid_shift]['fitparms']
+ start = \
+ bigdict['contents'][fibid -1 + fibid_shift]['start']
+ stop = bigdict['contents'][fibid -1 + fibid_shift]['stop']
+ plot_trace(ax, coeff, start, stop, ix_offset,
+ args.rawimage, args.fibids,
+ fiblabel_ori + '-->' + fiblabel_new,
+ colour='green')
+ if fibid_shift == -1:
+ bigdict['contents'][fibid_end - 1]['fitparms'] = []
+ else:
+ bigdict['contents'][fibid_ini - 1]['fitparms'] = []
+ else:
+ raise ValueError('fibid_shift in operation '
+ 'renumber_fibids_within_box '
+ 'must be -1 or 1')
+ else:
+ raise ValueError('Unexpected healing method:',
+ operation['description'])
+
+# update trace map
+ if args.updated_traces is not None:
+        # avoid overwriting initial JSON file
+ if args.updated_traces.name != args.traces_file.name:
+ # new random uuid for the updated calibration
+ bigdict['uuid'] = str(uuid4())
+ with open(args.updated_traces.name, 'w') as outfile:
+ json.dump(bigdict, outfile, indent=2)
+
+ if pdf is not None:
+ pdf.savefig()
+ pdf.close()
+ else:
+ pause_debugplot(12, pltshow=True, tight_layout=True)
+
+
+if __name__ == "__main__":
+
+ main()
diff --git a/megaradrp/tools/overplot_traces.py b/megaradrp/tools/overplot_traces.py
index 5c9e8424..825a164c 100644
--- a/megaradrp/tools/overplot_traces.py
+++ b/megaradrp/tools/overplot_traces.py
@@ -2,15 +2,11 @@
from __future__ import print_function
import argparse
-from copy import deepcopy
-import json
import numpy as np
from numpy.polynomial import Polynomial
import sys
-from uuid import uuid4
-
import numina.instrument.assembly as asb
-from numina.array.display.polfit_residuals import polfit_residuals
+import numina.types.structured as structured
from numina.array.display.ximshow import ximshow_file
from numina.array.display.pause_debugplot import pause_debugplot
@@ -53,16 +49,15 @@ def assign_boxes_to_fibers(pseudo_slit_config, insmode):
return fibid_with_box
-def plot_trace(ax, coeff, xmin, xmax, ix_offset,
- rawimage, fibids, fiblabel, colour):
+def plot_aper(ax, center_model, xmin, xmax, ix_offset,
+ rawimage, fibids, fiblabel, colour, correction=0):
if xmin == xmax == 0:
num = 4096
xp = np.linspace(start=1, stop=4096, num=num)
else:
num = int(float(xmax - xmin + 1) + 0.5)
xp = np.linspace(start=xmin, stop=xmax, num=num)
- ypol = Polynomial(coeff)
- yp = ypol(xp)
+ yp = center_model(xp) + correction
if rawimage:
lcut = (yp > 2056.5)
yp[lcut] += 100
@@ -95,7 +90,8 @@ def main(args=None):
help="FITS file is a RAW image (otherwise trimmed "
"image is assumed)",
action="store_true")
- parser.add_argument("--global_offset",
+ parser.add_argument("--global_offset", "--global-offset",
+ nargs='+', type=float,
help="Global offset polynomial coefficients "
"(+upwards, -downwards)")
parser.add_argument("--fibids",
@@ -104,12 +100,6 @@ def main(args=None):
parser.add_argument("--verbose",
help="Enhance verbosity",
action="store_true")
- parser.add_argument("--healing",
- help="JSON healing file to improve traces",
- type=argparse.FileType('r'))
- parser.add_argument("--updated_traces",
- help="JSON file with modified fiber traces",
- type=argparse.FileType('w'))
parser.add_argument("--z1z2",
help="tuple z1,z2, minmax or None (use zscale)")
parser.add_argument("--bbox",
@@ -134,10 +124,12 @@ def main(args=None):
# global_offset in command line
if args.global_offset is None:
- args_global_offset = [0.0]
+ args_global_offset_set = False
+ args.global_offset = [0.0]
else:
- args_global_offset = [float(dum) for dum in
- str(args.global_offset).split(",")]
+ args_global_offset_set = True
+
+ args_global_offset = Polynomial(args.global_offset)
# read pdffile
if args.pdffile is not None:
@@ -162,16 +154,23 @@ def main(args=None):
ix_offset = 1
# read and display traces from JSON file
- bigdict = json.loads(open(args.traces_file.name).read())
-
+ # TODO: some checks, this should be done by validating the struct
+ with open(args.traces_file.name, mode='r') as fd:
+ import json
+ data = json.load(fd)
+ if 'type_fqn' not in data:
+ raise ValueError("malformed JSON file, 'type_fqn' missing")
+ #
+ apers = structured.open(args.traces_file.name)
# Load metadata from the traces
- meta_info = bigdict['meta_info']
+ meta_info = apers.meta_info
origin = meta_info['origin']
insconf_uuid = origin['insconf_uuid']
- date_obs = origin['date_obs']
+ # None is allowed
+ date_obs = origin.get('date_obs')
- tags = bigdict['tags']
+ tags = apers.tags
insmode = tags['insmode']
# create instrument model
@@ -182,319 +181,35 @@ def main(args=None):
pseudo_slit_config = insmodel.get_value('pseudoslit.boxes', **tags)
- fibid_with_box = assign_boxes_to_fibers(pseudo_slit_config, insmode)
- total_fibers = bigdict['total_fibers']
+ fibid_with_box = list(assign_boxes_to_fibers(pseudo_slit_config, insmode))
+ total_fibers = apers.total_fibers
if total_fibers != len(fibid_with_box):
raise ValueError('Mismatch between number of fibers and '
'expected number from account from boxes')
- if 'global_offset' in bigdict.keys():
- global_offset = bigdict['global_offset']
- if args_global_offset != [0.0] and global_offset != [0.0]:
- raise ValueError('global_offset != 0 argument cannot be employed '
- 'when global_offset != 0 in JSON file')
- elif args_global_offset != [0.0]:
- global_offset = args_global_offset
- else:
+
+ if args_global_offset_set:
global_offset = args_global_offset
- print('>>> Using global_offset:', global_offset)
- pol_global_offset = np.polynomial.Polynomial(global_offset)
- if 'ref_column' in bigdict.keys():
- ref_column = bigdict['ref_column']
else:
- ref_column = 2000
- for fiberdict in bigdict['contents']:
- fibid = fiberdict['fibid']
+ global_offset = apers.global_offset
+
+ print('>>> Using global_offset:', global_offset)
+ ref_column = apers.ref_column
+
+ for geot in apers.contents:
+ fibid = geot.fibid
fiblabel = fibid_with_box[fibid - 1]
- start = fiberdict['start']
- stop = fiberdict['stop']
- coeff = np.array(fiberdict['fitparms'])
+ start = geot.start
+ stop = geot.stop
# skip fibers without trace
- if len(coeff) > 0:
- pol_trace = np.polynomial.Polynomial(coeff)
- y_at_ref_column = pol_trace(ref_column)
- correction = pol_global_offset(y_at_ref_column)
- coeff[0] += correction
- # update values in bigdict (JSON structure)
- bigdict['contents'][fibid-1]['fitparms'] = coeff.tolist()
- plot_trace(ax, coeff, start, stop, ix_offset, args.rawimage,
- args.fibids, fiblabel, colour='blue')
+ if geot.valid:
+ center_model = geot.aper_center()
+ y_at_ref_column = center_model(ref_column)
+ correction = global_offset(y_at_ref_column)
+ plot_aper(ax, center_model, start, stop, ix_offset, args.rawimage,
+ args.fibids, fiblabel, colour='blue', correction=correction)
else:
print('Warning ---> Missing fiber:', fibid_with_box[fibid - 1])
- # if present, read healing JSON file
- if args.healing is not None:
- healdict = json.loads(open(args.healing.name).read())
- list_operations = healdict['operations']
- for operation in list_operations:
-
- if operation['description'] == 'vertical_shift_in_pixels':
- if 'fibid_list' in operation.keys():
- fibid_list = operation['fibid_list']
- else:
- fibid_ini = operation['fibid_ini']
- fibid_end = operation['fibid_end']
- fibid_list = range(fibid_ini, fibid_end + 1)
- for fibid in fibid_list:
- if fibid < 1 or fibid > total_fibers:
- raise ValueError('fibid number outside valid range')
- fiblabel = fibid_with_box[fibid - 1]
- coeff = np.array(
- bigdict['contents'][fibid - 1]['fitparms']
- )
- if len(coeff) > 0:
- if args.verbose:
- print('(vertical_shift_in_pixels) fibid:',
- fiblabel)
- vshift = operation['vshift']
- coeff[0] += vshift
- bigdict['contents'][fibid - 1]['fitparms'] = \
- coeff.tolist()
- start = bigdict['contents'][fibid - 1]['start']
- stop = bigdict['contents'][fibid - 1]['stop']
- plot_trace(ax, coeff, start, stop, ix_offset,
- args.rawimage, True, fiblabel,
- colour='green')
- else:
- print('(vertical_shift_in_pixels SKIPPED) fibid:',
- fiblabel)
-
- elif operation['description'] == 'duplicate_trace':
- fibid_original = operation['fibid_original']
- if fibid_original < 1 or fibid_original > total_fibers:
- raise ValueError(
- 'fibid_original number outside valid range'
- )
- fibid_duplicated = operation['fibid_duplicated']
- if fibid_duplicated < 1 or fibid_duplicated > total_fibers:
- raise ValueError(
- 'fibid_duplicated number outside valid range'
- )
- fiblabel_original = fibid_with_box[fibid_original - 1]
- fiblabel_duplicated = fibid_with_box[fibid_duplicated - 1]
- coeff = np.array(
- bigdict['contents'][fibid_original - 1]['fitparms']
- )
- if len(coeff) > 0:
- if args.verbose:
- print('(duplicated_trace) fibids:',
- fiblabel_original, '-->', fiblabel_duplicated)
- vshift = operation['vshift']
- coeff[0] += vshift
- bigdict['contents'][fibid_duplicated - 1]['fitparms'] = \
- coeff.tolist()
- start = bigdict['contents'][fibid_original - 1]['start']
- stop = bigdict['contents'][fibid_original - 1]['stop']
- bigdict['contents'][fibid_duplicated - 1]['start'] = start
- bigdict['contents'][fibid_duplicated - 1]['stop'] = stop
- plot_trace(ax, coeff, start, stop, ix_offset,
- args.rawimage, True, fiblabel_duplicated,
- colour='green')
- else:
- print('(duplicated_trace SKIPPED) fibids:',
- fiblabel_original, '-->', fiblabel_duplicated)
-
- elif operation['description'] == 'extrapolation':
- if 'fibid_list' in operation.keys():
- fibid_list = operation['fibid_list']
- else:
- fibid_ini = operation['fibid_ini']
- fibid_end = operation['fibid_end']
- fibid_list = range(fibid_ini, fibid_end + 1)
- for fibid in fibid_list:
- if fibid < 1 or fibid > total_fibers:
- raise ValueError('fibid number outside valid range')
- fiblabel = fibid_with_box[fibid - 1]
- coeff = np.array(
- bigdict['contents'][fibid - 1]['fitparms']
- )
- if len(coeff) > 0:
- if args.verbose:
- print('(extrapolation) fibid:', fiblabel)
- # update values in bigdict (JSON structure)
- start = operation['start']
- stop = operation['stop']
- start_orig = bigdict['contents'][fibid - 1]['start']
- stop_orig = bigdict['contents'][fibid - 1]['stop']
- bigdict['contents'][fibid - 1]['start'] = start
- bigdict['contents'][fibid - 1]['stop'] = stop
- if start < start_orig:
- plot_trace(ax, coeff, start, start_orig,
- ix_offset,
- args.rawimage, True, fiblabel,
- colour='green')
- if stop_orig < stop:
- plot_trace(ax, coeff, stop_orig, stop,
- ix_offset,
- args.rawimage, True, fiblabel,
- colour='green')
- if start_orig <= start <= stop <= stop_orig:
- plot_trace(ax, coeff, start, stop,
- ix_offset,
- args.rawimage, True, fiblabel,
- colour='green')
- else:
- print('(extrapolation SKIPPED) fibid:', fiblabel)
-
- elif operation['description'] == 'fit_through_user_points':
- fibid = operation['fibid']
- fiblabel = fibid_with_box[fibid - 1]
- if args.verbose:
- print('(fit through user points) fibid:', fiblabel)
- poldeg = operation['poldeg']
- start = operation['start']
- stop = operation['stop']
- xfit = []
- yfit = []
- for userpoint in operation['user_points']:
- # assume x, y coordinates in JSON file are given in
- # image coordinates, starting at (1,1) in the lower
- # left corner
- xdum = userpoint['x'] - 1 # use np.array coordinates
- ydum = userpoint['y'] - 1 # use np.array coordinates
- xfit.append(xdum)
- yfit.append(ydum)
- xfit = np.array(xfit)
- yfit = np.array(yfit)
- if len(xfit) <= poldeg:
- raise ValueError('Insufficient number of points to fit'
- ' polynomial')
- poly, residum = polfit_residuals(xfit, yfit, poldeg)
- coeff = poly.coef
- plot_trace(ax, coeff, start, stop, ix_offset,
- args.rawimage, args.fibids, fiblabel,
- colour='green')
- bigdict['contents'][fibid - 1]['start'] = start
- bigdict['contents'][fibid - 1]['stop'] = stop
- bigdict['contents'][fibid - 1]['fitparms'] = coeff.tolist()
-
- elif operation['description'] == \
- 'extrapolation_through_user_points':
- fibid = operation['fibid']
- fiblabel = fibid_with_box[fibid - 1]
- if args.verbose:
- print('(extrapolation_through_user_points):', fiblabel)
- start_reuse = operation['start_reuse']
- stop_reuse = operation['stop_reuse']
- resampling = operation['resampling']
- poldeg = operation['poldeg']
- start = operation['start']
- stop = operation['stop']
- coeff = bigdict['contents'][fibid - 1]['fitparms']
- xfit = np.linspace(start_reuse, stop_reuse, num=resampling)
- poly = np.polynomial.Polynomial(coeff)
- yfit = poly(xfit)
- for userpoint in operation['user_points']:
- # assume x, y coordinates in JSON file are given in
- # image coordinates, starting at (1,1) in the lower
- # left corner
- xdum = userpoint['x'] - 1 # use np.array coordinates
- ydum = userpoint['y'] - 1 # use np.array coordinates
- xfit = np.concatenate((xfit, np.array([xdum])))
- yfit = np.concatenate((yfit, np.array([ydum])))
- poly, residum = polfit_residuals(xfit, yfit, poldeg)
- coeff = poly.coef
- if start < start_reuse:
- plot_trace(ax, coeff, start, start_reuse, ix_offset,
- args.rawimage, args.fibids, fiblabel,
- colour='green')
- if stop_reuse < stop:
- plot_trace(ax, coeff, stop_reuse, stop, ix_offset,
- args.rawimage, args.fibids, fiblabel,
- colour='green')
- bigdict['contents'][fibid - 1]['start'] = start
- bigdict['contents'][fibid - 1]['stop'] = stop
- bigdict['contents'][fibid - 1]['fitparms'] = coeff.tolist()
-
- elif operation['description'] == 'sandwich':
- fibid = operation['fibid']
- fiblabel = fibid_with_box[fibid - 1]
- if args.verbose:
- print('(sandwich) fibid:', fiblabel)
- fraction = operation['fraction']
- nf1, nf2 = operation['neighbours']
- start = operation['start']
- stop = operation['stop']
- tmpf1 = bigdict['contents'][nf1 - 1]
- tmpf2 = bigdict['contents'][nf2 - 1]
- if nf1 != tmpf1['fibid'] or nf2 != tmpf2['fibid']:
- raise ValueError(
- "Unexpected fiber numbers in neighbours"
- )
- coefff1 = np.array(tmpf1['fitparms'])
- coefff2 = np.array(tmpf2['fitparms'])
- coeff = coefff1 + fraction * (coefff2 - coefff1)
- plot_trace(ax, coeff, start, stop, ix_offset,
- args.rawimage, args.fibids,
- fiblabel, colour='green')
- # update values in bigdict (JSON structure)
- bigdict['contents'][fibid - 1]['start'] = start
- bigdict['contents'][fibid - 1]['stop'] = stop
- bigdict['contents'][fibid - 1][
- 'fitparms'] = coeff.tolist()
- if fibid in bigdict['error_fitting']:
- bigdict['error_fitting'].remove(fibid)
-
- elif operation['description'] == 'renumber_fibids_within_box':
- fibid_ini = operation['fibid_ini']
- fibid_end = operation['fibid_end']
- box_ini = fibid_with_box[fibid_ini - 1][4:]
- box_end = fibid_with_box[fibid_end - 1][4:]
- if box_ini != box_end:
- print('ERROR: box_ini={}, box_end={}'.format(box_ini,
- box_end))
- raise ValueError('fibid_ini and fibid_end correspond to '
- 'different fiber boxes')
- fibid_shift = operation['fibid_shift']
- if fibid_shift in [-1, 1]:
- if fibid_shift == -1:
- i_start = fibid_ini
- i_stop = fibid_end + 1
- i_step = 1
- else:
- i_start = fibid_end
- i_stop = fibid_ini - 1
- i_step = -1
- for fibid in range(i_start, i_stop, i_step):
- fiblabel_ori = fibid_with_box[fibid - 1]
- fiblabel_new = fibid_with_box[fibid - 1 + fibid_shift]
- if args.verbose:
- print('(renumber_fibids) fibid:',
- fiblabel_ori, '-->', fiblabel_new)
- bigdict['contents'][fibid -1 + fibid_shift] = \
- deepcopy(bigdict['contents'][fibid -1])
- bigdict['contents'][fibid -1 + fibid_shift]['fibid'] += \
- fibid_shift
- # display updated trace
- coeff = \
- bigdict['contents'][fibid -1 + fibid_shift]['fitparms']
- start = \
- bigdict['contents'][fibid -1 + fibid_shift]['start']
- stop = bigdict['contents'][fibid -1 + fibid_shift]['stop']
- plot_trace(ax, coeff, start, stop, ix_offset,
- args.rawimage, args.fibids,
- fiblabel_ori + '-->' + fiblabel_new,
- colour='green')
- if fibid_shift == -1:
- bigdict['contents'][fibid_end - 1]['fitparms'] = []
- else:
- bigdict['contents'][fibid_ini - 1]['fitparms'] = []
- else:
- raise ValueError('fibid_shift in operation '
- 'renumber_fibids_within_box '
- 'must be -1 or 1')
- else:
- raise ValueError('Unexpected healing method:',
- operation['description'])
-
-# update trace map
- if args.updated_traces is not None:
- # avoid overwritting initial JSON file
- if args.updated_traces.name != args.traces_file.name:
- # new random uuid for the updated calibration
- bigdict['uuid'] = str(uuid4())
- with open(args.updated_traces.name, 'w') as outfile:
- json.dump(bigdict, outfile, indent=2)
-
if pdf is not None:
pdf.savefig()
pdf.close()
diff --git a/megaradrp/validators.py b/megaradrp/validators.py
index 2aa77662..4b9f2fe3 100644
--- a/megaradrp/validators.py
+++ b/megaradrp/validators.py
@@ -1,5 +1,5 @@
#
-# Copyright 2016-2017 Universidad Complutense de Madrid
+# Copyright 2016-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -10,13 +10,19 @@
"""Validators for Observing modes"""
import sys
-
import six
+import pkgutil
+from six import StringIO
+
+import json
+import jsonschema
from numina.exceptions import ValidationError
+from megaradrp.datatype import MegaraDataType
-def validate_focus(obresult):
+
+def validate_focus(mode, obresult):
"""Validate FOCUS_SPECTROGRAPH"""
image_groups = {}
for idx, frame in enumerate(obresult.frames):
@@ -36,4 +42,488 @@ def validate_focus(obresult):
if len(image_groups) < 2:
raise ValidationError('We have only {} different focus in OB'.format(len(image_groups)))
- return True
\ No newline at end of file
+ return True
+
+
+def validate_key(mode, obresult, key):
+ """Validate key"""
+
+ # Assume that the individual images are valid IMG_COMP
+ # check consistency of key
+ kval = []
+ for idx, frame in enumerate(obresult.frames):
+ # SPECLAMP values
+ with frame.open() as img:
+ try:
+ spec_val = img[0].header[key]
+ kval.append(spec_val)
+ except Exception:
+ _type, exc, tb = sys.exc_info()
+ six.reraise(ValidationError, exc, tb)
+
+ if kval[:-1] == kval[1:]:
+ return True
+ else:
+ raise ValidationError("{} value is incorrect".format(key))
+
+
+def validate_arc(mode, obresult):
+ """Validate ARC_CALIBRATION"""
+
+ # Assume that the individual images are valid IMG_COMP
+ return validate_key(mode, obresult, 'SPECLAMP')
+
+
+def validate_flat(mode, obresult):
+ """Validate FLAT"""
+
+ # Assume that the individual images are valid IMG_COMP
+ return True
+
+
+def validate_keyword_exists(header, key):
+ """Verify that the keyword exists"""
+ value = header.get(key)
+ if value is None:
+ msg = 'Expected keyword "{}" is not present'.format(key)
+ raise ValidationError(msg)
+ return True
+
+
+def validate_keyword_value(header, key, expected):
+ from numina.exceptions import ValidationError
+
+ validate_keyword_exists(header, key)
+ value = header.get(key)
+
+ if value != expected:
+ msg = 'Keyword "{0}" has value "{1}" != "{2}"'.format(key, value, expected)
+ raise ValidationError(msg)
+
+
+def validate_keyword_any_value(header, key, any_expected):
+ """Validate that keyword has any of allowed values"""
+ from numina.exceptions import ValidationError
+
+ validate_keyword_exists(header, key)
+ value = header.get(key)
+
+ for expected in any_expected:
+ if value == expected:
+ break
+ else:
+ msg = 'Keyword "{0}" has value "{1}" not in "{2}"'.format(key, value, any_expected)
+ raise ValidationError(msg)
+
+
+def convert_headers(hdulist):
+ headers = [convert_header(hdu.header) for hdu in hdulist]
+ return headers
+
+
+def convert_header(header):
+ hdu_v = {}
+ hdu_c = {}
+ hdu_o = []
+ hdu_repr = {'values': hdu_v, 'comments': hdu_c, 'ordering': hdu_o}
+
+ for card in header.cards:
+ key = card.keyword
+ value = card.value
+ comment = card.comment
+ hdu_v[key] = value
+ hdu_c[key] = comment
+ hdu_o.append(key)
+
+ return hdu_repr
+
+
+def check_null(obj, level=None):
+ return True
+
+
+def check_invalid(obj, level=None):
+ raise ValidationError
+
+
+class Checker(object):
+ def __init__(self, validator):
+ super(Checker, self).__init__()
+ self.validator = validator
+
+ def check(self, obj, level=None):
+
+ return self.check_post(obj, level=level)
+
+ def __call__(self, hdulist, level=None):
+ return self.check(hdulist, level=level)
+
+ def check_post(self, hdulist, level=None):
+ return True
+
+
+class ImageChecker(Checker):
+ def __init__(self, validator):
+ super(ImageChecker, self).__init__(validator)
+
+ def check(self, hdulist, level=None):
+ dheaders = convert_headers(hdulist)
+ self.check_dheaders(dheaders, level=level)
+ super(ImageChecker, self).check_post(hdulist, level=level)
+ return True
+
+ def check_dheaders(self, dheaders, level=None):
+ # Check with json schema
+ # convert header to dict
+ try:
+ self.validator.validate(dheaders)
+ except jsonschema.exceptions.ValidationError:
+ raise
+
+ if len(dheaders) == 2:
+ values_fibers = dheaders[1]['values']
+ values_primary = dheaders[0]['values']
+ check_header_additional(values_primary, values_fibers)
+
+
+class StructChecker(Checker):
+ def __init__(self, validator):
+ super(StructChecker, self).__init__(validator)
+
+ def check(self, obj, level=None):
+ self.validator.validate(obj)
+ self.check_post(obj, level=level)
+ return True
+
+
+class BaseChecker(ImageChecker):
+ def __init__(self, validator):
+ super(BaseChecker, self).__init__(validator)
+
+
+# TODO: insert all subschemas in the general schema
+_sub_schema_rss = {
+ "oneOf": [
+ {
+ "type": "object",
+ "properties": {
+ #"NAXIS1": {"const": 4300},
+ "NAXIS2": {"const": 623},
+ "INSMODE": {"const": "LCB"}
+ }
+ },
+ {
+ "type": "object",
+ "properties": {
+ #"NAXIS1": {"const": 4300},
+ "NAXIS2": {"const": 644},
+ "INSMODE": {"const": "MOS"}
+ }
+ }
+ ]
+}
+
+_sub_schema_bias = {
+ "type": "object",
+ "properties": {
+ "OBJECT": {"const": "BIAS"},
+ "OBSMODE": {"const": "MegaraBiasImage"},
+ "IMAGETYP": {"const": "IMAGE_BIAS"},
+ "EXPTIME": {"type": "number", "maximum": 0},
+ "DARKTIME": {"type": "number", "maximum": 0}
+ }
+}
+
+
+_sub_schema_master_bpm = {
+ "type": "object",
+ "properties": {
+ "NUMTYPE": {"enum": ['MasterBias', 'MASTER_BPM']}
+ }
+}
+
+_sub_schema_master_bias = {
+ "type": "object",
+ "properties": {
+ #"OBJECT": {"const": "BIAS"},
+ "OBSMODE": {"const": "MegaraBiasImage"},
+ "IMAGETYP": {"const": "MASTER_BIAS"},
+ "EXPTIME": {"type": "number", "maximum": 0},
+ "DARKTIME": {"type": "number", "maximum": 0},
+ "NUMTYPE": {"enum": ['MasterBias', 'MASTER_BIAS']}
+ }
+}
+
+
+_sub_schema_dark = {
+ "type": "object",
+ "properties": {
+ "OBSMODE": {"const": "MegaraDarkImage"},
+ "IMAGETYP": {"const": "IMAGE_DARK"},
+ }
+}
+
+
+_sub_schema_master_dark = {
+ "type": "object",
+ "properties": {
+ "OBSMODE": {"const": "MegaraDarkImage"},
+ "IMAGETYP": {"const": "MASTER_DARK"},
+ }
+}
+
+
+class ExtChecker(BaseChecker):
+ def __init__(self, schema, sub_schemas, n_ext=None):
+ super(ExtChecker, self).__init__(schema)
+ self.n_ext = n_ext
+ self.sub_schemas = sub_schemas
+
+    def check_dheaders(self, dheaders, level=None):
+
+        try:
+            super(ExtChecker, self).check_dheaders(dheaders, level=level)
+        except jsonschema.exceptions.ValidationError:
+            pass  # NOTE(review): full-schema failures deliberately ignored; only sub-schemas below are enforced — confirm intended
+        # Image must have exactly n_ext HDUs (when n_ext is set)
+        if self.n_ext is not None:
+            if len(dheaders) != self.n_ext:
+                msg = 'image has not expected number of HDUs ({})'.format(self.n_ext)
+                raise ValueError(msg)  # NOTE(review): sibling checkers raise ValidationError; ValueError here is inconsistent
+
+        # validate PRIMARY header values against each sub-schema;
+        # string entries are resolved as $ref fragments of the main schema
+        for sub_schema in self.sub_schemas:
+            try:
+                if isinstance(sub_schema, str):
+                    url, fragment = self.validator.resolver.resolve(sub_schema)
+                else:
+                    fragment = sub_schema
+
+                jsonschema.validate(dheaders[0]['values'], schema=fragment)
+            except jsonschema.exceptions.ValidationError:
+                raise
+
+
+class FlatImageChecker(ExtChecker):
+ def __init__(self, schema):
+
+ _sub_schema_flat = {
+ "type": "object",
+ "properties": {
+ "OBSMODE": {"enum": [
+ "MegaraFiberFlatImage", "MegaraTraceMap", "MegaraModelMap", "MegaraSuccess"]
+ },
+ "IMAGETYP": {"const": "IMAGE_FLAT"},
+ }
+ }
+
+ super(FlatImageChecker, self).__init__(schema, ["#/definitions/raw_hdu_values", _sub_schema_flat], n_ext=2)
+
+ def check_post(self, hdulist, level=None):
+ """Additional checks"""
+ self.check_post_level1(hdulist)
+
+    def check_post_level1(self, hdulist):
+        """Additional checks"""
+        hdr = hdulist[0].header
+        # Flat must have inc LAMPS-ON
+        # Flat must have comp LAMPS-OFF
+        lamp_i_s = (hdr['LAMPI1S'] or hdr['LAMPI2S'])
+        if not lamp_i_s:
+            msg = 'all incandescent lamps are OFF'
+            raise ValidationError(msg)
+        lamp_s_s = False  # becomes true if ANY comparison lamp is ON
+        for idx in range(1, 6):
+            label = 'LAMPS{}S'.format(idx)
+            lamp_s_s = lamp_s_s or hdr[label]
+        if lamp_s_s:
+            msg = 'some comparation lamps are ON'
+            raise ValidationError(msg)
+
+
+class CompImageChecker(ExtChecker):
+ def __init__(self, schema):
+
+ _sub_schema_comp = {
+ "type": "object",
+ "properties": {
+ "OBSMODE": {"enum": ["MegaraArcCalibration", "MegaraSuccess"]},
+ "IMAGETYP": {"const": "IMAGE_COMP"},
+ }
+ }
+
+ super(CompImageChecker, self).__init__(schema, ["#/definitions/raw_hdu_values", _sub_schema_comp], n_ext=2)
+
+ def check_post(self, hdulist, level=None):
+ """Additional checks"""
+ self.check_post_level1(hdulist)
+
+    def check_post_level1(self, hdulist):
+        """Additional checks"""
+        hdr = hdulist[0].header
+        # Flat must have all inc LAMPS-OFF
+        # Flat must have some comp LAMPS-ON
+        lamp_i_s = (hdr['LAMPI1S'] or hdr['LAMPI2S'])
+        if lamp_i_s:
+            msg = 'some incandescent lamps are ON'
+            raise ValidationError(msg)
+        lamp_s_s = False
+        for idx in range(1, 6):
+            label = 'LAMPS{}S'.format(idx)
+            lamp_s_s = lamp_s_s or hdr[label]
+        if not lamp_s_s:
+            msg = 'all comparation lamps are OFF'
+            raise ValidationError(msg)
+
+
+class TargetImageChecker(ExtChecker):
+ def __init__(self, schema):
+
+ _sub_schema_target = {
+ "type": "object",
+ "properties": {
+ }
+ }
+
+ super(TargetImageChecker, self).__init__(schema,
+ ["#/definitions/raw_hdu_values", _sub_schema_target], n_ext=2
+ )
+
+ def check_post(self, hdulist, level=None):
+ """Additional checks"""
+ self.check_post_level1(hdulist)
+
+    def check_post_level1(self, hdulist):
+        """Additional checks"""
+        hdr = hdulist[0].header
+        # Target must have all inc LAMPS-OFF
+        # Target must have all comp LAMPS-OFF
+        lamp_i_s = (hdr['LAMPI1S'] or hdr['LAMPI2S'])
+        if lamp_i_s:
+            msg = 'some incandescent lamps are ON'
+            raise ValidationError(msg)
+        lamp_s_s = False
+        for idx in range(1, 6):
+            label = 'LAMPS{}S'.format(idx)
+            lamp_s_s = lamp_s_s or hdr[label]
+        if lamp_s_s:
+            msg = 'some comparation lamps are ON'
+            raise ValidationError(msg)
+
+
+class MasterFlatRSSChecker(ExtChecker):
+ def __init__(self, schema):
+ super(MasterFlatRSSChecker, self).__init__(schema,
+ [_sub_schema_rss], n_ext=3
+ )
+
+class MasterSensitivityChecker(ExtChecker):
+ def __init__(self, schema):
+ super(MasterSensitivityChecker, self).__init__(schema,
+ ["#/definitions/spec_hdu_values", "#/definitions/sensitivity_values"], n_ext=1
+ )
+
+
+def check_header_additional(values_primary, values_fibers):
+    """Additional checks that can't be done with schema"""
+
+
+    if values_primary['INSMODE'] != values_fibers['INSMODE']:
+        raise ValueError('insmode in PRIMARY != insmode in FIBERS')
+
+    if values_fibers['INSMODE'] == 'LCB':
+        rbundles = [0, 93, 94, 95, 96, 97, 98, 99, 100]
+    else:
+        rbundles = range(1, 92 + 1)
+
+    nfibers = values_fibers['NFIBERS']
+    nbundles = values_fibers['NBUNDLES']
+
+    for idbundle in rbundles:
+        # types are checked in the json schema
+        for stype in ['P', "I", "T", "X", "Y", "O", "E"]:
+            keyname = "BUN{:03d}_{}".format(idbundle, stype)
+            if keyname not in values_fibers:
+                raise ValueError("keyname {} not in values_fibers".format(keyname))
+
+    for idfiber in range(1, nfibers + 1):
+        # types are checked in the json schema
+        for stype in ['A', "D", "R", "X", "Y", "B"]:
+            keyname = "FIB{:03d}_{}".format(idfiber, stype)
+            if keyname not in values_fibers:
+                msg = "keyname {} not in values_fibers".format(keyname)
+                raise ValueError(msg)
+
+        for stype in ["N"]:
+            keyname = "FIB{:03d}_{}".format(idfiber, stype)
+            if keyname not in values_fibers:
+                msg = "keyword {} not in values_fibers".format(keyname)
+                print(msg)
+                #raise ValueError(msg)
+
+
+class CheckAsDatatype(object):
+ """Collection of schemas for validation"""
+ def __init__(self):
+
+ image_schema_path = "baseimage.json"
+ json_schema_path = "basestruct.json"
+
+ data_image = pkgutil.get_data('megaradrp.schemas', image_schema_path)
+ data_json = pkgutil.get_data('megaradrp.schemas', json_schema_path)
+ schema_image = json.load(StringIO(data_image.decode('utf8')))
+ schema_json = json.load(StringIO(data_json.decode('utf8')))
+
+ ValClass = jsonschema.validators.validator_for(schema_image)
+ self.validator_image = ValClass(schema_image)
+ ValClass = jsonschema.validators.validator_for(schema_json)
+ self.validator_json = ValClass(schema_json)
+
+ raw_checker = ExtChecker(self.validator_image, ["#/definitions/raw_hdu_values"])
+ proc_checker = ExtChecker(self.validator_image, ["#/definitions/proc_hdu_values"])
+ rss_checker = ExtChecker(self.validator_image, [_sub_schema_rss])
+ spec_checker = ExtChecker(self.validator_image, ["#/definitions/spec_hdu_values"])
+ sens_checker = MasterSensitivityChecker(self.validator_image)
+ struct_checker = StructChecker(self.validator_json)
+
+ _megara_checkers = {}
+ _megara_checkers[MegaraDataType.UNKNOWN] = check_null
+ _megara_checkers[MegaraDataType.IMAGE_RAW] = raw_checker
+ _megara_checkers[MegaraDataType.IMAGE_BIAS] = ExtChecker(self.validator_image, ["#/definitions/raw_hdu_values", _sub_schema_bias], n_ext=1)
+ _megara_checkers[MegaraDataType.IMAGE_DARK] = ExtChecker(self.validator_image, ["#/definitions/raw_hdu_values", _sub_schema_dark], n_ext=1)
+ _megara_checkers[MegaraDataType.IMAGE_SLITFLAT] = raw_checker
+ _megara_checkers[MegaraDataType.IMAGE_FLAT] = FlatImageChecker(self.validator_image)
+ _megara_checkers[MegaraDataType.IMAGE_COMP] = CompImageChecker(self.validator_image)
+ #
+ _megara_checkers[MegaraDataType.IMAGE_TWILIGHT] = raw_checker
+ _megara_checkers[MegaraDataType.IMAGE_TEST] = raw_checker
+ _megara_checkers[MegaraDataType.IMAGE_TARGET] = TargetImageChecker(self.validator_image)
+ #
+ _megara_checkers[MegaraDataType.IMAGE_PROCESSED] = proc_checker
+ _megara_checkers[MegaraDataType.MASTER_BPM] = ExtChecker(self.validator_image, [
+ "#/definitions/proc_hdu_values", _sub_schema_master_bpm
+ ], n_ext=1)
+ _megara_checkers[MegaraDataType.MASTER_BIAS] = ExtChecker(self.validator_image, ["#/definitions/proc_hdu_values", _sub_schema_master_bias], n_ext=1)
+ _megara_checkers[MegaraDataType.MASTER_DARK] = ExtChecker(self.validator_image, ["#/definitions/proc_hdu_values", _sub_schema_master_dark], n_ext=1)
+ _megara_checkers[MegaraDataType.MASTER_SLITFLAT] = proc_checker
+ _megara_checkers[MegaraDataType.DIFFUSE_LIGHT] = proc_checker
+ #
+ _megara_checkers[MegaraDataType.RSS_PROCESSED] = rss_checker
+ _megara_checkers[MegaraDataType.RSS_WL_PROCESSED] = rss_checker
+ _megara_checkers[MegaraDataType.MASTER_FLAT] = MasterFlatRSSChecker(self.validator_image)
+ _megara_checkers[MegaraDataType.MASTER_TWILIGHT] = rss_checker
+
+ _megara_checkers[MegaraDataType.SPEC_PROCESSED] = spec_checker
+ _megara_checkers[MegaraDataType.MASTER_SENSITIVITY] = sens_checker
+
+ _megara_checkers[MegaraDataType.STRUCT_PROCESSED] = struct_checker
+ _megara_checkers[MegaraDataType.TRACE_MAP] = struct_checker
+ _megara_checkers[MegaraDataType.MODEL_MAP] = struct_checker
+ _megara_checkers[MegaraDataType.WAVE_CALIB] = struct_checker
+
+ self._megara_checkers = _megara_checkers
+
+ def __call__(self, datatype):
+ return self._megara_checkers[datatype]
+
+
+check_as_datatype = CheckAsDatatype()
diff --git a/megaradrp/visualization.py b/megaradrp/visualization.py
index ff9a2ff0..dc0a98c6 100644
--- a/megaradrp/visualization.py
+++ b/megaradrp/visualization.py
@@ -1,5 +1,5 @@
#
-# Copyright 2017-2018 Universidad Complutense de Madrid
+# Copyright 2017-2020 Universidad Complutense de Madrid
#
# This file is part of Megara DRP
#
@@ -18,12 +18,10 @@
import matplotlib.transforms as mtransforms
import numpy as np
+import megaradrp.instrument.constants as cons
M_SQRT3 = math.sqrt(3)
-PLATESCALE = 1.2120 # arcsec / mm
-SCALE = 0.443 # mm from center to center, upwards
-
def hexplot(axis, x, y, z, scale=1.0, extent=None,
cmap=None, norm=None, vmin=None, vmax=None,
@@ -199,11 +197,16 @@ def main(argv=None):
import astropy.io.fits as fits
from astropy.wcs import WCS
from astropy.visualization import simple_norm
+ import astropy.units as u
import matplotlib.transforms as mtransforms
import megaradrp.datamodel as dm
+ from megaradrp.instrument.focalplane import FocalPlaneConf
from megaradrp.processing.wcs import update_wcs_from_ipa, compute_pa_from_ipa
+ # scale of the LCB grid in mm
+ SCALE = cons.SPAXEL_SCALE.to(u.mm).value
+
try:
from megaradrp.processing.cube import create_cube_from_rss
has_contours = True
@@ -285,24 +288,22 @@ def main(argv=None):
for fname in args.rss:
with fits.open(fname) as img:
-
- datamodel = dm.MegaraDataModel()
if args.plot_nominal_config:
insmode = img['FIBERS'].header['INSMODE']
- fiberconf = datamodel.get_fiberconf_default(insmode)
+ fp_conf = dm.get_fiberconf_default(insmode)
else:
- fiberconf = datamodel.get_fiberconf(img)
- plot_mask = np.ones((fiberconf.nfibers,), dtype=np.bool)
+ fp_conf = FocalPlaneConf.from_img(img)
+ plot_mask = np.ones((fp_conf.nfibers,), dtype=np.bool)
if not args.plot_sky:
- skyfibers = fiberconf.sky_fibers()
+ skyfibers = fp_conf.sky_fibers()
skyfibers.sort()
skyfibers_idx = [(fibid - 1) for fibid in skyfibers]
plot_mask[skyfibers_idx] = False
- x = np.empty((fiberconf.nfibers,))
- y = np.empty((fiberconf.nfibers,))
+ x = np.empty((fp_conf.nfibers,))
+ y = np.empty((fp_conf.nfibers,))
# Key is fibid
- for _, fiber in sorted(fiberconf.fibers.items()):
+ for _, fiber in sorted(fp_conf.fibers.items()):
idx = fiber.fibid - 1
x[idx] = fiber.x
y[idx] = fiber.y
@@ -408,7 +409,29 @@ def main(argv=None):
synt.writeto(args.contour_image_save)
conserve_flux = not args.contour_is_density
- s_cube = create_cube_from_rss(synt, target_scale_arcsec, conserve_flux=conserve_flux)
+ order = 1
+ s_cube = create_cube_from_rss(synt, order, target_scale_arcsec, conserve_flux=conserve_flux)
+ cube_wcs = WCS(s_cube[0].header).celestial
+ px, py = cube_wcs.wcs.crpix
+ interp = np.squeeze(s_cube[0].data)
+ td = mtransforms.Affine2D().translate(-px, -py).scale(target_scale_arcsec, target_scale_arcsec)
+ tt_d = td + ax.transData
+ # im = ax.imshow(interp, alpha=0.9, cmap='jet', transform=tt_d)
+ # im = ax.imshow(interp, alpha=0.9, cmap='jet', transform=ax.get_transform(cube_wcs))
+ if args.contour_levels is not None:
+ levels = json.loads(args.contour_levels)
+ mm = ax.contour(interp, levels, transform=tt_d)
+ else:
+ mm = ax.contour(interp, transform=tt_d)
+ print('contour levels', mm.levels)
+
+    if args.has_contours and args.contour:  # NOTE(review): 'has_contours' is a local set by the try/except import above — confirm an 'args.has_contours' CLI flag exists
+        target_scale_arcsec = args.pixel_size
+        # Build synthetic rss... for reconstruction
+        primary = fits.PrimaryHDU(data=zval[:, np.newaxis], header=img[extname].header)
+        synt = fits.HDUList([primary, img['FIBERS']])
+        order = 1
+        s_cube = create_cube_from_rss(synt, order, target_scale_arcsec)
cube_wcs = WCS(s_cube[0].header).celestial
px, py = cube_wcs.wcs.crpix
interp = np.squeeze(s_cube[0].data)
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..9787c3bd
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,3 @@
+[build-system]
+requires = ["setuptools", "wheel"]
+build-backend = "setuptools.build_meta"
diff --git a/schemas/baseimage.json b/schemas/baseimage.json
new file mode 100644
index 00000000..5b466659
--- /dev/null
+++ b/schemas/baseimage.json
@@ -0,0 +1,473 @@
+{
+ "$id": "/home/spr/devel/guaix/megaradrp/schemas/baseimage.json",
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Schema for MEGARA headers",
+ "description": "Describe MEGARA headers",
+ "anyOf": [
+ {"$ref": "#/definitions/one_ext_hdul"},
+ {"$ref": "#/definitions/two_ext_hdul"},
+ {"$ref": "#/definitions/proc_rss_wl_hdul"},
+ {"$ref": "#/definitions/proc_image_hdul"}
+ ],
+ "definitions": {
+ "structure_hdu": {
+ "type": "object",
+ "properties": {
+ "values": {
+ "type": "object"
+ },
+ "comments": {
+ "type": "object"
+ },
+ "ordering": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ },
+ "required": [
+ "values"
+ ]
+ },
+ "primary_hdu": {
+ "allOf": [
+ {"$ref": "#/definitions/structure_hdu"},
+ {
+ "type": "object",
+ "properties": {
+ "values": {
+ "allOf": [
+ {"$ref": "#/definitions/primary_hdu_values"},
+ {
+ "oneOf": [
+ {
+ "$ref": "#/definitions/raw_hdu_values"
+ },
+ {
+ "$ref": "#/definitions/rss_hdu_values"
+ },
+ {
+ "$ref": "#/definitions/proc_hdu_values"
+ },
+ {
+ "$ref": "#/definitions/spec_hdu_values"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "primary_hdu_values": {
+ "type": "object",
+ "properties": {
+ "OBSMODE": {"type": "string"},
+ "DATE-OBS": {"$ref": "#/definitions/datetime"},
+ "INSTRUME": {"type": "string", "const": "MEGARA"},
+ "BUNIT": {"type": "string", "default": "ADU"},
+ "DETECTOR": {"type": "string", "default": "CCD2231-84-0-E74"},
+ "EXPTIME": {"type": "number", "minimum": 0},
+ "DARKTIME": {"type": "number", "minimum": 0},
+ "READMODE": {"type": "string", "enum": ["NORMAL", "MIRROR"]},
+ "RSPEED": {"type": "string", "enum": ["SLOW", "FAST", "ENG"]},
+ "GAIN1": {"type": "number", "minimum": 0, "default": 1.73},
+ "GAIN2": {"type": "number", "minimum": 0, "default": 1.6},
+ "RDNOISE1": {"type": "number", "minimum": 0, "default": 3.4},
+ "RDNOISE2": {"type": "number", "minimum": 0, "default": 3.4},
+ "IPA": {"type": "number"},
+ "ARIMASS": {"type": "number"},
+ "ARIMASS1": {"type": "number"},
+ "ARIMASS2": {"type": "number"},
+ "AMSTART": {"type": "number"},
+ "AMEND": {"type": "number"},
+ "RA": {"type": "string"},
+ "DEC": {"type": "string"},
+ "RADEG": {"type": "number"},
+ "DECDEG": {"type": "number"},
+ "AZIMUTH": {"type": "number"},
+ "ELEVAT": {"type": "number"},
+ "ROTANG": {"type": "number"},
+ "LST": {"type": "string"},
+ "MDJ-OBS": {"type": "number"},
+ "SETPNT": {"type": "number"},
+ "CCDTEMP0": {"type": "number", "minimum": 0},
+ "CCDTEMP1": {"type": "number", "minimum": 0},
+ "CCDTEMP2": {"type": "number", "minimum": 0},
+ "CCDTEMP3": {"type": "number", "minimum": 0},
+ "SENTEMP0": {"type": "number"},
+ "SENTEMP1": {"type": "number"},
+ "SENTEMP2": {"type": "number"},
+ "SENTEMP3": {"type": "number"},
+ "SENTEMP4": {"type": "number"},
+ "SENTEMP5": {"type": "number"},
+ "SENTEMP6": {"type": "number"},
+ "INSMODE": {"type": "string", "enum": ["LCB", "MOS"]},
+ "VPHWHPOS": {"type": "string", "enum": [
+ "VPH1", "VPH2", "VPH3", "VPH4", "VPH5","VPH6", "VPH7","VPH8","VPH9","VPH10",
+ "VPH11"
+ ]
+ },
+ "VPH": {"type": "string"},
+ "FOCUS": {"type": "number", "minimum": 0},
+ "SLITN": {"type": "number"},
+ "OSFILTER": {"type": "string",
+ "enum": ["BLUE", "RED", " RED"]
+ },
+ "COVER": {"type": "string",
+ "enum": ["STOPPED", "BOTH_OPEN"]
+ },
+ "LAMPI1S": {"type": "integer",
+ "enum": [0,1]
+ },
+ "LAMPI1I": {"type": "number", "minimum": 0, "maximum": 100},
+ "LAMPI2S": {"type": "integer",
+ "enum": [0,1]
+ },
+ "LAMPI2I": {"type": "number", "minimum": 0, "maximum": 100},
+ "LAMPS1S": {"type": "integer",
+ "enum": [0,1]
+ },
+ "LAMPS2S": {"type": "integer",
+ "enum": [0,1]
+ },
+ "LAMPS3S": {"type": "integer",
+ "enum": [0,1]
+ },
+ "LAMPS4S": {"type": "integer",
+ "enum": [0,1]
+ },
+ "LAMPS5S": {"type": "integer", "enum": [0,1]},
+ "LAMPMIR": {"type": "string", "enum": ["WORK", "PARK", "UNDEFINED"],
+ "description": "Status of the ICM mirror"
+ },
+ "SPECLAMP": {"type": "string", "enum": ["NONE", "ThNe", "ThAr", "ThArNe", "UNKNOWN"]},
+ "OBJECT": {"type": "string"},
+ "OBSTYPE": {"type": "string",
+ "enum": ["AUXILIARY", "CALIBRATION", "SCIENTIFIC", "ENGINEERING", "OBJECT"],
+ "description": "OBJECT is not in our document"
+ },
+ "ORIGIN": {"type": "string"},
+ "INSCONF": {"$ref": "#/definitions/uuid"},
+ "UUID": {"$ref": "#/definitions/uuid"},
+ "BLCKUUID": {"$ref": "#/definitions/uuid"}
+ },
+ "required": ["OBSMODE", "DATE-OBS", "INSTRUME", "UUID", "INSCONF", "EXPTIME",
+ "VPH"]
+ },
+ "raw_hdu_values": {
+ "type": "object",
+ "properties": {
+ "NAXIS": {"const": 2},
+ "NAXIS1": {"const": 4196},
+ "NAXIS2": {"const": 4212}
+ }
+ },
+ "proc_hdu_values": {
+ "type": "object",
+ "properties": {
+ "NAXIS": {"const": 2},
+ "NAXIS1": {"const": 4096},
+ "NAXIS2": {"const": 4112}
+ }
+ },
+ "spec_hdu_values": {
+ "type": "object",
+ "properties": {
+ "NAXIS": {"const": 1},
+ "NAXIS1": {"const": 4300}
+ }
+ },
+ "rss_naxis2": {
+ "type": "object",
+ "allOf": [
+ {
+ "if": {
+ "properties": {
+ "INSMODE": {
+ "const": "LCB"
+ }
+ }
+ },
+ "then": {"properties": {"NAXIS2": {"const": 623}}}
+ },
+ {
+ "if": {
+ "properties": {
+ "INSMODE": {
+ "const": "MOS"
+ }
+ }
+ },
+ "then": {"properties": {"NAXIS2": {"const": 644}}}
+ }
+ ]
+ },
+ "rss_hdu_values": {
+ "allOf": [
+ {
+ "type": "object",
+ "properties": {
+ "NAXIS": {"const": 2},
+ "NAXIS1": {"enum": [4300, 4096]}
+ }
+ },
+ {
+ "$ref": "#/definitions/rss_naxis2"
+ }
+ ]
+ },
+ "one_ext_hdul": {
+ "description": "A MEGARA image, 1 HDU",
+ "type": "array",
+ "additionalItems": false,
+ "minItems": 1,
+ "maxItems": 1,
+ "items": [
+ {
+ "$ref": "#/definitions/primary_hdu"
+ }
+ ]
+ },
+ "two_ext_hdul": {
+ "type": "array",
+ "description": "A MEGARA image, 2 HDUs",
+ "additionalItems": false,
+ "minItems": 2,
+ "maxItems": 2,
+ "items": [
+ {
+ "$ref": "#/definitions/primary_hdu"
+ },
+ {
+ "$ref": "#/definitions/fibers_hdu"
+ }
+ ]
+ },
+ "proc_rss_wl_hdul": {
+ "type": "array",
+ "description": "A MEGARA image, 3 HDUs",
+ "additionalItems": false,
+ "minItems": 3,
+ "maxItems": 3,
+ "items": [
+ {
+ "$ref": "#/definitions/primary_hdu"
+ },
+ {
+ "$ref": "#/definitions/fibers_hdu"
+ },
+ {
+ "$ref": "#/definitions/wlmap_hdu"
+ }
+ ]
+ },
+ "proc_image_hdul": {
+ "type": "array",
+ "description": "A MEGARA image, 3 HDUs",
+ "additionalItems": false,
+ "minItems": 2,
+ "items": [
+ {
+ "$ref": "#/definitions/primary_hdu"
+ },
+ {
+ "$ref": "#/definitions/fibers_hdu"
+ },
+ {
+ "$ref": "#/definitions/variance_hdu"
+ },
+ {
+ "$ref": "#/definitions/map_hdu"
+ }
+ ]
+ },
+ "spec1d_hdul": {
+ "type": "array",
+ "description": "A MEGARA spectrum, 1 HDUs",
+ "additionalItems": false,
+ "minItems": 1,
+ "items": [
+ {
+ "type": "object"
+ }
+ ]
+ },
+ "fibers_hdu": {
+ "allOf": [
+ {"$ref": "#/definitions/structure_hdu"},
+ {
+ "type": "object",
+ "properties": {
+ "values": {
+ "allOf": [
+ {"$ref": "#/definitions/fibers_hdu_values"},
+ {
+ "oneOf": [
+ {
+ "$ref": "#/definitions/fibers_lcb_hdu_values"
+ },
+ {
+ "$ref": "#/definitions/fibers_mos_hdu_values"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "fibers_hdu_values": {
+ "type": "object",
+ "properties": {
+ "EXTNAME": {"type": "string", "const": "FIBERS"},
+ "NFIBERS": {"type": "integer", "enum": [623, 644]},
+ "NBUNDLES": {"type": "integer", "enum": [9, 92]},
+ "INSMODE": {"type": "string", "enum": ["LCB", "MOS"]},
+ "CONFID": {"$ref": "#/definitions/uuid"},
+ "CONFNAME": {"type": "string"}
+ },
+ "patternProperties": {
+ "^FIB[0123456][0-9][0-9]_N$": {"type": "string",
+ "description": "this is matching more keywords, like FIB699_N"
+ },
+ "^FIB[0123456][0-9][0-9]_X$": {"type": "number"},
+ "^FIB[0123456][0-9][0-9]_Y$": {"type": "number"},
+ "^FIB[0123456][0-9][0-9]_B$": {"type": "integer"},
+ "^FIB[0123456][0-9][0-9]_D$": {"type": "number"},
+ "^FIB[0123456][0-9][0-9]_R$": {"type": "number"},
+ "^FIB[0123456][0-9][0-9]_A$": {"type": "boolean"},
+ "^BUN0[0-9][0-9]_E$": {"type": "boolean"},
+ "^BUN0[0-9][0-9]_X$": {"type": "number"},
+ "^BUN0[0-9][0-9]_Y$": {"type": "number"},
+ "^BUN0[0-9][0-9]_O$": {"type": "number"},
+ "^BUN0[0-9][0-9]_P$": {"type": "integer"},
+ "^BUN0[0-9][0-9]_I$": {"type": "string"},
+ "^BUN0[0-9][0-9]_T$": {"type": "string"}
+ },
+ "required": ["EXTNAME","NFIBERS", "NBUNDLES", "INSMODE", "CONFID",
+ "FIB001_X", "FIB002_Y", "FIB623_X", "FIB623_Y"]
+ },
+ "fibers_lcb_hdu_values": {
+ "type": "object",
+ "properties": {
+ "NFIBERS": {"type": "integer", "const": 623},
+ "NBUNDLES": {"type": "integer", "const": 9},
+ "INSMODE": {"type": "string", "const": "LCB"},
+ "CONFID": {"$ref": "#/definitions/uuid"}
+ },
+ "required": ["FIB623_X", "FIB623_Y", "BUN000_X", "BUN093_X"]
+ },
+ "fibers_mos_hdu_values": {
+ "type": "object",
+ "properties": {
+ "NFIBERS": {"type": "integer", "const": 644},
+ "NBUNDLES": {"type": "integer"},
+ "INSMODE": {"type": "string", "const": "MOS"},
+ "CONFID": {"$ref": "#/definitions/uuid"}
+ },
+ "required": ["FIB644_X", "FIB644_Y", "BUN001_X", "BUN092_X"]
+ },
+ "wlmap_hdu": {
+ "allOf": [
+ {"$ref": "#/definitions/structure_hdu"},
+ {
+ "type": "object",
+ "properties": {
+ "values": {
+ "allOf": [
+ {"$ref": "#/definitions/wlmap_hdu_values"},
+ {
+ "oneOf": [
+ {
+ "$ref": "#/definitions/wlmap_lcb_hdu_values"
+ },
+ {
+ "$ref": "#/definitions/wlmap_mos_hdu_values"
+ }
+ ]
+ }
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "wlmap_hdu_values": {
+ "type": "object",
+ "properties": {
+ "EXTNAME": {"type": "string", "const": "WLMAP"},
+ "NAXIS1": {"type": "integer", "enum": [4300]},
+ "NAXIS2": {"type": "integer", "enum": [623, 644]}
+ },
+ "required": ["EXTNAME"]
+ },
+ "wlmap_lcb_hdu_values": {
+ "type": "object",
+ "properties": {
+ "NAXIS2": {"type": "integer", "const": 623}
+ }
+ },
+ "wlmap_mos_hdu_values": {
+ "type": "object",
+ "properties": {
+ "NAXIS2": {"type": "integer", "const": 644}
+ }
+ },
+ "variance_hdu": {
+ "allOf": [
+ {"$ref": "#/definitions/structure_hdu"},
+ {
+ "type": "object",
+ "properties": {
+ "values": {
+ "allOf": [
+ {"$ref": "#/definitions/variance_hdu_values"}
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "variance_hdu_values": {
+ "type": "object",
+ "properties": {
+ "EXTNAME": {"type": "string", "const": "VARIANCE"}
+ },
+ "required": ["EXTNAME"]
+ },
+ "map_hdu": {
+ "allOf": [
+ {"$ref": "#/definitions/structure_hdu"},
+ {
+ "type": "object",
+ "properties": {
+ "values": {
+ "allOf": [
+ {"$ref": "#/definitions/map_hdu_values"}
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "map_hdu_values": {
+ "type": "object",
+ "properties": {
+ "EXTNAME": {"type": "string", "const": "MAP"}
+ },
+ "required": ["EXTNAME"]
+ },
+ "uuid": {
+ "type": "string",
+ "pattern": "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$"
+ },
+ "datetime": {
+ "type": "string",
+ "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(\\.[0-9]{1,4})?$"
+ }
+ }
+}
\ No newline at end of file
diff --git a/setup.cfg b/setup.cfg
index 2fd84836..20d0f2b0 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,5 +1,77 @@
+[metadata]
+name = megaradrp
+version = 0.10.1
+author = Sergio Pascual
+author_email = sergiopr@fis.ucm.es
+url = https://github.com/guaix-ucm/megaradrp
+license = GPLv3
+description = MEGARA Data Reduction Pipeline
+long_description = file:README.rst
+long_description_content_type = text/x-rst
+classifiers =
+ Programming Language :: Python :: 2.7
+ Programming Language :: Python :: 3.5
+ Programming Language :: Python :: 3.6
+ Programming Language :: Python :: 3.7
+ Programming Language :: Python :: 3.8
+ Development Status :: 3 - Alpha
+ Environment :: Console
+ Intended Audience :: Science/Research
+ License :: OSI Approved :: GNU General Public License (GPL)
+ Operating System :: OS Independent
+ Topic :: Scientific/Engineering :: Astronomy
+
+[options]
+packages = find:
+install_requires =
+ setuptools>=39.2
+ numpy
+ astropy
+ scipy
+ numina >= 0.22
+ scikit-image
+ enum34; python_version<"3.4"
+ contextlib2; python_version<"3.5"
+ jsonschema
+
+[options.package_data]
+megaradrp = drp.yaml
+megaradrp.instrument.configs =
+ primary.txt
+ lcb_default_header.txt
+ mos_default_header.txt
+ component-*.json
+ instrument-*.json
+ properties-*.json
+
+[options.extras_require]
+test =
+ pytest
+ pytest-remotedata
+docs =
+ sphinx
+DB =
+ sqlalchemy
+ numinadb
+
+[options.entry_points]
+numina.pipeline.1 =
+ MEGARA = megaradrp.loader:load_drp
+
+numinadb.extra.1 =
+ MEGARA = megaradrp.db [DB]
+
+console_scripts =
+ megaradrp-overplot_traces = megaradrp.tools.overplot_traces:main
+ megaradrp-heal_traces = megaradrp.tools.heal_traces:main
+ megaradrp-cube = megaradrp.processing.cube:main
+
+[tool:pytest]
+testpaths = "megaradrp"
+remote_data_strict = true
+
[build_ext]
inplace = 0
[bdist_wheel]
-universal = 1
\ No newline at end of file
+universal = 1
diff --git a/setup.py b/setup.py
index 5a8779e6..d45274c3 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@
setup(
name='megaradrp',
- version='0.9.3',
+ version='0.10.1',
author='Sergio Pascual',
author_email='sergiopr@fis.ucm.es',
url='https://github.com/guaix-ucm/megaradrp',
@@ -24,19 +24,25 @@
'instrument-*.json',
'properties-*.json',
],
+ 'megaradrp.schemas': [
+ 'baseimage.json',
+ 'basestruct.json'
+ ]
},
install_requires=[
'setuptools>=36.2.1',
'numpy',
'astropy >= 2',
'scipy',
- 'numina >= 0.21',
+ 'numina >= 0.22',
'scikit-image',
'enum34;python_version<"3.4"',
'contextlib2;python_version<"3.5"',
+ 'jsonschema'
],
extras_require={
- 'DB': ['sqlalchemy', 'numinadb']
+ 'DB': ['sqlalchemy', 'numinadb'],
+ 'test': ['pytest', 'pytest-remotedata']
},
zip_safe=False,
entry_points={
@@ -48,14 +54,16 @@
],
'console_scripts': [
'megaradrp-overplot_traces = megaradrp.tools.overplot_traces:main',
+ 'megaradrp-heal_traces = megaradrp.tools.heal_traces:main',
+ 'megaradrp-cube = megaradrp.processing.cube:main',
],
},
classifiers=[
"Programming Language :: Python :: 2.7",
- "Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
'Development Status :: 3 - Alpha',
"Environment :: Console",
"Intended Audience :: Science/Research",
diff --git a/tools/check_overscan.py b/tools/check_overscan.py
new file mode 100644
index 00000000..dcaa7ab3
--- /dev/null
+++ b/tools/check_overscan.py
@@ -0,0 +1,116 @@
+
+import matplotlib.pyplot as plt
+import astropy.io.fits as fits
+import numpy as np
+import scipy.ndimage as filt
+from scipy.interpolate import LSQUnivariateSpline
+
+
+conf = {"trim1": [[0,2056],[50,4146]],
+ "trim2": [[2156,4212],[50,4146]],
+ "overscan1": [[0,2056],[4146,4196]],
+ "overscan1_corner": [[2056,2106],[4146,4196]],
+ "overscan2": [[2156,4212],[0,50]],
+ "overscan2_corner": [[2106,2156],[4146,4196]],
+ "prescan1": [[0,2056],[0,50]],
+ "prescan2": [[2156,4212],[4146,4196]],
+ "middle1": [[2056,2106],[50,4146]],
+ "middle2": [[2106,2156],[50,4146]],
+ "gain1": 1.73,
+ "gain2": 1.6
+}
+
+
+def to_slice(sec):
+ sec1, sec2 = sec
+ return slice(*sec1), slice(*sec2)
+
+
+def to_str(sec, format='fits'):
+ sec1, sec2 = sec
+ return str([to_index(sec2), to_index(sec1)])
+
+
+def to_index(ssec, format='fits'):
+ a, b = ssec
+ return [a + 1, b]
+
+
+def to_pix(sec, axis=0):
+ return np.arange(*sec[axis])
+
+
+def plot2(data, name, knots, ax, s=0, r=-1):
+ reg = conf[name]
+ axis_u = 0
+ axis_v = 1
+ plot4(data, reg, axis_u, axis_v, knots, ax, s=s, r=r)
+
+
+def plot3(data, name, knots, ax, s=0, r=-1):
+ reg = conf[name]
+ axis_u = 1
+ axis_v = 0
+ plot4(data, reg, axis_u, axis_v, knots, ax, s=s, r=r)
+
+
+def plot4(data, reg, axis_u, axis_v, knots, ax, s=0, r=-1):
+
+ u = to_pix(reg, axis=axis_u)
+ region = to_slice(reg)
+ v = data[region].mean(axis=axis_v)
+ v = filt.median_filter(v, size=7)
+
+ spl2 = LSQUnivariateSpline(u, v, knots, k=3)
+ v_spl2 = spl2(u)
+
+ ax.plot(u[s:r], v[s:r], label="D")
+ ax.plot(u[s:r], v_spl2[s:r], label="S2")
+
+
+if __name__ == '__main__':
+ import os
+ import argparse
+
+ parser = argparse.ArgumentParser(description='Check overscan')
+ parser.add_argument('filename', metavar='FILE', nargs='+',
+ help='Check overscan')
+
+
+ args = parser.parse_args()
+
+
+ for fname in args.filename:
+ print(fname)
+ fname_base, ext = os.path.splitext(fname)
+
+ hdulist = fits.open(fname)
+ hdu = hdulist[0]
+ data = hdu.data
+ s = 0
+ r = -1
+        # Earlier knot sets kept for reference; only the final assignments below are used.
+        # knots1 = [10, 100, 200, 300, 400, 500, 750, 1000, 1200, 1500, 1700, 2000]
+        # knots2 = [2200, 3000, 3500, 3900, 4000, 4100, 4200]
+        # knots1 = [125, 250, 375, 500, 1000, 1500]
+        # knots2 = [2500, 3000, 3500, 3600, 3700, 3800]
+
+ knots1 = [1200]
+ knots2 = [3100]
+ knotsm = [2100]
+
+ for regname, knots in zip(['overscan1', 'overscan2'], [knots1, knots2]):
+ fig, axes = plt.subplots(1, 1)
+ regs = to_str(conf[regname])
+ axes.set_title("{}\n{} {}".format(fname, regname, regs))
+ plot2(data, regname, knots, axes, s=s, r=r)
+ plt.savefig("{}_{}.png".format(fname_base, regname))
+ plt.close()
+
+ for regname, knots in zip(['middle1', 'middle2'], [knotsm, knotsm]):
+ fig, axes = plt.subplots(1, 1)
+ regs = to_str(conf[regname])
+ axes.set_title("{}\n{} {}".format(fname, regname, regs))
+ plot3(data, regname, knots, axes, s=s, r=r)
+ plt.savefig("{}_{}.png".format(fname_base, regname))
+ plt.close()
diff --git a/tools/find_boxes.py b/tools/find_boxes.py
index 91515a34..1787144a 100644
--- a/tools/find_boxes.py
+++ b/tools/find_boxes.py
@@ -9,31 +9,10 @@
from numina.array.display.ximshow import ximshow
from numina.array.display.pause_debugplot import pause_debugplot
from numina.array.display.ximplotxy import ximplotxy
+from numina.array.wavecalib.crosscorrelation import cosinebell
from numina.drps import get_system_drps
-def cosinebell(n, fraction):
- """Return a cosine bell spanning n pixels, masking a fraction of pixels
-
- Parameters
- ----------
- n : int
- Number of pixels.
- fraction : float
- Length fraction over which the data will be masked.
-
- """
-
- mask = np.ones(n)
- nmasked = int(fraction * n)
- for i in range(nmasked):
- yval = 0.5 * (1 - np.cos(np.pi * float(i) / float(nmasked)))
- mask[i] = yval
- mask[n - i - 1] = yval
-
- return mask
-
-
def find_boxes(fitsfile, channels, nsearch, debugplot):
"""Refine boxes search around previous locations.
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 00000000..b679e935
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,13 @@
+# tox (https://tox.readthedocs.io/) is a tool for running tests
+# in multiple virtualenvs. This configuration file will run the
+# test suite on all supported python versions. To use it, "pip install tox"
+# and then run "tox" from this directory.
+
+[tox]
+envlist = py27, py35, py36, py37, py38
+
+[testenv]
+extras =
+ test
+commands =
+ pytest --remote-data=none