
Commit

FocusRecipe has been modified to generate a JSON file with the wavelength information
Pica committed Apr 20, 2016
1 parent d418ded commit b390353
Showing 3 changed files with 92 additions and 45 deletions.
29 changes: 24 additions & 5 deletions megaradrp/products.py
@@ -69,10 +69,6 @@ class MasterSlitFlat(MEGARAProductFrame):
pass


class MasterTwilightFlat(MEGARAProductFrame):
pass


class MasterFiberFlatFrame(MEGARAProductFrame):
pass

@@ -128,4 +124,27 @@ def _datatype_load(self, obj):
import tarfile
return tarfile.open(obj, 'r')
except IOError as e:
raise e
raise e


class JSONstorage(DataProductType):
def __init__(self, default=None):
super(JSONstorage, self).__init__(ptype=dict, default=default)

def _datatype_dump(self, obj, where):
import json
filename = where.destination + '.json'

with open(filename, 'w') as fd:
fd.write(json.dumps(obj))

return filename

def _datatype_load(self, obj):
import json
try:
with open(obj, 'r') as fd:
data = json.load(fd)
except IOError as e:
raise e
return data
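The new JSONstorage product type serializes a plain Python dict to a '<destination>.json' file and reads it back on load. A minimal sketch of the same round trip with the standard json module (the file name and payload are illustrative, not part of the commit):

import json

# Mirror of JSONstorage._datatype_dump: write the dict to '<destination>.json'.
payload = {'r0001.fits': {'0': [[1510.2, 1022.7, 3.1, 6562.9]]}}
filename = 'focus_wavelength' + '.json'
with open(filename, 'w') as fd:
    fd.write(json.dumps(payload))

# Mirror of JSONstorage._datatype_load: read it back as a dict.
with open(filename, 'r') as fd:
    data = json.load(fd)

assert data == payload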
104 changes: 66 additions & 38 deletions megaradrp/recipes/auxiliary/focusspec.py
@@ -25,7 +25,6 @@

import numpy
from scipy.spatial import cKDTree

from numina.core import Requirement, Parameter
from numina.core.dataholders import Product
from numina.core import DataFrameType
@@ -34,20 +33,16 @@
from numina.core.products import LinesCatalog
import numina.array.fwhm as fmod
from numina.array.wavecal.statsummary import sigmaG
# FIXME: still using findpeaks1D functions
from numina.array.peaks.findpeaks1D import findPeaks_spectrum
from numina.array.peaks.peakdet import find_peaks_indexes, refine_peaks
from numina.array.peaks.findpeaks1D import refinePeaks_spectrum
from numina.flow.processing import BiasCorrector
from numina.flow import SerialFlow
import astropy.io.fits as fits

from megaradrp.processing.trimover import OverscanCorrector, TrimImage
from megaradrp.core.recipe import MegaraBaseRecipe
from megaradrp.products import TraceMap
from megaradrp.products import TraceMap, WavelengthCalibration, JSONstorage
from megaradrp.requirements import MasterBiasRequirement
from megaradrp.core.processing import apextract_tracemap
import astropy.io.fits as fits


_logger = logging.getLogger('numina.recipes.megara')

@@ -61,9 +56,12 @@ class FocusSpectrographRecipe(MegaraBaseRecipe):
tracemap = Requirement(TraceMap, 'Trace information of the Apertures')
lines_catalog = Requirement(LinesCatalog, 'Catalog of lines')
polynomial_degree = Parameter(2, 'Polynomial degree of arc calibration')
wlcalib = Requirement(WavelengthCalibration,
'Wavelength calibration table')
# Products
focus_table = Product(ArrayType)
focus_image = Product(DataFrameType)
focus_wavelength = Product(JSONstorage)

def __init__(self):
super(FocusSpectrographRecipe, self).__init__("0.1.0")
@@ -75,13 +73,16 @@ def generate_image(self, final):
x = numpy.arange(2048 * 2)
y = numpy.arange(2056 * 2)

test_points = numpy.transpose([numpy.tile(x, len(y)),numpy.repeat(y, len(x))])
test_points = numpy.transpose(
[numpy.tile(x, len(y)), numpy.repeat(y, len(x))])

voronoi_kdtree = cKDTree(voronoi_points)

test_point_dist, test_point_regions = voronoi_kdtree.query(test_points, k=1)
final_image = test_point_regions.reshape((4112,4096)).astype('float64')
final_image[:,:] = final[final_image[:,:].astype('int64'),2]
test_point_dist, test_point_regions = voronoi_kdtree.query(test_points,
k=1)
final_image = test_point_regions.reshape((4112, 4096)).astype(
'float64')
final_image[:, :] = final[final_image[:, :].astype('int64'), 2]
return (final_image)
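generate_image assigns every pixel of the 4112 x 4096 detector the focus value of its nearest measured line, a nearest-neighbour (Voronoi) assignment done with a single cKDTree query. The same pattern on a toy 8 x 8 grid (the points and focus values are invented for illustration):

import numpy
from scipy.spatial import cKDTree

# Measured line positions (x, y) and their best-focus values.
points = numpy.array([[1.0, 1.0], [6.0, 2.0], [3.0, 6.0]])
focus = numpy.array([0.12, -0.05, 0.30])

# Build all pixel coordinates of an 8 x 8 grid, query the nearest line,
# and fill each pixel with that line's focus value.
x = numpy.arange(8)
y = numpy.arange(8)
grid = numpy.transpose([numpy.tile(x, len(y)), numpy.repeat(y, len(x))])
_, regions = cKDTree(points).query(grid, k=1)
image = focus[regions].reshape((8, 8))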

def run(self, rinput):
@@ -109,10 +110,13 @@ def run(self, rinput):
_logger.info('pair lines in images')
line_fibers = self.filter_lines(ever)

focus_wavelength = self.generateJSON(ever, rinput.wlcalib,
rinput.obresult.images)

_logger.info('fit FWHM of lines')
final = self.reorder_and_fit(line_fibers, focci)

focus_median = numpy.median(final[:,2])
focus_median = numpy.median(final[:, 2])
_logger.info('median focus value is %5.2f', focus_median)

_logger.info('generate focus image')
@@ -121,22 +125,46 @@ def run(self, rinput):
hdulist = fits.HDUList([hdu])

_logger.info('end focus spectrograph')
return self.create_result(focus_table=final, focus_image=hdulist)
return self.create_result(focus_table=final, focus_image=hdulist,
focus_wavelength=focus_wavelength)

def generateJSON(self, data, wlcalib, original_images):
from numpy.polynomial.polynomial import polyval

_logger.info('start JSON generation')

result = {}
counter = 0

for image in data:
name = original_images[counter].filename
result[name] = {}
for fiber, value in image.items():
result[name][fiber] = []
for arco in value:
res = polyval(arco[0], wlcalib[fiber])
result[name][fiber].append(
[arco[0], arco[1], arco[2], res])
counter += 1

_logger.info('end JSON generation')

return result
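generateJSON produces, for each raw image in the observation result, a mapping from file name to fibers, and for each fiber a list of [peak column, position on trace, FWHM, wavelength] entries; the wavelength comes from evaluating the fiber's calibration coefficients at the peak column with numpy.polynomial.polynomial.polyval, which expects coefficients in increasing order of degree. An illustrative fragment of the resulting structure (numbers are made up):

{
    "r0001.fits": {
        "0": [[1510.2, 1022.7, 3.1, 6562.9],
              [1733.8, 1022.9, 3.3, 6678.2]]
    }
}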

def get_focus(self, points, final):
a = final[:, [0, 1]]

b = points

index = numpy.where(numpy.all(a==b,axis=1))
return final[index[0],[2]]

index = numpy.where(numpy.all(a == b, axis=1))
return final[index[0], [2]]

def create_flow(self, master_bias):

biasmap = master_bias.open()[0].data

flows = [OverscanCorrector(), TrimImage(), BiasCorrector(biasmap=biasmap)]
flows = [OverscanCorrector(), TrimImage(),
BiasCorrector(biasmap=biasmap)]
basicflow = SerialFlow(flows)
return basicflow

@@ -161,17 +189,14 @@ def run_on_image(self, rssdata, tracemap):
# find peaks
threshold = numpy.median(row) + times_sigma * sigmaG(row)

ipeaks_int = findPeaks_spectrum(row, nwinwidth, threshold)
ipeaks_int_aux = find_peaks_indexes(row, nwinwidth, threshold)
assert numpy.allclose(ipeaks_int, ipeaks_int_aux)
ipeaks_float = refinePeaks_spectrum(row, ipeaks_int, nwinwidth)
ipeaks_int = find_peaks_indexes(row, nwinwidth, threshold)
ipeaks_float = refine_peaks(row, ipeaks_int, nwinwidth)[0]

# self.pintarGrafica(refine_peaks(row, ipeaks_int, nwinwidth)[0] - refinePeaks_spectrum(row, ipeaks_int, nwinwidth))

fpeaks[idx] = []
for peak, peak_f in zip(ipeaks_int, ipeaks_float):
qslit = row[peak-lwidth:peak+lwidth]
qslit = row[peak - lwidth:peak + lwidth]
peak_val, fwhm = fmod.compute_fwhm_1d_simple(qslit, lwidth)
peak_on_trace = the_pol(peak)
fpeaks[idx].append((peak_f, peak_on_trace, fwhm))
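The peak search now uses only numina.array.peaks.peakdet; the old findpeaks1D calls and the cross-checking assert are gone. A self-contained sketch of the same call pattern on a synthetic spectrum (the Gaussian lines, window width and threshold are illustrative):

import numpy
from numina.array.peaks.peakdet import find_peaks_indexes, refine_peaks

# Synthetic spectrum with two emission lines.
x = numpy.arange(200, dtype=float)
row = (numpy.exp(-0.5 * ((x - 60.0) / 2.5) ** 2) +
       0.7 * numpy.exp(-0.5 * ((x - 140.0) / 2.5) ** 2))

nwinwidth = 5
threshold = 0.1
ipeaks_int = find_peaks_indexes(row, nwinwidth, threshold)   # integer peak indexes
ipeaks_float = refine_peaks(row, ipeaks_int, nwinwidth)[0]   # sub-pixel positions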
@@ -188,10 +213,13 @@ def pintarGrafica(self, diferencia_final):
ax.plot(ejeX, diferencia_final, label="0")
# lgd = ax.legend(loc='upper center', bbox_to_anchor=(0.5, -0.1), ncol=4,
# mode="expand")
lgd = ax.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc='upper center', ncol=4, mode="expand", borderaxespad=0.)
lgd = ax.legend(bbox_to_anchor=(0., 1.02, 1., .102),
loc='upper center', ncol=4, mode="expand",
borderaxespad=0.)
handles, labels = ax.get_legend_handles_labels()

fig.savefig('diferencia.eps', format='eps', dpi=1500, bbox_extra_artists=(lgd,), bbox_inches='tight')
fig.savefig('diferencia.eps', format='eps', dpi=1500,
bbox_extra_artists=(lgd,), bbox_inches='tight')
plt.draw()
plt.show()

@@ -215,19 +243,21 @@ def filter_lines(self, data):
savelines[i] = {}
savelines[i]['basedata'] = {}
savelines[i]['centers'] = []
kdtree = cKDTree(ref[:,:2])
kdtree = cKDTree(ref[:, :2])
for i in range(ntotal):
if i == center:
for j in range(outref):
savelines[j]['basedata']['coordinates'] = tuple(ref[j,:2])
savelines[j]['centers'].append(ref[j,2])
savelines[j]['basedata']['coordinates'] = tuple(
ref[j, :2])
savelines[j]['centers'].append(ref[j, 2])
continue
comp = numpy.array(data[i][fiberid])

qdis, qidx = kdtree.query(comp[:,:2], distance_upper_bound=maxdis)
qdis, qidx = kdtree.query(comp[:, :2],
distance_upper_bound=maxdis)
for compidx, lidx in enumerate(qidx):
if lidx < outref:
savelines[lidx]['centers'].append(comp[compidx,2])
savelines[lidx]['centers'].append(comp[compidx, 2])

remove_groups = []

@@ -236,32 +266,30 @@ def filter_lines(self, data):
remove_groups.append(ir)

for ir in remove_groups:
_logger.debug('remove group of lines %d in fiber %d',ir, fiberid)
_logger.debug('remove group of lines %d in fiber %d', ir,
fiberid)
del savelines[ir]

return line_fibers

def reorder_and_fit(self, line_fibers, focii):
"""Fit all the values of FWHM to a 2nd degree polynomial and return minimum."""

l = 0
for i in line_fibers:
for _ in line_fibers[i]:
l += 1
l = sum(len(value) for key, value in line_fibers.items())

_logger.debug('there are %d groups of lines to fit', l)
ally = numpy.zeros((len(focii), l))
final = numpy.zeros((l, 3))
l = 0
for i in line_fibers:
for j in line_fibers[i]:
ally[:,l] = line_fibers[i][j]['centers']
ally[:, l] = line_fibers[i][j]['centers']
final[l, :2] = line_fibers[i][j]['basedata']['coordinates']
l += 1

res = numpy.polyfit(focii, ally, deg=2)
_logger.debug('fitting to deg 2 polynomial, done')
best= -res[1] / (2*res[0])
final[:,2] = best
best = -res[1] / (2 * res[0])
final[:, 2] = best

return final
return final
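reorder_and_fit fits the FWHM of every line group against the focus positions with a second-degree polynomial and stores the vertex of each parabola, -b / (2a), as the best focus. A toy version of the fit under the same conventions (numpy.polyfit returns coefficients highest degree first, one column per fitted group; the numbers are invented):

import numpy

# FWHM measured at five focus positions for two line groups (one column each).
focii = numpy.array([-2.0, -1.0, 0.0, 1.0, 2.0])
fwhm = numpy.array([[4.1, 3.9],
                    [3.4, 3.1],
                    [3.0, 2.8],
                    [3.3, 3.0],
                    [4.0, 3.7]])

res = numpy.polyfit(focii, fwhm, deg=2)   # shape (3, ngroups)
best = -res[1] / (2 * res[0])             # vertex of each parabola = best focus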
4 changes: 2 additions & 2 deletions megaradrp/recipes/calibration/flat.py
@@ -187,7 +187,7 @@ def load_files_from_directory(self, tar_file, img):

pool2 = mp.Pool(processes=self.processes)
extracted_w = [pool2.apply_async(extract_w_paralell,
args=(ite, img[:,ite], results[ite])) for ite in range(self.SIZE)]
args=(img[:,ite], results[ite])) for ite in range(self.SIZE)]
extracted_w = [p.get() for p in extracted_w]

_logger.info('extracted')
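The only change in flat.py is that extract_w_paralell no longer takes the unused column index, so apply_async passes just the column data and its per-column argument. A minimal sketch of that apply_async / get pattern (the worker body and arrays are placeholders, not the real extraction code):

import multiprocessing as mp
import numpy

def extract_w_paralell(img, mlist):
    # Placeholder worker: the real function extracts a wavelength-resampled column.
    return float(numpy.sum(img)) + mlist

if __name__ == '__main__':
    img = numpy.ones((100, 4))
    results = [0.0, 1.0, 2.0, 3.0]
    pool2 = mp.Pool(processes=2)
    extracted_w = [pool2.apply_async(extract_w_paralell,
                                     args=(img[:, ite], results[ite]))
                   for ite in range(img.shape[1])]
    extracted_w = [p.get() for p in extracted_w]
    pool2.close()
    pool2.join()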
@@ -214,7 +214,7 @@ def run_tracemap(self, rinput):
result = self.create_result(fiberflat_frame=reduced, master_fiberflat=rss)
return result

def extract_w_paralell(col, img, mlist):
def extract_w_paralell(img, mlist):

'''
:param img: <fits>
