Skip to content

Commit

Permalink
passing self-test for load and download--major API rework to use xarray.Dataset
Browse files Browse the repository at this point in the history
  • Loading branch information
scivision committed Mar 29, 2018
1 parent 4ad49df commit 73cceeb
Show file tree
Hide file tree
Showing 12 changed files with 213 additions and 248 deletions.
19 changes: 19 additions & 0 deletions .appveyor.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
image:
- Visual Studio 2017

stack: python 3

environment:
PY_DIR: C:\Python36-x64

clone_depth: 3

before_build:
- cmd: set PATH=%PY_DIR%;%PY_DIR%\Scripts;%PATH%

build_script:
- pip install -e .[tests]

after_build:
- pytest -v

13 changes: 13 additions & 0 deletions .coveragerc
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
[run]
cover_pylib = false
omit =
/home/travis/virtualenv/*

[report]
exclude_lines =
pragma: no cover
def __repr__
RuntimeError
NotImplementedError
ImportError

8 changes: 5 additions & 3 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,10 @@ git:
install: pip -q install -e .[tests]

script:
- pytest
- coverage run tests/test_all.py -v
- pytest -v


after_success: coveralls
after_success:
- coverage run tests/test_all.py
- coveralls

8 changes: 2 additions & 6 deletions DownloadDASC.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
#!/usr/bin/env python
from dateutil.parser import parse
#
from dascutils import getdasc
import dascutils as du

if __name__ == '__main__':
from argparse import ArgumentParser
Expand All @@ -14,6 +12,4 @@
p = p.parse_args()

#host = "ftp://mirrors.arsc.edu/AMISR/PKR/DASC/RAW/"
startend = [parse(t) for t in p.startend]

getdasc(startend, p.host,p.site, p.odir, p.clobber)
du.download(p.startend, p.host,p.site, p.odir, p.clobber)
4 changes: 2 additions & 2 deletions PlotDASC.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
This program by default projects HiST auroral tomography system FOV onto PFRR DASC.
"""
from dascutils.readDASCfits import readallDasc
import dasciutils.io as dui
from dascutils.plots import histdasc,moviedasc

def plotdasc(img,wavelength,odir,cadence,rows,cols):
Expand All @@ -28,6 +28,6 @@ def plotdasc(img,wavelength,odir,cadence,rows,cols):



img,times,waz,wel,wlla,wwl = readallDasc(p.indir, p.azfn, p.elfn, p.wavelength, p.minmax, p.tlim)
img = dui.load(p.indir, p.azfn, p.elfn, p.wavelength, p.minmax, p.tlim)

plotdasc(img, wwl, p.odir, p.cadence, None, None)
13 changes: 5 additions & 8 deletions PlotDASC_HiST.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,18 +4,15 @@
This program by default projects HiST auroral tomography system FOV onto PFRR DASC.
"""
import matplotlib
#matplotlib.use('Agg')
#
from dascutils.readDASCfits import readallDasc
from dascutils.io import readallDasc
from dascutils.plots import histdasc,moviedasc
#
from themisasi.fov import mergefov

def plothstfovondasc(img,wavelength,odir,cadence,rows,cols):
histdasc(img,wavelength,odir) #histogram
def plothstfovondasc(imgs,odir,cadence,rows,cols):
histdasc(imgs, odir) #histogram

moviedasc(img,wavelength,times,odir,cadence,rows,cols)
moviedasc(imgs,times,odir,cadence,rows,cols)


if __name__ == '__main__':
Expand All @@ -38,7 +35,7 @@ def plothstfovondasc(img,wavelength,odir,cadence,rows,cols):
try:
plothstfovondasc(img,p.wavelength,p.odir,p.cadence,rows,cols)
except NameError:
img,times,waz,wel,wlla = readallDasc(p.indir,p.azfn,p.elfn,p.wavelength,p.minmax,p.tlim)
imgs = readallDasc(p.indir,p.azfn,p.elfn,p.wavelength,p.minmax,p.tlim)
rows,cols = mergefov(ocalfn,wlla,waz,wel,None,None,p.ncal,p.projalt,site='DASC')

plothstfovondasc(img,p.wavelength,p.odir,p.cadence,rows,cols)
15 changes: 9 additions & 6 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,11 @@
.. image:: https://travis-ci.org/scivision/dascutils.svg?branch=master
:target: https://travis-ci.org/scivision/dascutils

.. image:: https://coveralls.io/repos/github/scivision/dascutils/badge.svg?branch=master
:target: https://coveralls.io/github/scivision/dascutils?branch=master
.. image:: https://coveralls.io/repos/github/scivision/dascutils/badge.svg?branch=master
:target: https://coveralls.io/github/scivision/dascutils?branch=master

.. image:: https://ci.appveyor.com/api/projects/status/xrtb6fc3d4ojp507?svg=true
:target: https://ci.appveyor.com/project/scivision/dascutils

.. image:: https://api.codeclimate.com/v1/badges/36b08deedc7d2bf750c8/maintainability
:target: https://codeclimate.com/github/scivision/dascutils/maintainability
Expand All @@ -23,7 +26,7 @@ Utilities for plotting, saving, analyzing the Poker Flat Research Range Digital

This program handles the corrupted FITS files due to the RAID array failure on 2013 data.

The raw data FITS are one image per file
The raw data FITS are one image per file.

.. contents::

Expand All @@ -32,13 +35,13 @@ Install
::

pip install -e .

Download raw DASC files by time
===============================
Example download October 7, 2015 from 8:23 to 8:54 UTC::

./DownloadDASC.py 2015-10-07T08:23Z 2015-10-07T08:54Z
./DownloadDASC.py 2015-10-07T08:23Z 2015-10-07T08:54Z

-o download directory
-c clobber existing files
-s three-letter site acronym PKR for poker flat etc.
Expand Down
18 changes: 7 additions & 11 deletions dascutils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,7 @@
from dateutil.parser import parse
from datetime import datetime
from urllib.parse import urlparse
from pytz import UTC
#
from sciencedates import forceutc

EPOCH = datetime(1970,1,1,tzinfo=UTC)

def totimestamp(t):
"""
Expand All @@ -24,28 +20,28 @@ def totimestamp(t):
t = totimestamp(parse(t))
elif isinstance(t,(float,int)):
t = float(t)
assert 1e9 < t < 3e9, f'did you really mean {datetime.fromtimestamp(t,tz=UTC)}'
assert 1e9 < t < 3e9, f'did you really mean {datetime.fromtimestamp(t)}'
else: # assume it's an iterable 1-D vector
t = list(map(totimestamp,t))

return t


def getdasc(startend,host,site,odir='',clobber=False):
def download(startend,host,site,odir='',clobber=False):
"""
startend: tuple of datetime
year,month,day: integer
hour, minute: start,stop integer len == 2
"""
assert len(startend)==2

start = forceutc(startend[0])
end = forceutc(startend[1])
start = parse(startend[0]) if isinstance(startend[0],str) else startend[0]
end = parse(startend[1]) if isinstance(startend[1],str) else startend[1]

parsed = urlparse(host)
ftop = parsed[1]
fpath = parsed[2] + site
odir = Path(odir).expanduser()
odir = Path(odir).expanduser().resolve()
#%% get available files for this day
rparent = f'{fpath}/DASC/RAW/{start.year:4d}'
rday = f'{start.year:4d}{start.month:02d}{start.day:02d}'
Expand All @@ -56,15 +52,15 @@ def getdasc(startend,host,site,odir='',clobber=False):
if not rday in dlist:
raise FileNotFoundError(f'{rday} does not exist under {host}/{rparent}')

print('downloading to', odir.resolve())
print('downloading to', odir)
F.cwd(rday)
dlist = F.nlst()

print(f'remote filesize approx. {F.size(dlist[0])/1000} kB.')
for f in dlist:
#%% file in time range
#print (int(round(float(f[27:31]))))
t = forceutc(datetime.strptime(f[14:-9],'%Y%m%d_%H%M%S'))
t = datetime.strptime(f[14:-9],'%Y%m%d_%H%M%S')
if start <= t <= end:
#%% download file
ofn = odir / f
Expand Down
125 changes: 125 additions & 0 deletions dascutils/io.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
#!/usr/bin/env python
"""
Reads DASC allsky cameras images in FITS formats into GeoData.
Run standalone from PlayDASC.py
"""
from pathlib import Path
import warnings # corrupt FITS files let off a flood of AstroPy warnings
from astropy.io.fits.verify import VerifyWarning
import logging
from astropy.io import fits
import numpy as np
from datetime import timedelta
from dateutil.parser import parse
import xarray


def load(flist:list, azfn:Path=None, elfn:Path=None, minmax:tuple=None, treq:list=None) -> xarray.Dataset:
"""
reads FITS images and spatial az/el calibration for allsky camera
Bdecl is in degrees, from IGRF model
"""
warnings.filterwarnings('ignore', category=VerifyWarning)

if not flist:
raise FileNotFoundError('no files of this wavelength')

if isinstance(flist,(str,Path)):
flist = [flist]

#%% read one file mode
if treq is not None:
expstart = []

for i,fn in enumerate(flist):
try:
with fits.open(fn, mode='readonly') as h:
expstart.append(parse(h[0].header['OBSDATE'] + ' ' + h[0].header['OBSSTART']))
except IOError: #many corrupted files, accounted for by preallocated vectors
pass

expstart = np.array(expstart)

if isinstance(treq,float) or len(treq) == 1: # single frame
fi = np.nanargmin(abs(expstart-treq)) #index number in flist desired
elif len(treq)==2: #frames within bounds
if treq[0] is not None:
fi = (treq[0] <= expstart)
if treq[1] is not None:
fi &= (expstart < treq[1])
elif treq[1] is not None:
fi = (expstart < treq[1])
else:
fi = slice(None)
else:
fi = slice(None)

flist = flist[fi]
if len(flist)==0:
raise FileNotFoundError('no files found within time limits')

if isinstance(flist,Path): # so that we can iterate
flist = [flist]
#%% iterate over image files
time = []; img= []; wavelen = []

for i,fn in enumerate(flist):
try:
with fits.open(fn, mode='readonly') as h:
assert h[0].header['BITPIX']==16,'this function assumes unsigned 16-bit data'
expstart = parse(h[0].header['OBSDATE'] + 'T' + h[0].header['OBSSTART'])

time.append((expstart, expstart + timedelta(seconds=h[0].header['EXPTIME']))) #EXPTIME is in seconds

wavelen.append(int(h[0].header['FILTWAV']))

sensorloc={'lat':h[0].header['GLAT'],
'lon':h[0].header['GLON'],
'alt_m':200.} # TODO use real altitude

"""
DASC iKon cameras are/were 14-bit at least through 2015. So what they did was
just write unsigned 14-bit data into signed 16-bit integers, which doesn't overflow
since 14-bit \in {0,16384}.
These files do not have a BZERO value. Someday when they're written correctly this
code may need updating.
Further, there was a RAID failure that filled the data files with random values.
Don Hampton says about 90% of data OK, but 10% NOK.
"""

I = np.rot90(h[0].data,-1) #NOTE: rotation to match online AVIs from UAF website. It's not transpose, and the cal file seems off.
if not 'BZERO' in h[0].header.keys():
I[I>16384] = 0 #extreme, corrupted data
I = I.clip(0,16384).astype(np.uint16) #discard bad values for 14-bit cameras.

img.append(I)

except (IOError,TypeError) as e:
logging.warning(f'{fn} has error {e}')

# %% collect output
img = np.array(img)
time = np.array(time)
wavelen = np.array(wavelen)
wavelengths = np.unique(wavelen)
# %% deal with corrupted data
if minmax is not None:
img[(img<minmax[0]) | (img>minmax[1])] = 1 #instead of 0 for lognorm

ds = {}
for w in wavelengths:
ds[w] = (('time','y','x'),img[wavelen==w,...])

data = xarray.Dataset(ds,
coords={'time':time[:,0]},
attrs={'timeend':time[:,1],
'sensorloc':sensorloc})

if azfn is not None and elfn is not None:
with fits.open(Path(azfn).expanduser(),mode='readonly') as h:
data['az'] = (('y','x'),h[0].data)
with fits.open(Path(elfn).expanduser(),mode='readonly') as h:
data['el'] = (('y','x'),h[0].data)


return data

0 comments on commit 73cceeb

Please sign in to comment.