Merge pull request #1300 from sunpy/unidown
[Branch]: Unified Downloader
dpshelio committed Jan 11, 2017
2 parents 0d870e8 + 7b7d90d commit b1f201d
Showing 55 changed files with 2,674 additions and 381 deletions.
4 changes: 2 additions & 2 deletions .travis.yml
@@ -52,10 +52,10 @@ matrix:
env: JOB="Documentation" PYTHON_VERSION=3.5 SETUP_CMD='build_sphinx -w'

- os: linux
env: JOB="Figures" PYTHON_VERSION=2.7 SETUP_CMD='test --figure' CONDA_DEPENDENCIES=''
env: JOB="Figures" PYTHON_VERSION=2.7 SETUP_CMD='test --figure' CONDA_DEPENDENCIES='' # conda env is loaded from a file.

- os: linux
env: JOB="Online" PYTHON_VERSION=3.5 SETUP_CMD='test --online --coverage'
env: JOB="Online" PYTHON_VERSION=3.5 SETUP_CMD='test --online --coverage -V'

- os: linux
env: JOB="Doctest" PYTHON_VERSION=3.5 SETUP_CMD='build_sphinx -b doctest'
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -21,6 +21,8 @@ Latest
* Remove deprecated `VSOClient.show` method.
* Deprecate `sunpy.wcs`: `sunpy.coordinates` and `sunpy.map` now provide all
that functionality in a more robust manner.
* Removed `HelioviewerClient` from the `sunpy.net` namespace. It should now be
imported with `from sunpy.net.helioviewer import HelioviewerClient`.

0.7.0
-----
47 changes: 44 additions & 3 deletions doc/source/code_ref/net.rst
@@ -1,21 +1,62 @@
SunPy net
=========

.. automodapi:: sunpy.net
SunPy's net submodule contains code for accessing a variety of solar physics
web services. The submodule is organised in several layers. Most users should
use ``Fido``, a single interface to multiple sources, including all the
sources implemented in `~sunpy.net.dataretriever` as well as
`~sunpy.net.vso` and `~sunpy.net.jsoc`. ``Fido`` can be used like so::

>>> from sunpy.net import Fido, attrs as a
>>> results = Fido.search(a.Time("2012/1/1", "2012/1/2"), a.Instrument('lyra'))
>>> files = Fido.fetch(results)
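
The returned objects can be inspected before and after the transfer; a
minimal sketch (assuming the ``UnifiedResponse`` container documented in
`sunpy.net.fido_factory` below)::

    >>> print(results)  # summary table of matched records, per client  # doctest: +SKIP
    >>> print(files)    # list of local paths to the downloaded files  # doctest: +SKIP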

.. automodapi:: sunpy.net.fido_factory

Dataretriever
-------------

.. automodapi:: sunpy.net.dataretriever
:allowed-package-names: sources
:headings: ^#

.. automodapi:: sunpy.net.dataretriever.sources
:headings: #~


VSO
---

.. automodapi:: sunpy.net.vso
:headings: ^#

.. automodapi:: sunpy.net.vso.attrs
:headings: ^#
:headings: #~


HEK
---

.. automodapi:: sunpy.net.hek
:headings: ^#

.. automodapi:: sunpy.net.hek2vso
:headings: ^#


HELIO
-----

.. automodapi:: sunpy.net.helio
:headings: ^#

.. automodapi:: sunpy.net.helio.hec
:headings: ^#
:headings: #~


JSOC
----

.. automodapi:: sunpy.net.jsoc
:headings: ^#

2 changes: 1 addition & 1 deletion doc/source/guide/acquiring_data/database.rst
@@ -724,7 +724,7 @@ check `astropy.units`.

>>> from astropy import units as u
>>> print display_entries(
... database.query(vso.attrs.Wave(1.0*u.nm, 2.0*u.nm)),
... database.query(vso.attrs.Wavelength(1.0*u.nm, 2.0*u.nm)),
... ['id', 'observation_time_start', 'observation_time_end',
... 'instrument', 'wavemin', 'wavemax'], sort=True) # doctest: +NORMALIZE_WHITESPACE +SKIP
id observation_time_start observation_time_end instrument wavemin wavemax
4 changes: 2 additions & 2 deletions doc/source/guide/acquiring_data/vso.rst
@@ -78,7 +78,7 @@ can be used to specify dates and time). The second argument:

sets the instrument we are looking for. The third argument:

``vso.attrs.Wave(142*u.AA, 123*u.AA)``
``vso.attrs.Wavelength(142*u.AA, 123*u.AA)``

sets the wavelength range for the query, i.e. wavemax (the maximum value) and
wavemin (the minimum value). Also the ``u.AA``
@@ -127,7 +127,7 @@ setting conditions that the returned records must satisfy. You can
set the wavelength; for example, to return the 171 Angstrom EIT results

>>> import astropy.units as u
>>> qr=client.query(vso.attrs.Time('2001/1/1', '2001/1/2'), vso.attrs.Instrument('eit'), vso.attrs.Wave(171*u.AA,171*u.AA) )
>>> qr=client.query(vso.attrs.Time('2001/1/1', '2001/1/2'), vso.attrs.Instrument('eit'), vso.attrs.Wavelength(171*u.AA,171*u.AA) )
>>> len(qr)
4

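``Wavelength`` accepts any convertible spectral unit, so the same query can be
written in nanometres; a minimal sketch (17.1 nm is equivalent to 171 Å)::

    >>> qr = client.query(vso.attrs.Time('2001/1/1', '2001/1/2'),
    ...                   vso.attrs.Instrument('eit'),
    ...                   vso.attrs.Wavelength(17.1*u.nm, 17.1*u.nm))  # doctest: +SKIP
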
@@ -36,7 +36,7 @@
vso.attrs.Sample(24 * u.hour) &
vso.attrs.Time('2011-01-01', '2011-01-02'))

wave = vso.attrs.Wave(30 * u.nm, 31 * u.nm)
wave = vso.attrs.Wavelength(30 * u.nm, 31 * u.nm)


vc = vso.VSOClient()
33 changes: 25 additions & 8 deletions sunpy/conftest.py
@@ -20,6 +20,8 @@
else:
matplotlib.use('Agg')

from astropy.tests import disable_internet

from sunpy.tests import hash

hash_library_original_len = len(hash.hash_library)
@@ -40,16 +40,23 @@ def site_reachable(url):


def pytest_runtest_setup(item):
"""pytest hook to skip all tests that have the mark 'online' if the
"""
pytest hook to skip all tests that have the mark 'online' if the
client is offline (simply detected by checking whether http://www.google.com
can be requested).
"""
if isinstance(item, item.Function):
if 'online' in item.keywords and not is_online():
msg = 'skipping test {0} (reason: client seems to be offline)'
pytest.skip(msg.format(item.name))

if 'online' not in item.keywords:
disable_internet.turn_off_internet()


def pytest_runtest_teardown(item, nextitem):
disable_internet.turn_on_internet()


def pytest_unconfigure(config):
tempdir = tempfile.mkdtemp(suffix="_figures")
@@ -60,15 +69,23 @@ def pytest_unconfigure(config):
for h in hash.file_list:
test_name = inv_hash_library.get(h, '')
if test_name != '':
os.rename(hash.file_list[h], os.path.join(tempdir, test_name + '.png'))
print('All test files for figure hashes can be found in {0}'.format(tempdir))
os.rename(hash.file_list[h], os.path.join(tempdir,
test_name + '.png'))
print('All test files for figure hashes can be found in {0}'.format(
tempdir))

#Check if additions have been made to the hash library
# Check if additions have been made to the hash library
if len(hash.hash_library) > hash_library_original_len:
#Write the new hash library in JSON
# Write the new hash library in JSON
tempdir = tempfile.mkdtemp()
hashfile = os.path.join(tempdir, hash.HASH_LIBRARY_NAME)
with open(hashfile, 'wb') as outfile:
json.dump(hash.hash_library, outfile, sort_keys=True, indent=4, separators=(',', ': '))
print("The hash library has expanded and should be copied to sunpy/tests/")
json.dump(
hash.hash_library,
outfile,
sort_keys=True,
indent=4,
separators=(',', ': '))
print(
"The hash library has expanded and should be copied to sunpy/tests/")
print(" " + hashfile)
2 changes: 1 addition & 1 deletion sunpy/database/attrs.py
@@ -259,7 +259,7 @@ def _convert(attr):
return ValueAttr({(attr.__class__.__name__.lower(), ): attr.value})


@walker.add_converter(vso_attrs.Wave)
@walker.add_converter(vso_attrs.Wavelength)
def _convert(attr):
return ValueAttr({('wave', ): (attr.min.value, attr.max.value, str(attr.unit))})

8 changes: 4 additions & 4 deletions sunpy/database/serialize.py
@@ -19,7 +19,7 @@ def default(self, o):
if isinstance(o, (AttrAnd, AttrOr)):
# sort by dictionary keys to be order-invariant
values = sorted(o.attrs, key=attrgetter('__class__.__name__'))
elif isinstance(o, vso.attrs.Wave):
elif isinstance(o, vso.attrs.Wavelength):
values = o.min.value, o.max.value, str(o.unit)
elif isinstance(o, vso.attrs.Time):
values = o.start, o.end, o.near
@@ -47,9 +47,9 @@ def query_decode(json_object):
if key in json_object:
Attr = getattr(vso.attrs, key)
return Attr(json_object[key])
if 'Wave' in json_object:
Attr = getattr(vso.attrs, 'Wave')
wavemin, wavemax, unit = json_object['Wave']
if 'Wavelength' in json_object:
Attr = getattr(vso.attrs, 'Wavelength')
wavemin, wavemax, unit = json_object['Wavelength']
return Attr(wavemin * u.Unit(unit), wavemax * u.Unit(unit))
if 'Time' in json_object:
Attr = getattr(vso.attrs, 'Time')
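The key rename keeps (de)serialisation symmetric; a minimal sketch of the
round trip, mirroring the tests below (import path as in this diff):

    import json
    import astropy.units as u
    from sunpy.net import vso
    from sunpy.database.serialize import QueryEncoder, query_decode

    attr = vso.attrs.Wavelength(100 * u.AA, 200 * u.AA)
    dump = json.dumps(attr, cls=QueryEncoder)
    # dump == '{"Wavelength": [100.0, 200.0, "Angstrom"]}'
    assert json.loads(dump, object_hook=query_decode) == attr
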
4 changes: 2 additions & 2 deletions sunpy/database/tests/test_attrs.py
@@ -425,9 +425,9 @@ def test_walker_create_vso_instrument(vso_session):

@pytest.mark.online
def test_walker_create_wave(vso_session):
entries = walker.create(vso.attrs.Wave(0 * u.AA, 10 * u.AA), vso_session)
entries = walker.create(vso.attrs.Wavelength(0 * u.AA, 10 * u.AA), vso_session)
assert len(entries) == 2
entries = walker.create(vso.attrs.Wave(5 * u.AA, 10 * u.AA), vso_session)
entries = walker.create(vso.attrs.Wavelength(5 * u.AA, 10 * u.AA), vso_session)
assert len(entries) == 0


8 changes: 4 additions & 4 deletions sunpy/database/tests/test_serialize.py
@@ -8,8 +8,8 @@


def test_vso_wave():
attr = vso.attrs.Wave(100 * u.AA, 200 * u.AA)
expected = '{"Wave": [100.0, 200.0, "Angstrom"]}'
attr = vso.attrs.Wavelength(100 * u.AA, 200 * u.AA)
expected = '{"Wavelength": [100.0, 200.0, "Angstrom"]}'
assert json.dumps(attr, cls=QueryEncoder) == expected


@@ -96,8 +96,8 @@ def test_attr_and():


def test_decode_wave():
dump = '{"Wave": [10.0, 20.0, "Angstrom"]}'
assert json.loads(dump, object_hook=query_decode) == vso.attrs.Wave(10 * u.AA, 20 * u.AA)
dump = '{"Wavelength": [10.0, 20.0, "Angstrom"]}'
assert json.loads(dump, object_hook=query_decode) == vso.attrs.Wavelength(10 * u.AA, 20 * u.AA)


def test_decode_time():
2 changes: 1 addition & 1 deletion sunpy/database/tests/test_tables.py
@@ -45,7 +45,7 @@ def qr_block_with_missing_physobs():
return vso.VSOClient().query(
vso.attrs.Time('20130805T120000', '20130805T121000'),
vso.attrs.Instrument('SWAVES'), vso.attrs.Source('STEREO_A'),
vso.attrs.Provider('SSC'), vso.attrs.Wave(10 * u.kHz, 160 * u.kHz))[0]
vso.attrs.Provider('SSC'), vso.attrs.Wavelength(10 * u.kHz, 160 * u.kHz))[0]


@pytest.fixture
32 changes: 20 additions & 12 deletions sunpy/instr/rhessi.py
@@ -93,6 +93,9 @@ def get_obssumm_dbase_file(time_range):
_time_range = TimeRange(time_range)
data_location = 'dbase/'

if _time_range.start < parse_time("2002/02/01"):
raise ValueError("RHESSI summary files are not available for before 2002-02-01")

url_root = get_base_url() + data_location
url = url_root + _time_range.start.strftime("hsi_obssumm_filedb_%Y%m.txt")

@@ -169,8 +172,8 @@ def parse_obssumm_dbase_file(filename):
def get_obssum_filename(time_range):
"""
Download the RHESSI observing summary data from one of the RHESSI
servers, parses it, and returns the name of the obssumm file relevant for
the time range
servers, parses it, and returns the names of the obssumm files relevant for
the time range.
Parameters
----------
@@ -179,13 +182,13 @@ def get_obssum_filename(time_range):
Returns
-------
out : string
Returns the filename of the observation summary file
out : list
Returns the filenames of the observation summary files
Examples
--------
>>> import sunpy.instr.rhessi as rhessi
>>> rhessi.get_obssumm_filename(('2011/04/04', '2011/04/05')) # doctest: +SKIP
>>> rhessi.get_obssum_filename(('2011/04/04', '2011/04/05')) # doctest: +SKIP
.. note::
This API is currently limited to providing data from whole days only.
@@ -199,9 +202,10 @@ def get_obssum_filename(time_range):
result = parse_obssumm_dbase_file(f[0])
_time_range = TimeRange(time_range)

index_number = _time_range.start.day - 1
index_number_start = _time_range.start.day - 1
index_number_end = _time_range.end.day - 1

return get_base_url() + data_location + result.get('filename')[index_number] + 's'
return [get_base_url() + data_location + filename + 's' for filename in result.get('filename')[index_number_start:index_number_end]]


def get_obssumm_file(time_range):
Expand Down Expand Up @@ -277,19 +281,21 @@ def parse_obssumm_file(filename):
'50 - 100 keV', '100 - 300 keV', '300 - 800 keV', '800 - 7000 keV',
'7000 - 20000 keV']

# the data stored in the fits file are "compressed" countrates stored as one byte
# The data stored in the FITS file are "compressed" countrates stored as
# one byte
compressed_countrate = np.array(afits[6].data.field('countrate'))

countrate = uncompress_countrate(compressed_countrate)
dim = np.array(countrate[:,0]).size

time_array = [reference_time_ut + timedelta(0,time_interval_sec * a) for a in np.arange(dim)]

#TODO generate the labels for the dict automatically from labels
# TODO generate the labels for the dict automatically from labels
data = {'time': time_array, 'data': countrate, 'labels': labels}

return header, data


def uncompress_countrate(compressed_countrate):
"""Convert the compressed count rate inside of observing summary file from
a compressed byte to a true count rate
@@ -312,6 +318,7 @@ def uncompress_countrate(compressed_countrate):
sum = lkup[16 * (i + 1) - 1] + 2 ** i
return lkup[compressed_countrate]


def hsi_linecolors():
"""Define discrete colors to use for RHESSI plots
@@ -359,8 +366,8 @@ def _backproject(calibrated_event_list, detector=8, pixel_size=(1., 1.),
"""
afits = fits.open(calibrated_event_list)

#info_parameters = fits[2]
#detector_efficiency = info_parameters.data.field('cbe_det_eff$$REL')
# info_parameters = fits[2]
# detector_efficiency = info_parameters.data.field('cbe_det_eff$$REL')

afits = fits.open(calibrated_event_list)

@@ -388,6 +395,7 @@ def _backproject(calibrated_event_list, detector=8, pixel_size=(1., 1.),

return bproj_image


def backprojection(calibrated_event_list, pixel_size=(1., 1.) * u.arcsec,
image_dim=(64, 64) * u.pix):
"""
@@ -450,7 +458,7 @@ def backprojection(calibrated_event_list, pixel_size=(1., 1.) * u.arcsec,
for detector in detector_list:
if detector > 0:
image = image + _backproject(calibrated_event_list, detector=detector, pixel_size=pixel_size.value
, image_dim=image_dim.value)
, image_dim=image_dim.value)

dict_header = {
"DATE-OBS": time_range.center().strftime("%Y-%m-%d %H:%M:%S"),
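Note the behaviour change above: ``get_obssum_filename`` now returns a list
with one observing-summary URL per whole day of the requested range, instead
of a single filename. A minimal usage sketch (dates illustrative):

    import sunpy.instr.rhessi as rhessi

    # One URL per whole day covered by the range
    urls = rhessi.get_obssum_filename(('2011/04/04', '2011/04/06'))
    for url in urls:
        print(url)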
