Merge pull request #3049 from nabobalis/0.9

[0.9] Backports and CI update

nabobalis committed May 2, 2019
2 parents 249e089 + 4e26fb5 commit 742f7e5
Showing 16 changed files with 594 additions and 505 deletions.
8 changes: 4 additions & 4 deletions .circleci/config.yml
@@ -58,7 +58,7 @@ jobs:
command: pip install -U pip
- run:
name: Install dependencies
command: pip install --progress-bar off -e .[all]
command: pip install --progress-bar off .[all]
- run:
name: Run tests
command: python setup.py test
@@ -82,7 +82,7 @@ jobs:
command: pip install -U pip
- run:
name: Install dependencies
command: pip install --progress-bar off -e .[all]
command: pip install --progress-bar off .[all]
- run:
name: Run tests
command: python setup.py test
@@ -108,10 +108,10 @@ jobs:
command: /opt/python/cp36-cp36m/bin/pip install --progress-bar off numpy==1.15.3 scipy astropy matplotlib==1.5.3
- run:
name: Install dependencies two
command: /opt/python/cp36-cp36m/bin/pip install --progress-bar off sqlalchemy scikit-image==0.13.1 glymur drms suds-jurko beautifulsoup4 requests python-dateutil pytest pytest-cov pytest-mock pytest-xdist mock hypothesis pytest-astropy pytest-rerunfailures
command: /opt/python/cp36-cp36m/bin/pip install --progress-bar off sqlalchemy scikit-image==0.13.1 glymur drms suds-jurko beautifulsoup4 requests python-dateutil pytest pytest-cov pytest-mock mock hypothesis pytest-astropy pytest-rerunfailures
- run:
name: Run tests
command: PYTHONHASHSEED=42 /opt/python/cp36-cp36m/bin/pytest -n=4
command: PYTHONHASHSEED=42 /opt/python/cp36-cp36m/bin/pytest
environment:
MPLBACKEND: agg
COLUMNS: 180
4 changes: 2 additions & 2 deletions .travis.yml
@@ -37,8 +37,8 @@ env:
- MAIN_CMD='python setup.py'
- SETUP_CMD='test --coverage'
- CONDA_CHANNELS='sunpy'
- CONDA_DEPENDENCIES='openjpeg Cython jinja2 scipy matplotlib mock requests beautifulsoup4 sqlalchemy scikit-image pytest-mock lxml pyyaml pandas pytest-astropy suds-jurko glymur pytest-xdist dask drms sphinx-astropy pytest-cov hypothesis'
- PIP_DEPENDENCIES='pytest-sugar pytest-xdist'
- CONDA_DEPENDENCIES='openjpeg Cython jinja2 scipy matplotlib mock requests beautifulsoup4 sqlalchemy scikit-image pytest-mock lxml pyyaml pandas pytest-astropy suds-jurko glymur dask drms sphinx-astropy pytest-cov hypothesis'
- PIP_DEPENDENCIES='pytest-sugar'
- EVENT_TYPE='push pull_request cron'
- MPLBACKEND='agg'
- JOB='test'
11 changes: 11 additions & 0 deletions CHANGELOG.rst
@@ -1,3 +1,14 @@
Sunpy 0.9.7 (2019-05-01)
========================

Bug Fixes
---------

- If Carrington longitude ("crln_obs") is found in the FITS header, `~sunpy.map.Map` converts this to the correct Heliographic longitude. (`#2946 <https://github.com/sunpy/sunpy/pull/2946>`__)
- Fix the HGS <-> HCRS test to account for Ecliptic frame changes in Astropy 3.2. (`#3075 <https://github.com/sunpy/sunpy/pull/3075>`__)
- Fix a bug when creating a TimeSeries from a URL and a bug when creating a TimeSeries from older GOES/XRS FITS files. (`#3081 <https://github.com/sunpy/sunpy/pull/3081>`__)


Sunpy 0.9.6 (2019-01-28)
========================

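As a side note on the first changelog entry above: the fix derives the Stonyhurst (heliographic) longitude from the Carrington longitude stored in the header by subtracting the Carrington longitude of the central meridian at the map date. A minimal sketch of that relationship, using a hypothetical helper name (the real logic lives in Map.heliographic_longitude, see the sunpy/map/mapbase.py hunk below):

import astropy.units as u
from sunpy.coordinates import get_sun_L0

def stonyhurst_from_carrington(crln_obs, date):
    # crln_obs is the observer's Carrington longitude in degrees, as stored in
    # the FITS header; get_sun_L0 gives the Carrington longitude of the central
    # meridian as seen from Earth at that date.
    return crln_obs * u.deg - get_sun_L0(date)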
7 changes: 5 additions & 2 deletions azure-pipelines.yml
@@ -10,15 +10,18 @@ jobs:
name: Linux_37_online
os: linux
tox: py37-online -- -n=4
python: '3.7'

- template: azure-templates.yml
parameters:
name: Windows_27_offline
os: windows
tox: py27-offline
tox: py27-offline --
python: '2.7'

- template: azure-templates.yml
parameters:
name: Windows_36_offline
os: windows
tox: py36-offline
tox: py36-offline --
python: '3.6'
47 changes: 38 additions & 9 deletions azure-templates.yml
@@ -9,18 +9,47 @@ jobs:
vmImage: Ubuntu 16.04

steps:
- task: CondaEnvironment@1
displayName: 'Create conda environment'

- ${{ if eq(parameters.os, 'linux') }}:
- script: |
sudo apt-get install -y libopenjpeg5
displayName: apt install openjpeg
- ${{ if eq(parameters.os, 'macos') }}:
- script: |
brew install openjpeg
displayName: brew install openjpeg
- task: UsePythonVersion@0
inputs:
createCustomEnvironment: True
environmentName: "test"
createOptions: python="3.6"
versionSpec: ${{ parameters.python }}

- script: pip install --upgrade tox
displayName: install tox

- script: pip install --upgrade tox-conda
displayName: install tox-conda
- ${{ if contains(parameters.tox, 'conda') }}:
- script: pip install --upgrade tox-conda
displayName: install tox-conda

- script: tox -e ${{ parameters.tox }} -vvv --junitxml=junit/test-results.xml --cov-report=xml --cov-report=html
displayName: run tox

- script: |
pip install --upgrade codecov
codecov --name ${{ format(parameters.name) }}
displayName: run codecov
condition: succeededOrFailed()
- task: PublishTestResults@2
condition: succeededOrFailed()
inputs:
testResultsFiles: '**/test-*.xml'
testRunTitle: 'Publish test results for ${{ format(parameters.name) }}'

- script: tox -e ${{ parameters.tox }}
displayName: run tox-conda
- ${{ if contains(parameters.tox, 'online') }}:
- task: PublishCodeCoverageResults@1
condition: succeededOrFailed()
inputs:
codeCoverageTool: Cobertura
summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/coverage.xml'
reportDirectory: '$(System.DefaultWorkingDirectory)/**/htmlcov'
2 changes: 1 addition & 1 deletion setup.cfg
@@ -15,7 +15,7 @@ database_requires = sqlalchemy
image_requires = scikit-image
jpeg2000_requires = glymur
net_requires = drms, suds-jurko, beautifulsoup4, python-dateutil
tests_requires = pytest<3.7, pytest-cov, pytest-mock, pytest-xdist, mock, hypothesis, pytest-astropy
tests_requires = pytest<3.7, pytest-cov, pytest-mock, mock, hypothesis, pytest-astropy
docs_requires = ruamel.yaml, sphinx, sunpy-sphinx-theme, sphinx-gallery, sphinx-astropy, towncrier
# version should be PEP386 compatible (http://www.python.org/dev/peps/pep-0386)
version = 0.9.7
21 changes: 13 additions & 8 deletions sunpy/coordinates/tests/test_transformations.py
@@ -8,8 +8,13 @@

import astropy.units as u
from astropy.tests.helper import quantity_allclose, assert_quantity_allclose
from astropy.coordinates import (SkyCoord, get_body_barycentric, HeliocentricTrueEcliptic, Angle,
from astropy.coordinates import (SkyCoord, get_body_barycentric, Angle,
ConvertError)
try:
from astropy.coordinates import HeliocentricMeanEcliptic
except ImportError:
from astropy.coordinates import HeliocentricTrueEcliptic as HeliocentricMeanEcliptic

from astropy.time import Time

from sunpy.coordinates import (Helioprojective, HeliographicStonyhurst,
@@ -132,22 +137,22 @@ def test_hcrs_hgs_array_obstime():

def test_hgs_hcrs():
# This test checks the HGS->HCRS transformation by transforming from HGS to
# HeliocentricTrueEcliptic (HTE). It will fail if there are errors in Astropy's
# HCRS->ICRS or ICRS->HTE transformations.
# HeliocentricMeanEcliptic (HME). It will fail if there are errors in Astropy's
# HCRS->ICRS or ICRS->HME transformations.

# Use published HGS coordinates in the Astronomical Almanac (2013), pages C6-C7
obstime = Time('2013-01-28')
earth_hgs = SkyCoord(0*u.deg, -5.73*u.deg, 0.9848139*u.AU, frame=HeliographicStonyhurst,
obstime=obstime)

# Transform to HTE at observation-time equinox
earth_hte = earth_hgs.transform_to(HeliocentricTrueEcliptic(equinox=obstime))
# Transform to HME at observation-time equinox
earth_hme = earth_hgs.transform_to(HeliocentricMeanEcliptic(equinox=obstime))

# Validate against published values from the Astronomical Almanac (2013), page C6 per page E2
# The dominant source of inaccuracy is the limited precision of the published B0 used above
assert quantity_allclose(earth_hte.lon, Angle('308d13m30.51s') - 180*u.deg, atol=5*u.arcsec)
assert quantity_allclose(earth_hte.lat, -Angle('-0.27s'), atol=10*u.arcsec)
assert quantity_allclose(earth_hte.distance, 0.9848139*u.AU, atol=5e-7*u.AU)
assert quantity_allclose(earth_hme.lon, Angle('308d13m30.51s') - 180*u.deg, atol=5*u.arcsec)
assert quantity_allclose(earth_hme.lat, -Angle('-0.27s'), atol=10*u.arcsec)
assert quantity_allclose(earth_hme.distance, 0.9848139*u.AU, atol=5e-7*u.AU)


def test_hgs_hgc_roundtrip():
35 changes: 33 additions & 2 deletions sunpy/database/tests/test_tables.py
@@ -442,8 +442,8 @@ def test_entry_from_query_results_with_none_wave(qr_with_none_waves):
def test_entry_from_query_results_with_none_wave_and_default_unit(
qr_with_none_waves):
entries = list(entries_from_query_result(qr_with_none_waves, 'nm'))
assert len(entries) == 4
assert entries == [
assert len(entries) == 10
expected = [
DatabaseEntry(
source='SOHO', provider='SDAC', physobs='intensity',
fileid='/archive/soho/private/data/processed/virgo/level1/1212/HK/121222_1.H01',
@@ -471,8 +471,39 @@ def test_entry_from_query_results_with_none_wave_and_default_unit(
observation_time_start=datetime(2012, 12, 24, 0, 1, 58),
observation_time_end=datetime(2012, 12, 25, 0, 1, 57),
instrument='VIRGO', size=14.0, wavemin=None,
wavemax=None),
DatabaseEntry(
source='SOHO', provider='SDAC', physobs='intensity',
fileid='/archive/soho/private/data/processed/virgo/sph/VIRGO_D4.2_SPH_960411_120914.tar.gz',
observation_time_start=datetime(1996, 4, 11, 0, 0),
observation_time_end=datetime(2012, 9, 14, 0, 0),
instrument='VIRGO', size=512000.0, wavemin=None,
wavemax=None),
DatabaseEntry(
source='SOHO', provider='SDAC', physobs='intensity',
fileid='/archive/soho/private/data/processed/virgo/spm/SPM_blue_intensity_series.tar.gz',
observation_time_start=datetime(1996, 4, 11, 0, 0),
observation_time_end=datetime(2014, 3, 30, 23, 59),
instrument='VIRGO', size=32652.0, wavemin=None,
wavemax=None),
DatabaseEntry(
source='SOHO', provider='SDAC', physobs='intensity',
fileid='/archive/soho/private/data/processed/virgo/spm/SPM_green_intensity_series.tar.gz',
observation_time_start=datetime(1996, 4, 11, 0, 0),
observation_time_end=datetime(2014, 3, 30, 23, 59),
instrument='VIRGO', size=32652.0, wavemin=None,
wavemax=None),
DatabaseEntry(
source='SOHO', provider='SDAC', physobs='intensity',
fileid='/archive/soho/private/data/processed/virgo/spm/SPM_red_intensity_series.tar.gz',
observation_time_start=datetime(1996, 4, 11, 0, 0),
observation_time_end=datetime(2014, 3, 30, 23, 59),
instrument='VIRGO', size=32652.0, wavemin=None,
wavemax=None)]

for e in expected:
assert e in entries


def test_create_display_table_missing_entries():
with pytest.raises(TypeError):
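For context on the expanded expectations above, a hedged sketch of how entries_from_query_result is typically driven (the VSO query window here is illustrative and requires network access; the fixture in the test targets SOHO/VIRGO records). Records that carry no wavelength information come back with wavemin/wavemax of None; the 'nm' argument only sets the default unit for records that do report a wavelength:

from sunpy.net import vso
from sunpy.database.tables import entries_from_query_result

client = vso.VSOClient()
# Illustrative query window; adjust as needed.
qr = client.query(vso.attrs.Time('2012-12-22', '2012-12-25'),
                  vso.attrs.Instrument('VIRGO'))

entries = list(entries_from_query_result(qr, 'nm'))
# Entries without wavelength metadata keep wavemin/wavemax as None.
print(any(e.wavemin is None and e.wavemax is None for e in entries))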
8 changes: 5 additions & 3 deletions sunpy/map/map_factory.py
@@ -260,18 +260,20 @@ def __call__(self, *args, **kwargs):
Parameters
----------
composite : boolean, optional
Indicates if collection of maps should be returned as a CompositeMap
Indicates if collection of maps should be returned as a CompositeMap.
Default is ``False``.
cube : boolean, optional
Indicates if collection of maps should be returned as a MapCube
sequence : boolean, optional
Indicates if collection of maps should be returned as a MapSequence
Indicates if collection of maps should be returned as a MapSequence.
Default is ``False``.
silence_errors : boolean, optional
If set, ignore data-header pairs which cause an exception.
Default is ``False``.
Notes
-----
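For reference, a hedged usage sketch of the keywords documented in the docstring hunk above (the file names are placeholders; availability of each keyword on this branch follows the docstring shown):

import sunpy.map

# Several inputs normally yield a plain list of Map objects.
maps = sunpy.map.Map(['img1.fits', 'img2.fits'])

# The documented keywords change the returned container instead:
comp = sunpy.map.Map(['img1.fits', 'img2.fits'], composite=True)  # CompositeMap
seq = sunpy.map.Map(['img1.fits', 'img2.fits'], sequence=True)    # MapSequence

# silence_errors=True skips data-header pairs that raise an exception
# rather than aborting the whole call.
ok_maps = sunpy.map.Map(['img1.fits', 'broken.fits'], silence_errors=True)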
18 changes: 10 additions & 8 deletions sunpy/map/mapbase.py
@@ -651,14 +651,16 @@ def heliographic_longitude(self):
heliographic_longitude = self.meta.get('hgln_obs', None)

if heliographic_longitude is None:
if self._default_heliographic_longitude is None:
warnings.warn_explicit(
"Missing metadata for heliographic longitude: "
"assuming longitude of 0 degrees",
Warning, __file__,
inspect.currentframe().f_back.f_lineno)
self._default_heliographic_longitude = 0
heliographic_longitude = self._default_heliographic_longitude
if self.meta.get('crln_obs', None) is not None:
heliographic_longitude = self.meta['crln_obs'] * u.deg - get_sun_L0(self.date)
else:
if self._default_heliographic_longitude is None:
warnings.warn_explicit("Missing metadata for heliographic longitude: "
"assuming longitude of 0 degrees",
Warning, __file__,
inspect.currentframe().f_back.f_lineno)
self._default_heliographic_longitude = 0
heliographic_longitude = self._default_heliographic_longitude

if isinstance(heliographic_longitude, six.string_types):
heliographic_longitude = float(heliographic_longitude)
7 changes: 6 additions & 1 deletion sunpy/map/tests/test_mapbase.py
@@ -31,6 +31,9 @@

testpath = sunpy.data.test.rootdir

@pytest.fixture
def hmi_test_map():
return sunpy.map.Map(os.path.join(testpath, "resampled_hmi.fits"))

@pytest.fixture
def aia171_test_map():
@@ -198,7 +201,9 @@ def test_coordinate_frame(aia171_test_map):
assert frame.observer.lon == aia171_test_map.observer_coordinate.frame.lon
assert frame.observer.radius == aia171_test_map.observer_coordinate.frame.radius
assert frame.obstime == aia171_test_map.date


def test_heliographic_longitude_crln(hmi_test_map):
assert hmi_test_map.heliographic_longitude == hmi_test_map.carrington_longitude - sunpy.coordinates.get_sun_L0(hmi_test_map.date)

# ==============================================================================
# Test Rotation WCS conversion
12 changes: 5 additions & 7 deletions sunpy/timeseries/sources/goes.py
@@ -111,8 +111,8 @@ def peek(self, title="GOES Xray Flux"):
figure.show()

# ToDo: is this part of the DL pipeline? If so delete.
@classmethod
def _get_goes_sat_num(self, start, end):
@staticmethod
def _get_goes_sat_num(start, end):
"""Parses the query time to determine which GOES satellite to use."""

goes_operational = {
@@ -131,10 +131,8 @@ def _get_goes_sat_num(self, start, end):

sat_list = []
for sat_num in goes_operational:
if ((start >= goes_operational[sat_num].start and
start <= goes_operational[sat_num].end and
(end >= goes_operational[sat_num].start and
end <= goes_operational[sat_num].end))):
if (goes_operational[sat_num].start <= start <= goes_operational[sat_num].end and
goes_operational[sat_num].start <= end <= goes_operational[sat_num].end):
# if true then the satellite with sat_num is available
sat_list.append(sat_num)

@@ -166,7 +164,7 @@ def _parse_hdus(cls, hdulist):
xrsa = hdulist[2].data['FLUX'][0][:, 1]
seconds_from_start = hdulist[2].data['TIME'][0]
elif 1 <= len(hdulist) <= 3:
start_time = parse_time(header['TIMEZERO'])
start_time = parse_time(header['TIMEZERO'], format='utime')
seconds_from_start = hdulist[0].data[0]
xrsb = hdulist[0].data[1]
xrsa = hdulist[0].data[2]
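The _get_goes_sat_num rewrite above swaps the nested and-expressions for Python's chained comparisons. A self-contained sketch of the same containment test, with an illustrative stand-in for sunpy's TimeRange and made-up operational dates:

from collections import namedtuple
from datetime import datetime

TimeRange = namedtuple('TimeRange', ['start', 'end'])  # stand-in for sunpy.time.TimeRange

# Illustrative operational window only; the real table lives in goes.py.
goes_operational = {
    15: TimeRange(datetime(2010, 9, 1), datetime(2019, 5, 1)),
}

def available_satellites(start, end):
    """Satellite numbers whose operational window covers both start and end."""
    return [num for num, rng in goes_operational.items()
            if rng.start <= start <= rng.end and rng.start <= end <= rng.end]

print(available_satellites(datetime(2012, 1, 1), datetime(2012, 1, 2)))  # -> [15]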
