Adding ability to query over SOOP name (#84)
Co-authored-by: Nabil Freij <nabil.freij@gmail.com>
hayesla and nabobalis committed May 11, 2023
1 parent 40b4044 commit 1e1ca52
Showing 10 changed files with 128 additions and 7 deletions.
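The headline change: queries can now be filtered by SOOP (Solar Orbiter Observing Plan) name. A minimal end-to-end sketch, mirroring the new test_search_soop test further down (the time range and instrument are illustrative, and it is assumed that importing sunpy_soar registers the SOAR client and the a.soar attrs):

    import sunpy_soar  # noqa: F401  (assumed to register the SOAR client and a.soar attrs)
    from sunpy.net import Fido
    from sunpy.net import attrs as a

    # Search for EUI data taken while the "AR Long Term" SOOP was running.
    result = Fido.search(
        a.Time("2022-04-01 01:00", "2022-04-01 02:00"),
        a.Instrument("EUI"),
        a.soar.SOOP.r_small_mres_mcad_ar_long_term,
    )
    print(result[0]["SOOP Name"])  # the response table now carries a 'SOOP Name' column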
2 changes: 2 additions & 0 deletions .gitignore
@@ -11,3 +11,5 @@ __pycache__
dist/*
.history
.hypothesis/

.vscode
2 changes: 1 addition & 1 deletion LICENSE
@@ -1,4 +1,4 @@
Copyright (c) 2021 David Stansby All rights reserved.
Copyright (c) 2021-2023 David Stansby All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
4 changes: 4 additions & 0 deletions README.rst
@@ -84,6 +84,10 @@ Contributions for new features and bug fixes are welcome.
Changelog
=========

1.8
---
- Added ability to query with SOOP name.

1.7
---
- Added STIX data products to the list of valid data product identifiers.
1 change: 1 addition & 0 deletions setup.cfg
@@ -18,6 +18,7 @@ classifiers =
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Topic :: Scientific/Engineering :: Physics

[options]
11 changes: 11 additions & 0 deletions sunpy_soar/attrs.py
@@ -25,6 +25,12 @@ def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)


class SOOP(SimpleAttr):
"""
The SOOP name to search for.
"""


walker = AttrWalker()


@@ -123,3 +129,8 @@ def _(wlk, attr, params):
@walker.add_applier(a.Provider)
def _(wlk, attr, params):
params.append(f"provider='{attr.value}'")


@walker.add_applier(SOOP)
def _(wlk, attr, params):
params.append(f"soop_name='{attr.value}'")
19 changes: 13 additions & 6 deletions sunpy_soar/client.py
Expand Up @@ -10,7 +10,7 @@
from sunpy.net.base_client import BaseClient, QueryResponseTable
from sunpy.time import parse_time

from sunpy_soar.attrs import Identifier, Product, walker
from sunpy_soar.attrs import SOOP, Identifier, Product, walker

__all__ = ['SOARClient']

@@ -108,7 +108,7 @@ def _do_search(query):
'Data item ID': info['data_item_id'],
'Filename': info['filename'],
'Filesize': info['filesize'],
})
'SOOP Name': info["soop_name"]})

def fetch(self, query_results, *, path, downloader, **kwargs):
"""
@@ -154,7 +154,7 @@ def _can_handle_query(cls, *query):
True if this client can handle the given query.
"""
required = {a.Time}
optional = {a.Instrument, a.Level, a.Provider, Product, Identifier}
optional = {a.Instrument, a.Level, a.Provider, Product, Identifier, SOOP}
if not cls.check_attr_types_in_query(query, required, optional):
return False
# check to make sure the instrument attr passed is one provided by the SOAR.
@@ -178,17 +178,24 @@ def register_values(cls):

@staticmethod
def load_dataset_values():
# Instrument attrs
attrs_path = pathlib.Path(__file__).parent / 'data' / 'attrs.json'
with open(attrs_path, 'r') as attrs_file:
all_datasets = json.load(attrs_file)

# Convert from dict to list of tuples
all_datasets = [(id, desc) for id, desc in all_datasets.items()]

# Instrument attrs
instr_path = pathlib.Path(__file__).parent / 'data' / 'instrument_attrs.json'
with open(instr_path, 'r') as instr_attrs_file:
all_instr = json.load(instr_attrs_file)

all_instr = [(id, desc) for id, desc in all_instr.items()]

return {Product: all_datasets, a.Instrument: all_instr, a.Provider: [('SOAR', 'Solar Orbiter Archive.')]}
soop_path = pathlib.Path(__file__).parent / 'data' / 'soop_attrs.json'
with open(soop_path, 'r') as soop_path_file:
all_soops = json.load(soop_path_file)

all_soops = [(id, desc) for id, desc in all_soops.items()]

return {Product: all_datasets, a.Instrument: all_instr,
SOOP: all_soops, a.Provider: [('SOAR', 'Solar Orbiter Archive.')]}
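Because load_dataset_values now also reads soop_attrs.json, the SOOP names are registered alongside the Product and Instrument values and become discoverable on the attr itself (this is what test_registered_soop_names below checks). A small sketch, again assuming the import performs the registration:

    import sunpy_soar  # noqa: F401
    from sunpy.net import attrs as a

    # Tabulates the registered SOOP names, e.g. r_small_mres_mcad_ar_long_term.
    print(a.soar.SOOP)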
4 changes: 4 additions & 0 deletions sunpy_soar/data/attrs.json
@@ -115,12 +115,16 @@
"PHI-HRT-ICNT": "",
"PHI-HRT-STOKES": "",
"PHI-HRT-VLOS": "",
"RPW-BIA-DENSITY": "Solar Orbiter Radio/Plasma Wave, LFR L3 plasma density derived from the spacecraft potential",
"RPW-BIA-DENSITY-10-SECONDS": "Solar Orbiter Radio/Plasma Wave, LFR L3 plasma density derived from the spacecraft potential, downsampled",
"RPW-BIA-DENSITY-10-SECONDS-CDAG": "Solar Orbiter Radio/Plasma Wave, LFR L3 plasma density derived from the spacecraft potential, downsampled",
"RPW-BIA-DENSITY-CDAG": "Solar Orbiter Radio/Plasma Wave, LFR L3 plasma density derived from the spacecraft potential",
"RPW-BIA-EFIELD": "Solar Orbiter Radio/Plasma Wave, LFR L3 electric field vector",
"RPW-BIA-EFIELD-10-SECONDS": "Solar Orbiter Radio/Plasma Wave, LFR L3 electric field vector, downsampled",
"RPW-BIA-EFIELD-10-SECONDS-CDAG": "Solar Orbiter Radio/Plasma Wave, LFR L3 electric field vector, downsampled",
"RPW-BIA-EFIELD-CDAG": "Solar Orbiter Radio/Plasma Wave, LFR L3 electric field vector",
"RPW-BIA-SCPOT": "Solar Orbiter Radio/Plasma Wave, LFR L3 spacecraft potential",
"RPW-BIA-SCPOT-10-SECONDS": "Solar Orbiter Radio/Plasma Wave, LFR L3 spacecraft potential, downsampled",
"RPW-BIA-SCPOT-10-SECONDS-CDAG": "Solar Orbiter Radio/Plasma Wave, LFR L3 spacecraft potential, downsampled",
"RPW-BIA-SCPOT-CDAG": "Solar Orbiter Radio/Plasma Wave, LFR L3 spacecraft potential",
"RPW-HFR-SURV": "Solar Orbiter Radio/Plasma Wave, HFR L2 parameters",
54 changes: 54 additions & 0 deletions sunpy_soar/data/soop_attrs.json
@@ -0,0 +1,54 @@
{
"CC_OFFPOI_ALIGNMENT": "",
"CC_OFFPOI_FLATFIELD_FULL": "",
"CC_OFFPOI_FLATFIELD_HRI": "",
"CC_OFFPOI_OOF": "",
"CC_OFFPOI_STAR": "",
"CC_OFFPOI_STRAYLIGHT": "",
"CC_ROLLS_RS": "",
"COORD_CALIBRATION": "",
"I_DEFAULT": "",
"L_BOTH_HRES_HCAD_Major-Flare": "",
"L_BOTH_HRES_LCAD_CH-Boundary-Expansion": "",
"L_BOTH_LRES_MCAD_Pole-to-Pole": "",
"L_BOTH_MRES_MCAD_Farside-Connection": "",
"L_BOTH_MRES_MCAD_Flare-SEPs": "",
"L_FULL_HRES_HCAD_Coronal-Dynamics": "",
"L_FULL_HRES_HCAD_Eruption-Watch": "",
"L_FULL_HRES_LCAD_MagnFieldConfig": "",
"L_FULL_HRES_MCAD_Coronal-He-Abundance": "",
"L_FULL_LRES_MCAD_Coronal-Synoptic": "",
"L_FULL_LRES_MCAD_Probe-Quadrature": "",
"L_FULL_MRES_MCAD_CME-SEPs": "",
"L_IS_STIX": "",
"L_IS_SoloHI_STIX": "",
"L_SMALL_HRES_HCAD_Fast-Wind": "",
"L_SMALL_HRES_HCAD_Slow-Wind-Connection": "",
"L_SMALL_MRES_MCAD_Ballistic-Connection": "",
"L_SMALL_MRES_MCAD_Composition-Mosaic": "",
"L_SMALL_MRES_MCAD_Connection-Mosaic": "",
"L_SMALL_MRES_MCAD_Earth-Quadrature": "",
"L_TEMPORARY": "",
"R_BOTH_HRES_HCAD_Filaments": "",
"R_BOTH_HRES_HCAD_Nanoflares": "",
"R_BOTH_HRES_MCAD_Bright-Points": "",
"R_FULL_HRES_HCAD_Density-Fluctuations": "",
"R_FULL_LRES_HCAD_Full-Disk-Helioseismology": "",
"R_FULL_LRES_LCAD_Out-of-RSW-synoptics": "",
"R_FULL_LRES_LCAD_Transition-Corona": "",
"R_SMALL_HRES_HCAD_AR-Dynamics": "",
"R_SMALL_HRES_HCAD_Atmospheric-Dynamics-Structure": "",
"R_SMALL_HRES_HCAD_Ephemeral": "",
"R_SMALL_HRES_HCAD_Granulation-Tracking": "",
"R_SMALL_HRES_HCAD_Local-Area-Helioseismology": "",
"R_SMALL_HRES_HCAD_PDF-Mosaic": "",
"R_SMALL_HRES_HCAD_RS-burst": "",
"R_SMALL_HRES_HCAD_Wave-Stereoscopy": "",
"R_SMALL_HRES_LCAD_Composition-vs-Height": "",
"R_SMALL_HRES_LCAD_Fine-Scale-Structure": "",
"R_SMALL_HRES_MCAD_AR-Heating": "",
"R_SMALL_HRES_MCAD_Full-Disk-Mosaic": "",
"R_SMALL_HRES_MCAD_Polar-Observations": "",
"R_SMALL_MRES_HCAD_Sunspot-Oscillations": "",
"R_SMALL_MRES_MCAD_AR-Long-Term": ""
}
19 changes: 19 additions & 0 deletions sunpy_soar/tests/test_sunpy_soar.py
@@ -111,6 +111,25 @@ def test_registered_instr_attrs():
assert "stix" in instr_attr._attr_registry[instr_attr].name


def test_registered_soop_names():
# Check if the soop names are registered in a.soar.SOOP
soop_attr = str(a.soar.SOOP)
assert "\nr_small_mres_mcad_ar_long_term" in soop_attr


def test_search_soop():
instr = a.Instrument("EUI")
time = a.Time("2022-04-01 01:00", "2022-04-01 02:00")
soop_attr = a.soar.SOOP.r_small_mres_mcad_ar_long_term
res = Fido.search(time, instr, soop_attr)
assert "SOOP Name" in res[0].columns
assert res.file_num == 16

# Test that an invalid SOOP name returns no results
res = Fido.search(time, instr, a.soar.SOOP("hello"))
assert res.file_num == 0


def test_when_soar_provider_passed():
# Tests when a.Provider.soar is passed that only SOARClient results are returned
id = a.Instrument('EUI')
19 changes: 19 additions & 0 deletions tools/update_data.py
@@ -55,6 +55,20 @@ def get_all_instruments():
return instr_desc


def get_all_soops():
# Get the unique soop names
print("Updating SOOP descriptors...")
SOAR = TapPlus(url="http://soar.esac.esa.int/soar-sl-tap/tap")
job = SOAR.launch_job('select * from soar.soop')
res = job.get_results()

soop_names = {}
for row in res:
soop_names[row["soop_name"]] = ''

return soop_names


if __name__ == '__main__':
attr_file = pathlib.Path(__file__).parent.parent / 'sunpy_soar' / 'data' / 'attrs.json'
descriptors = get_all_descriptors()
@@ -65,3 +79,8 @@ def get_all_instruments():
instr_descriptors = get_all_instruments()
with open(instr_file, 'w') as instrs_file:
json.dump(dict(sorted(instr_descriptors.items())), instrs_file, indent=2)

soop_file = pathlib.Path(__file__).parent.parent / 'sunpy_soar' / 'data' / 'soop_attrs.json'
soop_descriptors = get_all_soops()
with open(soop_file, 'w') as soops_file:
json.dump(dict(sorted(soop_descriptors.items())), soops_file, indent=2)
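Usage note (an assumption, as the workflow is not documented in this diff): the bundled JSON files, including the new sunpy_soar/data/soop_attrs.json, would be refreshed by running python tools/update_data.py from the repository root, which requires network access to the SOAR TAP service.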
