Skip to content

Commit

Permalink
Merge 4d42060 into 941171f
Browse files Browse the repository at this point in the history
  • Loading branch information
smithara committed Feb 16, 2021
2 parents 941171f + 4d42060 commit 0bafa38
Show file tree
Hide file tree
Showing 7 changed files with 232 additions and 54 deletions.
3 changes: 2 additions & 1 deletion setup.py
Expand Up @@ -58,7 +58,8 @@ def read(fname):
scripts=[],
package_data={
'viresclient': [
'_wps/templates/*'
'_wps/templates/*',
'_data/*'
],
},
python_requires='>=3.6',
Expand Down
4 changes: 3 additions & 1 deletion viresclient/__init__.py
Expand Up @@ -34,5 +34,7 @@
from ._data_handling import ReturnedDataFile
from ._api.upload import DataUpload
from ._api.token import TokenManager
from . import _data

__version__ = "0.7.2"

__version__ = "0.8.0-alpha"
9 changes: 6 additions & 3 deletions viresclient/_client.py
Expand Up @@ -35,7 +35,7 @@
try:
from IPython import get_ipython
IN_JUPYTER = 'zmqshell' in str(type(get_ipython()))
except ImportError:
except Exception:
IN_JUPYTER = False
from tqdm import tqdm
from io import StringIO
Expand Down Expand Up @@ -79,6 +79,9 @@

# Maximum selectable time interval ~25 years
MAX_TIME_SELECTION = timedelta(days=25*365.25)
# Maximum time-chunk size ~50 years
MAX_CHUNK_DURATION = 2 * MAX_TIME_SELECTION


TEMPLATE_FILES = {
'list_jobs': "vires_list_jobs.xml",
Expand Down Expand Up @@ -395,9 +398,9 @@ def _chunkify_request(start_time, end_time, sampling_step, nrecords_limit):
e.g. [(start1, end1), (start2, end2)]
"""
# maximum chunk duration as a timedelta object
chunk_duration = timedelta(seconds=(
chunk_duration = min(timedelta(seconds=(
nrecords_limit * parse_duration(sampling_step).total_seconds()
))
)), MAX_CHUNK_DURATION)

# calculate the chunk intervals ...
request_intervals = []
Expand Down
171 changes: 136 additions & 35 deletions viresclient/_client_swarm.py
Expand Up @@ -12,7 +12,7 @@
from ._wps.time_util import parse_datetime
from ._client import WPSInputs, ClientRequest, TEMPLATE_FILES
from ._data_handling import ReturnedDataFile

from ._data import CONFIG_SWARM

TEMPLATE_FILES = {
**TEMPLATE_FILES,
Expand All @@ -33,7 +33,7 @@

MODEL_REFERENCES = {
'IGRF':
(" International Geomagnetic Reference Field: the 13th generation, (waiting for publication) ",
(" International Geomagnetic Reference Field: the thirteenth generation, (https://doi.org/10.1186/s40623-020-01288-x) ",
" https://www.ngdc.noaa.gov/IAGA/vmod/igrf.html "),
'IGRF12':
(" International Geomagnetic Reference Field: the 12th generation, https://doi.org/10.1186/s40623-015-0228-9 ",
Expand Down Expand Up @@ -161,6 +161,7 @@
"AUX_OBSH": ("https://doi.org/10.5047/eps.2013.07.011",),
"AUX_OBSM": ("https://doi.org/10.5047/eps.2013.07.011",),
"AUX_OBSS": ("https://doi.org/10.5047/eps.2013.07.011",),
"VOBS_SW_1M": ("https://www.space.dtu.dk/english/research/projects/project-descriptions/geomagnetic-virtual-observatories",),
}

DATA_CITATIONS = {
Expand All @@ -169,8 +170,10 @@
"AUX_OBSS": "ftp://ftp.nerc-murchison.ac.uk/geomag/Swarm/AUX_OBS/second/README",
}

IAGA_CODES = ['AAA', 'AAE', 'ABG', 'ABK', 'AIA', 'ALE', 'AMS', 'API', 'AQU', 'ARS', 'ASC', 'ASP', 'BDV', 'BEL', 'BFE', 'BFO', 'BGY', 'BJN', 'BLC', 'BMT', 'BNG', 'BOU', 'BOX', 'BRD', 'BRW', 'BSL', 'CBB', 'CBI', 'CDP', 'CKI', 'CLF', 'CMO', 'CNB', 'CNH', 'COI', 'CPL', 'CSY', 'CTA', 'CTS', 'CYG', 'CZT', 'DED', 'DLR', 'DLT', 'DMC', 'DOB', 'DOU', 'DRV', 'DUR', 'EBR', 'ELT', 'ESA', 'ESK', 'EYR', 'FCC', 'FRD', 'FRN', 'FUQ', 'FUR', 'GAN', 'GCK', 'GDH', 'GLM', 'GLN', 'GNA', 'GNG', 'GUA', 'GUI', 'GZH', 'HAD', 'HBK', 'HER', 'HLP', 'HON', 'HRB', 'HRN', 'HUA', 'HYB', 'IPM', 'IQA', 'IRT', 'IZN', 'JAI', 'JCO', 'KAK', 'KDU', 'KEP', 'KHB', 'KIR', 'KIV', 'KMH', 'KNY', 'KNZ', 'KOU', 'KSH', 'LER', 'LIV', 'LMM', 'LNP', 'LON', 'LOV', 'LRM', 'LRV', 'LVV', 'LYC', 'LZH', 'MAB', 'MAW', 'MBC', 'MBO', 'MCQ', 'MEA', 'MGD', 'MID', 'MIZ', 'MMB', 'MZL', 'NAQ', 'NCK', 'NEW', 'NGK', 'NGP', 'NMP', 'NUR', 'NVS', 'ORC', 'OTT', 'PAF', 'PAG', 'PBQ', 'PEG', 'PET', 'PHU', 'PIL', 'PND', 'PPT', 'PST', 'QGZ', 'QIX', 'QSB', 'QZH', 'RES', 'SBA', 'SBL', 'SFS', 'SHE', 'SHL', 'SHU', 'SIL', 'SIT', 'SJG', 'SOD', 'SPG', 'SPT', 'STJ', 'SUA', 'TAM', 'TAN', 'TDC', 'TEO', 'THJ', 'THL', 'THY', 'TIR', 'TND', 'TRO', 'TRW', 'TSU', 'TUC', 'UPS', 'VAL', 'VIC', 'VNA', 'VOS', 'VSK', 'VSS', 'WHN', 'WIC', 'WIK', 'WNG', 'YAK', 'YKC']
# IAGA_CODES = ['AAA', 'AAE', 'ABG', 'ABK', 'AIA', 'ALE', 'AMS', 'API', 'AQU', 'ARS', 'ASC', 'ASP', 'BDV', 'BEL', 'BFE', 'BFO', 'BGY', 'BJN', 'BLC', 'BMT', 'BNG', 'BOU', 'BOX', 'BRD', 'BRW', 'BSL', 'CBB', 'CBI', 'CDP', 'CKI', 'CLF', 'CMO', 'CNB', 'CNH', 'COI', 'CPL', 'CSY', 'CTA', 'CTS', 'CYG', 'CZT', 'DED', 'DLR', 'DLT', 'DMC', 'DOB', 'DOU', 'DRV', 'DUR', 'EBR', 'ELT', 'ESA', 'ESK', 'EYR', 'FCC', 'FRD', 'FRN', 'FUQ', 'FUR', 'GAN', 'GCK', 'GDH', 'GLM', 'GLN', 'GNA', 'GNG', 'GUA', 'GUI', 'GZH', 'HAD', 'HBK', 'HER', 'HLP', 'HON', 'HRB', 'HRN', 'HUA', 'HYB', 'IPM', 'IQA', 'IRT', 'IZN', 'JAI', 'JCO', 'KAK', 'KDU', 'KEP', 'KHB', 'KIR', 'KIV', 'KMH', 'KNY', 'KNZ', 'KOU', 'KSH', 'LER', 'LIV', 'LMM', 'LNP', 'LON', 'LOV', 'LRM', 'LRV', 'LVV', 'LYC', 'LZH', 'MAB', 'MAW', 'MBC', 'MBO', 'MCQ', 'MEA', 'MGD', 'MID', 'MIZ', 'MMB', 'MZL', 'NAQ', 'NCK', 'NEW', 'NGK', 'NGP', 'NMP', 'NUR', 'NVS', 'ORC', 'OTT', 'PAF', 'PAG', 'PBQ', 'PEG', 'PET', 'PHU', 'PIL', 'PND', 'PPT', 'PST', 'QGZ', 'QIX', 'QSB', 'QZH', 'RES', 'SBA', 'SBL', 'SFS', 'SHE', 'SHL', 'SHU', 'SIL', 'SIT', 'SJG', 'SOD', 'SPG', 'SPT', 'STJ', 'SUA', 'TAM', 'TAN', 'TDC', 'TEO', 'THJ', 'THL', 'THY', 'TIR', 'TND', 'TRO', 'TRW', 'TSU', 'TUC', 'UPS', 'VAL', 'VIC', 'VNA', 'VOS', 'VSK', 'VSS', 'WHN', 'WIC', 'WIK', 'WNG', 'YAK', 'YKC']
IAGA_CODES = CONFIG_SWARM.get("IAGA_CODES")

VOBS_SITES = CONFIG_SWARM.get("VOBS_SITES")

class SwarmWPSInputs(WPSInputs):
"""Holds the set of inputs to be passed to the request template for Swarm
Expand Down Expand Up @@ -233,8 +236,8 @@ def _spacecraft_from_collection(collection):
else:
# 12th character in name, e.g. SW_OPER_MAGx_LR_1B
sc = collection[11]
sc_to_name = {"A": "Alpha", "B": "Bravo", "C": "Charlie", "_": "NSC"}
name = sc_to_name[sc]
sc_to_name = {"A": "Alpha", "B": "Bravo", "C": "Charlie"}
name = sc_to_name.get(sc, "NSC")
return name

def set_collections(self, collections):
Expand Down Expand Up @@ -421,9 +424,66 @@ class SwarmRequest(ClientRequest):
"AUX_OBSS": [
"SW_OPER_AUX_OBSS2_",
*[f"SW_OPER_AUX_OBSS2_:{code}" for code in IAGA_CODES]
]
],
"VOBS_SW_1M": [
"SW_OPER_VOBS_1M_2_",
*[f"SW_OPER_VOBS_1M_2_:{site}" for site in VOBS_SITES]
],
"VOBS_SW_4M": [
"SW_OPER_VOBS_4M_2_",
*[f"SW_OPER_VOBS_4M_2_:{site}" for site in VOBS_SITES]
],
"VOBS_CH_1M": [
"CH_OPER_VOBS_1M_2_",
*[f"CH_OPER_VOBS_1M_2_:{site}" for site in VOBS_SITES]
],
"VOBS_CH_4M": [
"CH_OPER_VOBS_4M_2_",
*[f"CH_OPER_VOBS_4M_2_:{site}" for site in VOBS_SITES]
],
"VOBS_CR_4M": [
"CR_OPER_VOBS_4M_2_",
*[f"CR_OPER_VOBS_4M_2_:{site}" for site in VOBS_SITES]
],
"VOBS_SW_1M:SecularVariation": [
"SW_OPER_VOBS_1M_2_:SecularVariation",
*[f"SW_OPER_VOBS_1M_2_:SecularVariation:{site}" for site in VOBS_SITES]
],
"VOBS_SW_4M:SecularVariation": [
"SW_OPER_VOBS_4M_2_:SecularVariation",
*[f"SW_OPER_VOBS_4M_2_:SecularVariation:{site}" for site in VOBS_SITES]
],
"VOBS_CH_1M:SecularVariation": [
"CH_OPER_VOBS_1M_2_:SecularVariation",
*[f"CH_OPER_VOBS_1M_2_:SecularVariation:{site}" for site in VOBS_SITES]
],
"VOBS_CH_4M:SecularVariation": [
"CH_OPER_VOBS_4M_2_:SecularVariation",
*[f"CH_OPER_VOBS_4M_2_:SecularVariation:{site}" for site in VOBS_SITES]
],
"VOBS_CR_4M:SecularVariation": [
"CR_OPER_VOBS_4M_2_:SecularVariation",
*[f"CR_OPER_VOBS_4M_2_:SecularVariation:{site}" for site in VOBS_SITES]
],
}

OBS_COLLECTIONS = [
"SW_OPER_AUX_OBSH2_",
"SW_OPER_AUX_OBSM2_",
"SW_OPER_AUX_OBSS2_",
"SW_OPER_VOBS_1M_2_",
"SW_OPER_VOBS_4M_2_",
"CH_OPER_VOBS_1M_2_",
"CH_OPER_VOBS_4M_2_",
"CR_OPER_VOBS_4M_2_",
"SW_OPER_VOBS_1M_2_:SecularVariation",
"SW_OPER_VOBS_4M_2_:SecularVariation",
"CH_OPER_VOBS_1M_2_:SecularVariation",
"CH_OPER_VOBS_4M_2_:SecularVariation",
"CR_OPER_VOBS_4M_2_:SecularVariation",
]


# These are not necessarily real sampling steps, but are good enough to use
# for splitting long requests into chunks
COLLECTION_SAMPLING_STEPS = {
Expand All @@ -439,7 +499,17 @@ class SwarmRequest(ClientRequest):
"AEJ_LPS": "PT1S",
"AUX_OBSH": "PT60M",
"AUX_OBSM": "PT60S",
"AUX_OBSS": "PT1S"
"AUX_OBSS": "PT1S",
"VOBS_SW_1M": "P31D",
"VOBS_CH_1M": "P31D",
"VOBS_SW_4M": "P122D",
"VOBS_CH_4M": "P122D",
"VOBS_CR_4M": "P122D",
"VOBS_SW_1M:SecularVariation": "P31D",
"VOBS_CH_1M:SecularVariation": "P31D",
"VOBS_SW_4M:SecularVariation": "P122D",
"VOBS_CH_4M:SecularVariation": "P122D",
"VOBS_CR_4M:SecularVariation": "P122D",
}

PRODUCT_VARIABLES = {
Expand Down Expand Up @@ -501,9 +571,19 @@ class SwarmRequest(ClientRequest):
"Latitude_QD", "Longitude_QD", "MLT_QD",
"Boundary_Flag", "Quality", "Pair_Indicator"
],
"AUX_OBSH": ["B_NEC", "F", "IAGA_code", "Quality", "SensorIndex"],
"AUX_OBSH": ["B_NEC", "F", "IAGA_code", "Quality", "ObsIndex"],
"AUX_OBSM": ["B_NEC", "F", "IAGA_code", "Quality"],
"AUX_OBSS": ["B_NEC", "F", "IAGA_code", "Quality"],
"VOBS_SW_1M": ["SiteCode", "B_CF", "B_OB", "sigma_CF", "sigma_OB"],
"VOBS_CH_1M": ["SiteCode", "B_CF", "B_OB", "sigma_CF", "sigma_OB"],
"VOBS_SW_4M": ["SiteCode", "B_CF", "B_OB", "sigma_CF", "sigma_OB"],
"VOBS_CH_4M": ["SiteCode", "B_CF", "B_OB", "sigma_CF", "sigma_OB"],
"VOBS_CR_4M": ["SiteCode", "B_CF", "B_OB", "sigma_CF", "sigma_OB"],
"VOBS_SW_1M:SecularVariation": ["SiteCode", "B_SV", "sigma_SV"],
"VOBS_CH_1M:SecularVariation": ["SiteCode", "B_SV", "sigma_SV"],
"VOBS_SW_4M:SecularVariation": ["SiteCode", "B_SV", "sigma_SV"],
"VOBS_CH_4M:SecularVariation": ["SiteCode", "B_SV", "sigma_SV"],
"VOBS_CR_4M:SecularVariation": ["SiteCode", "B_SV", "sigma_SV"],
}

AUXILIARY_VARIABLES = [
Expand Down Expand Up @@ -622,11 +702,22 @@ def available_collections(self, groupname=None, details=True):
If False then return a dict of available collections.
"""
# Shorter form of the available collections
# Shorter form of the available collections,
# without all the individual SiteCodes
collections_short = self._available["collections"].copy()
collections_short["AUX_OBSS"] = ['SW_OPER_AUX_OBSS2_']
collections_short["AUX_OBSM"] = ['SW_OPER_AUX_OBSM2_']
collections_short["AUX_OBSH"] = ['SW_OPER_AUX_OBSH2_']
collections_short["AUX_OBSS"] = ["SW_OPER_AUX_OBSS2_"]
collections_short["AUX_OBSM"] = ["SW_OPER_AUX_OBSM2_"]
collections_short["AUX_OBSH"] = ["SW_OPER_AUX_OBSH2_"]
collections_short["VOBS_SW_1M"] = ["SW_OPER_VOBS_1M_2_"]
collections_short["VOBS_SW_4M"] = ["SW_OPER_VOBS_4M_2_"]
collections_short["VOBS_CH_1M"] = ["CH_OPER_VOBS_1M_2_"]
collections_short["VOBS_CH_4M"] = ["CH_OPER_VOBS_4M_2_"]
collections_short["VOBS_CR_4M"] = ["CR_OPER_VOBS_4M_2_"]
collections_short["VOBS_SW_1M:SecularVariation"] = ["SW_OPER_VOBS_1M_2_:SecularVariation"]
collections_short["VOBS_SW_4M:SecularVariation"] = ["SW_OPER_VOBS_4M_2_:SecularVariation"]
collections_short["VOBS_CH_1M:SecularVariation"] = ["CH_OPER_VOBS_1M_2_:SecularVariation"]
collections_short["VOBS_CH_4M:SecularVariation"] = ["CH_OPER_VOBS_4M_2_:SecularVariation"]
collections_short["VOBS_CR_4M:SecularVariation"] = ["CR_OPER_VOBS_4M_2_:SecularVariation"]

def _filter_collections(groupname):
""" Reduce the full list to just one group, e.g. "MAG """
Expand Down Expand Up @@ -811,14 +902,9 @@ def _csv_to_df(csv_data):
StringIO(str(csv_data, 'utf-8'))
)

obs_collections = [
"SW_OPER_AUX_OBSH2_",
"SW_OPER_AUX_OBSM2_",
"SW_OPER_AUX_OBSS2_"
]
if collection not in obs_collections:
if collection not in self.OBS_COLLECTIONS:
raise ValueError(
f"Invalid collection: {collection}. Must be one of: {obs_collections}."
f"Invalid collection: {collection}. Must be one of: {self.OBS_COLLECTIONS}."
)
if start_time and end_time:
start_time = parse_datetime(start_time)
Expand All @@ -832,7 +918,9 @@ def _csv_to_df(csv_data):
if details:
return df
else:
return list(df["IAGACode"])
# note: "IAGACode" has been renamed to "site" in VirES 3.5
key = "IAGACode" if "IAGACode" in df.keys() else "site"
return list(df[key])

def _detect_AUX_OBS(self, collections):
# Identify collection types present
Expand Down Expand Up @@ -948,25 +1036,38 @@ def set_products(self, measurements=None, models=None, custom_model=None,
raise OSError("Custom model .shc file not found")
else:
custom_shc = None

# Set up the variables that actually get passed to the WPS request

def _model_datavar_names(variable, residuals=False):
"""Give the list of allowable variable names containing model evaluations"""
if variable not in model_variables:
raise ValueError(f"Expected one of {model_variables}; got '{variable}'")
affix = "_res_" if residuals else "_"
return [f"{variable}{affix}{model_name}" for model_name in model_ids]

# Identify which (if any) of ["F", "B_NEC", ...] are requested
model_variables_present = set(measurements).intersection(set(model_variables))
# Create the list of variable names to request
variables = []
for variable in measurements:
if variable in model_variables:
if residuals:
variables.extend(
"%s_res_%s" % (variable, model_name)
for model_name in model_ids
)
else:
variables.append(variable)
variables.extend(
"%s_%s" % (variable, model_name)
for model_name in model_ids
)
else: # not a model variable
for variable in model_variables_present:
if not residuals:
# Include "F" / "B_NEC" as requested...
variables.append(variable)
# Include e.g. "F_IGRF" / "B_NEC_IGRF" / "B_NEC_res_IGRF" etc.
variables.extend(_model_datavar_names(variable, residuals=residuals))
if models and (len(model_variables_present) == 0):
if residuals:
raise ValueError(
f"""
Residuals requested without one of {model_variables} set as measurements
"""
)
# If "F" / "B_NEC" have not been requested, include e.g. "B_NEC_IGRF" etc.
variables.extend(_model_datavar_names("B_NEC"))
# Include all the non-model-related variables
variables.extend(list(set(measurements) - model_variables_present))
variables.extend(auxiliaries)
# Set these in the SwarmWPSInputs object
self._request_inputs.model_expression = model_expression_string
self._request_inputs.variables = variables
self._request_inputs.sampling_step = sampling_step
Expand Down
7 changes: 7 additions & 0 deletions viresclient/_data/__init__.py
@@ -0,0 +1,7 @@
"""Packaged static configuration data for viresclient.

Loads ``config_swarm.json`` (shipped alongside this module via
``package_data``) into the module-level constant ``CONFIG_SWARM``,
a dict consumed elsewhere (e.g. ``CONFIG_SWARM.get("IAGA_CODES")``,
``CONFIG_SWARM.get("VOBS_SITES")``).
"""
from os.path import join, dirname
import json

# Directory containing this module; the JSON config lives next to it.
_DIRNAME = dirname(__file__)

# Explicit UTF-8: JSON interchange is UTF-8 by spec (RFC 8259); relying on
# the locale-default encoding would break on non-UTF-8 systems.
with open(join(_DIRNAME, "config_swarm.json"), "r", encoding="utf-8") as f:
    CONFIG_SWARM = json.load(f)

0 comments on commit 0bafa38

Please sign in to comment.