19 changes: 19 additions & 0 deletions fairdatapipeline/fdp_utils.py
@@ -14,6 +14,25 @@
 import yaml
 
 
+def get_first_entry(entries: list) -> dict:
+    """
+    Helper function for get_entry that returns the first entry
+    in a response list.
+
+    Exception handling is done in the calling code.
+
+    Parameters
+    ----------
+    entries : list
+        Response list from the API.
+
+    Returns
+    -------
+    dict
+        Dictionary entry from the API response.
+    """
+    return entries[0]
+
+
 def get_entry(
     url: str,
     endpoint: str,
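For context, a minimal sketch of how the new helper behaves; the entries list here is hypothetical, not from this PR:

from fairdatapipeline.fdp_utils import get_first_entry

entries = [{"url": "http://localhost:8000/api/author/1"}]  # invented response
first = get_first_entry(entries)  # same as entries[0]
# get_first_entry([]) raises IndexError; callers are expected to test for
# an empty list first, as initialise() does in pipeline.py below.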
10 changes: 4 additions & 6 deletions fairdatapipeline/link.py
@@ -68,7 +68,7 @@ def link_write(handle: dict, data_product: str) -> str:
     }
 
     # If output exists in handle, append new metadata, otherwise create dict
-    if "output" in handle.keys():
+    if "output" in handle:
         key = "output_" + str(len(handle["output"]))
         handle["output"][key] = output_dict
     else:
@@ -90,7 +90,7 @@ def link_read(handle: dict, data_product: str) -> str:
"""

# If data product is already in handle, return path
if "input" in handle.keys():
if "input" in handle:
for index in handle["input"].keys():
if handle["input"][index]["data_product"] == data_product:
return handle["input"][index]["path"]
@@ -138,9 +138,7 @@ def link_read(handle: dict, data_product: str) -> str:
if "data_product" in use:
data_product = use["data_product"]

version = "0.0.1"
if "version" in use:
version = use["version"]
version = use["version"] if "version" in use else "0.0.1"

# Get data_product metadata and extract object id
data_product_response = fdp_utils.get_entry(
@@ -208,7 +206,7 @@ def link_read(handle: dict, data_product: str) -> str:
"component_url": component_url,
}

if "input" in handle.keys():
if "input" in handle:
index = "input_" + str(len(handle["input"]))
handle["input"][index] = input_dict
else:
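Both edits in this file are idiom swaps rather than behaviour changes; a toy sketch (the use dict is invented for illustration):

use = {"data_product": "SEIRS_model/parameters"}  # no "version" key
# A membership test on a dict already consults its keys, so
# '"version" in use' is equivalent to '"version" in use.keys()'.
version = use["version"] if "version" in use else "0.0.1"
print(version)  # -> 0.0.1, the fallback link_read applies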
29 changes: 19 additions & 10 deletions fairdatapipeline/pipeline.py
@@ -103,7 +103,7 @@ def initialise(token: str, config: str, script: str) -> dict:
     if not results:
         raise IndexError(f"list {results} empty")
     else:
-        user = results[0]
+        user = fdp_utils.get_first_entry(results)
     # Check users exists
     if not user:
         raise ValueError(
@@ -123,7 +123,7 @@ def initialise(token: str, config: str, script: str) -> dict:
     if not results:
         raise IndexError(f"list {results} empty")
     else:
-        author = results[0]
+        author = fdp_utils.get_first_entry(results)
     # Check user author exists
     if not author:
         raise ValueError(
@@ -350,7 +350,8 @@ def finalise(token: str, handle: dict) -> None:

     # Check datastore is in registry
     if datastore_root:
-        datastore_root_url = datastore_root[0]["url"]
+        datastore_root_dict = fdp_utils.get_first_entry(datastore_root)
+        datastore_root_url = datastore_root_dict["url"]
     else:
         datastore_root_url = fdp_utils.post_storage_root(
             token=token,
@@ -378,7 +379,8 @@ def finalise(token: str, handle: dict) -> None:
     )
     write_namespace_url = None
     if write_namespace:
-        write_namespace_url = write_namespace[0]["url"]
+        entry = fdp_utils.get_first_entry(write_namespace)
+        write_namespace_url = entry["url"]
     else:
         write_namespace_url = fdp_utils.post_entry(
             token=token,
@@ -404,7 +406,8 @@ def finalise(token: str, handle: dict) -> None:
         storage_location_url = None
 
         if storage_exists:
-            storage_location_url = storage_exists[0]["url"]
+            storage_exists_dict = fdp_utils.get_first_entry(storage_exists)
+            storage_location_url = storage_exists_dict["url"]
 
             os.remove(handle["output"][output]["path"])

@@ -427,13 +430,15 @@ def finalise(token: str, handle: dict) -> None:
                 if i > 4:
                     break
 
-            existing_path = storage_exists[0]["path"]
+            existing_path = storage_exists_dict["path"]
 
             existing_root = fdp_utils.get_entity(
                 url=registry_url,
                 endpoint="storage_root",
                 id=int(
-                    fdp_utils.extract_id(storage_exists[0]["storage_root"])
+                    fdp_utils.extract_id(
+                        storage_exists_dict["storage_root"]
+                    )
                 ),
                 api_version=api_version,
             )["root"]
@@ -481,7 +486,8 @@ def finalise(token: str, handle: dict) -> None:
         )
 
         if file_type_exists:
-            file_type_url = file_type_exists[0]["url"]
+            entry = fdp_utils.get_first_entry(file_type_exists)
+            file_type_url = entry["url"]
         else:
             file_type_url = fdp_utils.post_entry(
                 token=token,
@@ -503,8 +509,11 @@ def finalise(token: str, handle: dict) -> None:
         )
 
         if data_product_exists:
-            data_product_url = data_product_exists[0]["url"]
-            object_url = data_product_exists[0]["object"]
+            data_product_exists_dict = fdp_utils.get_first_entry(
+                data_product_exists
+            )
+            data_product_url = data_product_exists_dict["url"]
+            object_url = data_product_exists_dict["object"]
             obj = fdp_utils.get_entity(
                 url=registry_url,
                 endpoint="object",
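finalise() repeats the same lookup-or-create shape for storage roots, namespaces, file types and data products. A hypothetical distillation of that pattern, assuming the registry lookups return a possibly empty list (get_or_create_url is not part of this PR):

from typing import Callable

from fairdatapipeline.fdp_utils import get_first_entry

def get_or_create_url(existing: list, create: Callable[[], str]) -> str:
    # Return the "url" of the first registry match, else create the entry.
    if existing:
        return get_first_entry(existing)["url"]
    return create()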
46 changes: 25 additions & 21 deletions fairdatapipeline/raise_issue.py
@@ -159,41 +159,45 @@ def raise_issue(
     group: bool = True,
 ) -> None:
     current_group = issue + ":" + str(severity)
-    if issue_type in [
+    if issue_type in {
         "config",
         "submission_script",
         "github_repo",
         "existing_data_product",
-    ]:
+    }:
         logging.info("Adding issue {} for {} to handle".format(issue, type))
     elif index is None:
-        reads = (
-            handle["yaml"]["read"] if "read" in handle["yaml"].keys() else None
-        )
-        writes = (
-            handle["yaml"]["write"]
-            if "write" in handle["yaml"].keys()
-            else None
-        )
         data_product_in_config = False
+        reads = None
+        writes = None
+        if "read" in handle["yaml"].keys():
+            reads = handle["yaml"]["read"]
+        if "write" in handle["yaml"].keys():
+            writes = handle["yaml"]["write"]
 
         if reads:
             for i in reads:
                 if i["data_product"] == data_product:
                     data_product_in_config = True
-                    if "use" in i.keys():
-                        if "use_version" in i["use"].keys():
-                            if not i["use"]["version"] == version:
-                                data_product_in_config = False
+                    if (
+                        "use" in i.keys()
+                        and "use_version" in i["use"].keys()
+                        and i["use"]["version"] != version
+                    ):
+                        data_product_in_config = False
                     data_product = i["data_product"]
                     if not group:
                         current_group = i["data_product"]
         if writes:
             for i in writes:
                 if i["data_product"] == data_product:
-                    data_product_in_config = True
-                    if "use" in i.keys():
-                        if "use_version" in i["use"].keys():
-                            if not i["use"]["version"] == version:
-                                data_product_in_config = False
+                    data_product_in_config = (
+                        "use" not in i.keys()
+                        or "use_version" not in i["use"].keys()
+                        or i["use"]["version"] == version
+                    )
 
                     data_product = i["data_product"]
                     if not group:
                         current_group = i["data_product"]
@@ -209,13 +213,13 @@ def raise_issue(

     else:
         tmp = None
-        if "output" in handle.keys():
+        if "output" in handle:
             for output in handle["output"]:
                 if output == index:
                     tmp = handle["output"][output]
                     if not group:
                         current_group = handle["output"][output]
-        if "input" in handle.keys():
+        if "input" in handle:
             for input in handle["input"]:
                 if input == index:
                     tmp = handle["input"][input]
@@ -243,7 +247,7 @@ def raise_issue(
"group": current_group,
}

if "issues" in handle.keys():
if "issues" in handle:
this_issue = "issue_" + str(len(handle["issues"]))
handle["issues"][this_issue] = issues_dict
else:
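The writes-loop rewrite collapses three nested ifs into one assignment; by De Morgan's laws the new expression is the negation of the condition that previously reset the flag. A toy check with invented values:

version = "1.0.0"
i = {"data_product": "demo", "use": {"use_version": True, "version": "1.0.0"}}
data_product_in_config = (
    "use" not in i.keys()
    or "use_version" not in i["use"].keys()
    or i["use"]["version"] == version
)
print(data_product_in_config)  # -> True, the versions match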
18 changes: 6 additions & 12 deletions simpleModel/common/SEIRS_Model.py
@@ -58,22 +58,16 @@ def SEIRS_Model(
     time_unit_days = time_unit_years * 365.25
 
     # Convert Parameters to days
-    alpha = alpha * time_unit_days
-    beta = beta * time_unit_days
+    alpha *= time_unit_days
+    beta *= time_unit_days
     gamma = time_unit_days / inv_gamma
     omega = time_unit_days / (inv_omega * 365.25)
     mu = time_unit_days / (inv_mu * 365.25)
     sigma = time_unit_days / inv_sigma
 
-    results = {}
-    results[0] = {
-        "time": 0,
-        "S": S_data,
-        "E": E_data,
-        "I": I_data,
-        "R": R_data,
+    results = {
+        0: {"time": 0, "S": S_data, "E": E_data, "I": I_data, "R": R_data}
     }
 
     for i in range(int(timesteps)):
         N = (
             results[i]["S"]
@@ -124,8 +118,8 @@ def write_model_to_csv(model_output: dict, path: str) -> None:
         quoting=csv.QUOTE_NONNUMERIC,
     )
     dictWriter.writeheader()
-    for i in model_output:
-        dictWriter.writerow(model_output[i])
+    for i, value in model_output.items():
+        dictWriter.writerow(value)
     logging.info("Success file: {} written".format(path))


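The CSV change iterates over key/value pairs instead of re-indexing the dict on every row; a minimal sketch with an invented one-row model output:

model_output = {0: {"time": 0, "S": 999, "E": 0, "I": 1, "R": 0}}
for i, value in model_output.items():
    print(value)  # previously written as model_output[i]

Since i is unused in the loop body, iterating over model_output.values() would be equivalent.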