Skip to content

Commit

Permalink
Updated retrieval code.
Browse files Browse the repository at this point in the history
  • Loading branch information
bobbyxng committed Jul 12, 2024
1 parent a183b93 commit de12f91
Show file tree
Hide file tree
Showing 3 changed files with 15 additions and 29 deletions.
4 changes: 2 additions & 2 deletions rules/retrieve.smk
Original file line number Diff line number Diff line change
Expand Up @@ -325,8 +325,8 @@ if config["enable"]["retrieve"] and any(c in ["UA", "MD"] for c in config["count

rule retrieve_gdp_uamd:
output:
"data/GDP_per_capita_PPP_1990_2015_v2.nc",
"data/ppp_2013_1km_Aggregated.tif",
gdp="data/GDP_per_capita_PPP_1990_2015_v2.nc",
ppp="data/ppp_2013_1km_Aggregated.tif",
log:
"logs/retrieve_gdp_uamd.log",
resources:
Expand Down
1 change: 1 addition & 0 deletions scripts/determine_availability_matrix_MD_UA.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ def get_wdpa_layer_name(wdpa_fn, layer_substring):
gpd.read_file(snakemake.input.regions).set_index("name").rename_axis("bus")
)
buses = regions.index
buses = regions.loc[regions["country"].isin(["UA", "MD"])].index.values

excluder = atlite.ExclusionContainer(crs=3035, res=100)

Expand Down
39 changes: 12 additions & 27 deletions scripts/retrieve_gdp_uamd.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,41 +9,26 @@
import logging
from pathlib import Path

from _helpers import retrieve_file
from _helpers import configure_logging, retrieve_file, set_scenario_config

logger = logging.getLogger(__name__)


def retrieve(url, destination):
    """Download the file at *url* to *destination*.

    Thin logging wrapper around ``_helpers.retrieve_file``, which performs
    the actual download (and, per the log message, validation — confirm
    in ``_helpers``).

    Parameters
    ----------
    url : str
        Source URL to download from.
    destination : str or pathlib.Path
        Local path the file is written to.
    """
    logger.info(f"Downloading file from '{url}'.")
    retrieve_file(url, destination)
    logger.info("File downloaded and validated.")


if __name__ == "__main__":
if "snakemake" not in globals():
from _helpers import mock_snakemake

snakemake = mock_snakemake("retrieve_gdp_uamd")
rootpath = ".."
else:
rootpath = "."

datasets = [
# GDP_per_capita_PPP_1990_2015_v2.nc: raw dataset. Available at: [M. Kummu, M. Taka, J. H. A. Guillaume. (2020), Data from: Gridded global datasets for Gross Domestic Product and Human Development Index over 1990-2015, Dryad, Dataset. doi: https://doi.org/10.5061/dryad.dk1j0]
(
"https://datadryad.org/stash/downloads/file_stream/241947",
"GDP_per_capita_PPP_1990_2015_v2.nc",
),
# ppp_2020_1km_Aggregated.tif: raw WorldPop population dataset. Available via the Humanitarian Data Exchange (https://data.humdata.org/) — exact dataset page truncated in the original comment; verify against the download URL below.
(
"https://data.worldpop.org/GIS/Population/Global_2000_2020/2020/0_Mosaicked/ppp_2020_1km_Aggregated.tif",
"ppp_2020_1km_Aggregated.tif",
),
]
configure_logging(snakemake)
set_scenario_config(snakemake)

dict_urls = dict(
{
"gdp": "https://datadryad.org/stash/downloads/file_stream/241947",
"ppp": "https://github.com/ecohealthalliance/sars_cov_risk/releases/download/v2.0.1/ppp_2020_1km_Aggregated.tif",
}
)

# Download and validate each dataset
for url, filename in datasets:
file_path = rootpath / "data" / filename
retrieve(url, file_path)
    # Download each dataset to the output path snakemake associates with its
    # keyword ("gdp" / "ppp"); dict_urls supplies the matching source URL.
    for key, path in snakemake.output.items():
        retrieve_file(dict_urls[key], path)

0 comments on commit de12f91

Please sign in to comment.