Skip to content

Commit

Permalink
Merge pull request #6 from Risk-Team/Dev
Browse files Browse the repository at this point in the history
v3.1.0
  • Loading branch information
RSO9192 committed May 7, 2024
2 parents 75d5e51 + ed8dee2 commit 4f29ae4
Show file tree
Hide file tree
Showing 81 changed files with 1,121 additions and 402 deletions.
4 changes: 2 additions & 2 deletions DESCRIPTION
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
Package: CAVAanalytics
Type: Package
Title: R package providing a framework for easy access, processing, and advanced visualization of gridded climate products
Version: 3.0.0
Version: 3.1.0
Authors@R: c(person("Riccardo", "Soldan", , "riccardosoldan@hotmail.it", role = c("aut", "cre"),
comment = c(ORCID = "0000-0003-2583-5840")),
person("Rodrigo", "Manzanas", , "rodrigo.manzanas@unican.es", role = "aut",
Expand All @@ -10,7 +10,7 @@ Authors@R: c(person("Riccardo", "Soldan", , "riccardosoldan@hotmail.it", role =
person("Hideki", "Kanamaru", , "Hideki.kanamaru@fao.org", role = "ctb", comment=c(ORCID="0000-0002-5306-5022")),
person("Jorge", "Alvar-Beltrán", , "jorge.alvarbeltran@fao.org", role = "ctb", comment = c(ORCID = "0000-0003-2454-0629")),
person("Arianna", "Gialletti", , "Arianna.gialletti@fao.org", role = "ctb"))
Description: CAVAanalytics is an operational R library for climate services, providing direct and easy access to CORDEX-CORE models and reanalysis datasets
Description: CAVAanalytics is a comprehensive framework for climate data analysis, offering streamlined access to data, advanced processing and visualization capabilities. It is designed to support a wide range of climate research and user needs
License: GPL-2 | GPL-3
Encoding: UTF-8
LazyData: true
Expand Down
11 changes: 11 additions & 0 deletions NEWS.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,17 @@ output:
toc_depth: 2
---

## CAVAanalytics 3.1.0

In this new version of CAVAanalytics, many improvements have been made:

- Users can now select which bias correction method to use (default to Empirical Quantile Mapping)
- Intervals can be specified for customizing breaks in the color palette
- Improved performance of the memory efficient functions
- Added option to visualize climate change signal as percentage
- Other minor changes


## CAVAanalytics 3.0.0

Version 3.0.0 of CAVAanalytics is the first stable release. Follow the tutorial to learn how to use it.
95 changes: 68 additions & 27 deletions R/climate_change_signal.R
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@
#' @param bias.correction logical
#' @param threshold numerical value with range 0-1. It indicates the threshold for assigning model agreement. For example, 0.6 indicates that model agreement is assigned when 60 percent of the models agree in the sign of the change
#' @param n.sessions numeric, number of sessions to use, default is one. Parallelisation can be useful when multiple scenarios are used (RCPS, SSPs). However, note that parallelising will increase RAM usage
#' @param method character, bias-correction method to use. One of eqm (Empirical Quantile Mapping) or qdm (Quantile Delta Mapping). Default to eqm
#' @param percentage logical, whether the climate change signal is to be calculated as relative changes (in percentage). Default to FALSE
#' @importFrom magrittr %>%
#' @return list with SpatRaster. To explore the output run attributes(output)
#'
Expand All @@ -26,7 +28,9 @@ climate_change_signal <- function(data,
frequency = F,
bias.correction = F,
threshold = 0.6,
n.sessions = 1) {
n.sessions = 1,
method = "eqm",
percentage = F) {
# Intermediate functions --------------------------------------------------

# check inputs requirement
Expand All @@ -38,11 +42,17 @@ climate_change_signal <- function(data,
duration,
bias.correction,
season,
agreement) {
agreement,
method,
percentage) {
stopifnot(is.logical(consecutive))
stopifnot(is.logical(percentage))
stopifnot(is.numeric(threshold), threshold >= 0, threshold <= 1)
if (!is.list(season))
cli::cli_abort("season needs to be a list, for example, list(1:3)")
if (!(method == "eqm" || method == "qdm")) {
cli::cli_abort("method must be 'eqm' or qdm")
}
if (!(duration == "max" || is.numeric(duration))) {
cli::cli_abort("duration must be 'max' or a number")
}
Expand Down Expand Up @@ -99,11 +109,15 @@ climate_change_signal <- function(data,
consecutive,
duration,
bias.correction,
frequency) {
frequency,
percentage) {
if (is.null(uppert) & is.null(lowert)) {
paste0("Climate change signal for ",
ifelse(var == "pr", "total ", "mean "),
var)
paste0(
"Climate change signal for ",
ifelse(var == "pr", "total ", "mean "),
var,
ifelse(percentage, " in %", "")
)
}
else if ((!is.null(uppert) |
!is.null(lowert)) & !consecutive) {
Expand Down Expand Up @@ -187,7 +201,9 @@ climate_change_signal <- function(data,
bias.correction,
season,
frequency,
threshold) {
threshold,
method,
percentage) {
season_name <-
convert_vector_to_month_initials(season)
data_list <- datasets %>%
Expand All @@ -196,7 +212,8 @@ climate_change_signal <- function(data,
cli::cli_text(
paste(
"{cli::symbol$arrow_right}",
" Performing bias correction with the empirical quantile mapping",
" Performing bias correction with the ",
method,
" method, for each model and month separately. This can take a while. Season",
glue::glue_collapse(season, "-")
)
Expand All @@ -210,7 +227,7 @@ climate_change_signal <- function(data,
y = obs[[1]],
x = mod,
precipitation = ifelse(var == "pr", TRUE, FALSE),
method = "eqm",
method = method,
window = if (any(diffs == 1))
c(30, 30)
else
Expand All @@ -226,7 +243,7 @@ climate_change_signal <- function(data,
x = dplyr::filter(datasets, experiment == "historical")$models_mbrs[[1]],
newdata = mod,
precipitation = ifelse(var == "pr", TRUE, FALSE),
method = "eqm",
method = method,
window = if (any(diffs == 1))
c(30, 30)
else
Expand Down Expand Up @@ -284,7 +301,16 @@ climate_change_signal <- function(data,
ccs_mbrs = purrr::map(models_agg_tot, function(y) {
h <-
dplyr::filter(., stringr::str_detect(experiment, "hist"))$models_agg_tot[[1]]
transformeR::gridArithmetics(y, h, operator = "-")
delta <-
transformeR::gridArithmetics(y, h, operator = "-")

if (percentage) {
delta <-
transformeR::gridArithmetics(delta, h, operator = "/") %>%
transformeR::gridArithmetics(., 100, operator = "*")
}

delta
}),
rst_ccs_sign = purrr::map2(experiment, ccs_mbrs, function(x, y) {
y$Data <- apply(y$Data, c(1, 3, 4), mean)
Expand Down Expand Up @@ -339,6 +365,12 @@ climate_change_signal <- function(data,

delta <-
transformeR::gridArithmetics(y, h.expanded, operator = "-")
if (percentage) {
delta <-
transformeR::gridArithmetics(delta, h.expanded, operator = "/") %>%
transformeR::gridArithmetics(., 100, operator = "*")
}

dimnames(delta$Data)[[1]] <- delta$Members
dimnames(delta$Data)[[2]] <- delta$Dates$start
dimnames(delta$Data)[[3]] <- delta$xyCoords$y
Expand Down Expand Up @@ -384,14 +416,18 @@ climate_change_signal <- function(data,
if (class(data) != "CAVAanalytics_list")
cli::cli_abort(c("x" = "The input data is not the output of CAVAanalytics load_data"))
# check input requirements
check_inputs(data,
uppert,
lowert,
consecutive,
duration,
bias.correction,
season,
threshold)
check_inputs(
data,
uppert,
lowert,
consecutive,
duration,
bias.correction,
season,
threshold,
method,
percentage
)

# retrieve information
mod.numb <- dim(data[[1]]$models_mbrs[[1]]$Data) [1]
Expand All @@ -408,13 +444,16 @@ climate_change_signal <- function(data,
#create plots by season
data_list <- purrr::map(season, function(sns) {
mes <-
create_message(var,
uppert,
lowert,
consecutive,
duration,
bias.correction,
frequency)
create_message(
var,
uppert,
lowert,
consecutive,
duration,
bias.correction,
frequency,
percentage
)

# filter data by season
datasets <- filter_data_by_season(datasets, sns)
Expand Down Expand Up @@ -445,7 +484,9 @@ climate_change_signal <- function(data,
bias.correction,
season = sns,
frequency,
threshold
threshold,
method,
percentage
)
cli::cli_progress_done()
# return results
Expand Down
28 changes: 28 additions & 0 deletions R/load_data.R
Original file line number Diff line number Diff line change
Expand Up @@ -245,6 +245,9 @@ load_data <-
}
}

# used to convert wind speed from the 10 m to the 2 m level following FAO guidelines (Allen et al.)
conversion_factor <- 4.87 / log((67.8 * 10) - 5.42)

# start -------------------------------------------------------------------

# check for valid path
Expand Down Expand Up @@ -332,6 +335,9 @@ load_data <-
} else if (stringr::str_detect(variable, "pr")) {
suppressMessages(transformeR::gridArithmetics(., 86400, operator = "*"))

} else if (stringr::str_detect(variable, "sfc")) {
suppressMessages(transformeR::gridArithmetics(., conversion_factor, operator = "*"))

} else {
.

Expand Down Expand Up @@ -364,6 +370,9 @@ load_data <-
} else if (stringr::str_detect(variable, "pr")) {
suppressMessages(transformeR::gridArithmetics(., 86400, operator = "*"))

} else if (stringr::str_detect(variable, "sfc")) {
suppressMessages(transformeR::gridArithmetics(., conversion_factor, operator = "*"))

} else {
.

Expand Down Expand Up @@ -460,6 +469,12 @@ load_data <-
operator = "/")
obs_tr$Variable$varName = variable
obs_tr
} else if (stringr::str_detect(variable, "sfc")) {
obs_tr <- transformeR::gridArithmetics(.,
conversion_factor,
operator = "*")
obs_tr$Variable$varName = variable
obs_tr
} else {
obs_tr <- transformeR::gridArithmetics(., 1, operator = "*")
obs_tr$Variable$varName = variable
Expand Down Expand Up @@ -497,6 +512,10 @@ load_data <-
cli::cli_text(
"{cli::symbol$arrow_right} Temperature data from CORDEX-CORE has been converted into Celsius"
)
} else if (stringr::str_detect(variable, "sfc")) {
cli::cli_text(
"{cli::symbol$arrow_right} Wind speed data from CORDEX-CORE has been converted to 2 m level"
)
}

}
Expand All @@ -522,6 +541,15 @@ load_data <-
" has been converted into Celsius"
)
)
} else if (stringr::str_detect(variable, "sfc")) {
cli::cli_text(
paste0(
"{cli::symbol$arrow_right}",
" Wind speed data from ",
path.to.obs,
" has been converted to 2 m level"
)
)
}
}
}
Expand Down
14 changes: 10 additions & 4 deletions R/load_data_and_climate_change_signal.R
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@
#' @param chunk.size numeric, indicating the number of chunks. The smaller the better when working with limited RAM
#' @param threshold numerical value with range 0-1. It indicates the threshold for assigning model agreement. For example, 0.6 indicates that model agreement is assigned when 60 percent of the models agree in the sign of the change
#' @param overlap numeric, amount of overlap needed to create the composite. Default 0.25
#' @param percentage logical, whether the climate change signal is to be calculated as relative changes (in percentage). Default to FALSE
#' @param method character, bias-correction method to use. One of eqm (Empirical Quantile Mapping) or qdm (Quantile Delta Mapping). Default to eqm
#' @importFrom magrittr %>%
#' @return list with SpatRaster. To explore the output run attributes(output)
#' @export
Expand All @@ -49,7 +51,9 @@ load_data_and_climate_change_signal <-
bias.correction = F,
domain = NULL,
threshold = 0.6,
n.sessions = 6) {
n.sessions = 6,
method = "eqm",
percentage = F) {
# calculate number of chunks based on xlim and ylim
if (missing(chunk.size) | missing(season)) {
cli::cli_abort("chunk.size and season must be specified")
Expand Down Expand Up @@ -164,7 +168,9 @@ load_data_and_climate_change_signal <-
duration = duration,
frequency = frequency,
threshold = threshold,
n.sessions = 1
n.sessions = 1,
method = method,
percentage = percentage
)
)

Expand All @@ -190,7 +196,7 @@ load_data_and_climate_change_signal <-
# Determine the largest (coarsest) resolution among all rasters
resolutions <- sapply(rst_list, function(r)
terra::res(r))
common_res <- min(resolutions)
common_res <- max(resolutions)

# Resample all rasters to the common resolution
resampled_rasters <- lapply(rst_list, function(r) {
Expand All @@ -200,7 +206,7 @@ load_data_and_climate_change_signal <-
resolution = common_res,
crs = terra::crs(r)
),
method = "bilinear")
method = "mode")
})

# Merge the resampled rasters
Expand Down
11 changes: 7 additions & 4 deletions R/load_data_and_model_biases.R
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
#' @param n.sessions numeric, number of sessions to use in parallel processing for loading the data. Default to 6. Increasing the number of sessions will not necessarily results in better performances. Leave as default unless necessary
#' @param chunk.size numeric, indicating the number of chunks. The smaller the better when working with limited RAM
#' @param overlap numeric, amount of overlap needed to create the composite. Default 0.25
#' @param method character, bias-correction method to use. One of eqm (Empirical Quantile Mapping) or qdm (Quantile Delta Mapping). Default to eqm
#' @importFrom magrittr %>%
#' @return list with SpatRaster. To explore the output run attributes(output)
#' @export
Expand All @@ -47,7 +48,8 @@ load_data_and_model_biases <-
frequency = F,
bias.correction = F,
domain = NULL,
n.sessions = 6) {
n.sessions = 6,
method = "eqm") {
# calculate number of chunks based on xlim and ylim
if (missing(chunk.size) | missing(season)) {
cli::cli_abort("chunk.size and season must be specified")
Expand Down Expand Up @@ -160,7 +162,8 @@ load_data_and_model_biases <-
consecutive = consecutive,
duration = duration,
frequency = frequency,
n.sessions = 1
n.sessions = 1,
method = method
)
)

Expand All @@ -180,7 +183,7 @@ load_data_and_model_biases <-
# Determine the largest (coarsest) resolution among all rasters
resolutions <- sapply(rst_list, function(r)
terra::res(r))
common_res <- min(resolutions)
common_res <- max(resolutions)

# Resample all rasters to the common resolution
resampled_rasters <- lapply(rst_list, function(r) {
Expand All @@ -190,7 +193,7 @@ load_data_and_model_biases <-
resolution = common_res,
crs = terra::crs(r)
),
method = "bilinear")
method = "mode")
})

# Merge the resampled rasters
Expand Down
Loading

0 comments on commit 4f29ae4

Please sign in to comment.