diff --git a/.Rbuildignore b/.Rbuildignore index 2a2a753d061baa9dab657853b564427c97df1ec6..89478ae5ef8cfeefc4e2e2a2f9b4e842cd35beea 100644 --- a/.Rbuildignore +++ b/.Rbuildignore @@ -12,3 +12,6 @@ ^cran-comments\.md$ ./vignettes/*.md ^inst/doc/paper-figure-PlotForecastPDF\.R$ +^.*\.Rproj$ +^\.Rproj\.user$ +^CONTRIBUTING.md diff --git a/.gitignore b/.gitignore index 23a560302d2818630fb7428585681dd3b07a2b3a..022a30629db6bd51b4ac7c3e219e369e28e7cdaf 100644 --- a/.gitignore +++ b/.gitignore @@ -18,3 +18,4 @@ Rplots.pdf *.RData !data/*.RData .\.nfs* +.Rproj.user diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..3faa54d96dda3af8d4e29ab8b4c8101470f6ca41 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,66 @@ +### Questions and bug reports + +If you have questions about the usage of a function or would like to report a bug, please follow these steps: + +1. Use the search function in GitLab to see if a similar problem has already been reported and/or solved. + +2. Rather than including a long script, try to run your code in small blocks to narrow down where the problem is coming from as much as possible. + +3. Open an issue tagging the maintainers (@tkariyat), and include a descriptive title and a piece of code to reproduce your problem (if applicable). + +### How to contribute + +If you would like to add a bugfix, an enhancement, or a new function, follow these steps: + +1. Open an issue to ask for help or describe a feature to be integrated + +2. Agree with the maintainers (@tkariyat) on the requirements + +3. Create a new branch from master with a meaningful name + +4. Once the development is finished, open a merge request to merge the branch into master + +*Note: Remember to work with multidimensional arrays with named dimensions when possible and use multiApply (https://earth.bsc.es/gitlab/ces/multiApply)* + +### Adding a function + +To add a new function to this R package, take the following considerations into account: + +* Each function exposed to the users should be in its own separate file in the R folder +* The name of the function should match the name of the file (e.g.: `Function()` included in file **Function.R**) +* The documentation should be in roxygen2 format as a header of the function +* Once the function and the documentation are finished, run the command `devtools::document()` in your R terminal to automatically generate the **Function.Rd** file +* When doing the development, please use an R version between R/4.1.2 and R/4.3.x + +### Style guide + +* Use `<-` for variable assignment +* Include spaces between operators (e.g. `+`, `-`, `&`), before `{`, and after `for`, `if`, `while`, `,` and `)`. +* When possible, maximum line length should be 100 characters (soft limit of 80 characters). +* Number of indentation spaces is 2; using tabs for indentation is forbidden. +* Double quotes are recommended for strings. When writing quotes within quoted text, use double quotes outside and single quotes inside. E.g.: `“Parameter 'na.rm' is missing.”` +* Self-explanatory names are preferred for variables. Try to be consistent with the variable naming style. Generally: avoid special characters (except underscores) and reserved words (ex: if, for, else, …) +* Remember to include short comments to make the code easier to understand. Comments should be on their own line and they should start with `#` followed by a space. 
Comments that start with `##` and a space are for details not needed to understand the general procedure but useful to make note of more technical aspects of the code. + +#### Examples: + +```r +# Proper spacing, indentation spaces and text quotes: +NewFunction <- function(text = "default", uppercase = TRUE) { + # Check uppercase parameter + if (!is.logical(uppercase)) { + stop("Parameter 'uppercase' should be TRUE or FALSE.") + } + # Only transform text if needed + if (uppercase && is.character(text)) { + text <- toupper(text) + } + return(text) +} + +# How to format line breaks to avoid long lines: +my_strings <- list(one = "one", + two = "two", + three = "three", + four = "four") +``` diff --git a/DESCRIPTION b/DESCRIPTION index 2fc850e7062e3837df08804b24d93cd9995afd59..7fdadd0c4d72f5780370a15e20025bc2284ff4f2 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,8 +1,9 @@ Package: CSIndicators Title: Climate Services' Indicators Based on Sub-Seasonal to Decadal Predictions -Version: 1.1.1 +Version: 1.1.2 Authors@R: c( - person("Victòria", "Agudetse", , "victoria.agudetse@bsc.es", role = c("cre")), + person("Theertha", "Kariyathan", ,"theertha.kariyathan@bsc.es", role = c("cre")), + person("Victòria", "Agudetse", , "victoria.agudetse@bsc.es", role = c("ctb")), person("Eva", "Rifà", , "eva.rifarovira@bsc.es", role = c("ctb")), person("Nuria", "Perez-Zanon", , "nuria.perez@bsc.es", role = c("aut"), comment = c(ORCID = "0000-0001-8568-3071")), person("Chou", "Chihchung", ,"chihchung.chou@bsc.es", role = "aut"), diff --git a/NAMESPACE b/NAMESPACE index 15c3c9a6e194bd8fea84dac36081c0d8033674aa..466e3c69ba7c7209d4f30841941ed1a712e890c3 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -60,6 +60,7 @@ importFrom(stats,ecdf) importFrom(stats,qnorm) importFrom(stats,quantile) importFrom(stats,sd) +importFrom(stats,setNames) importFrom(stats,window) importFrom(utils,read.delim) importFrom(zoo,rollapply) diff --git a/NEWS.md b/NEWS.md index 613ff3d314ed923d54617b34aab89466e5221be6..025519f7b365a1f180655e23a7b3a438809ba0b9 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,22 @@ +# CSIndicators 1.1.2 (Release date: 2025-03-27) + +### Fixes +- Avoid using 'builtin' type functions in calls to multiApply::Apply() +- CST_PeriodStandardization: Set na.rm = TRUE when replacing infinite values with maximum/minimum period values +- CST_PeriodPET(): Add longname and other variable metadata +- Do not drop singleton time dimensions in: + PeriodAccumulation + PeriodMean + PeriodMax + PeriodMin + PeriodVariance + AccumulationExceedingThreshold + TotalSpellTimeExceedingThreshold + TotalTimeExceedingThreshold + +### Other +- Add CONTRIBUTING.md + # CSIndicators 1.1.1 (Release date: 2024-01-24) ### Fixes diff --git a/R/AccumulationExceedingThreshold.R b/R/AccumulationExceedingThreshold.R index b8ae9ae798909c22d8a9ee4b4804ed09af63ac0c..5235bace541754f36d7872c7daca6565261ab36a 100644 --- a/R/AccumulationExceedingThreshold.R +++ b/R/AccumulationExceedingThreshold.R @@ -113,7 +113,7 @@ CST_AccumulationExceedingThreshold <- function(data, threshold, op = '>', diff = na.rm = na.rm, ncores = ncores) data$data <- total data$dims <- dim(total) - data$coords[[time_dim]] <- NULL + data$coords[[time_dim]] <- 1 : length(data$dims[[time_dim]]) if (!is.null(Dates)) { if (!is.null(start) && !is.null(end)) { @@ -128,10 +128,10 @@ CST_AccumulationExceedingThreshold <- function(data, threshold, op = '>', diff = # Create time_bounds time_bounds <- NULL time_bounds$start <- ClimProjDiags::Subset(x = Dates, along = time_dim, - indices = 1, drop = 'selected') 
+ indices = 1, drop = FALSE) time_bounds$end <- ClimProjDiags::Subset(x = Dates, along = time_dim, indices = dim(Dates)[time_dim], - drop = 'selected') + drop = FALSE) # Add Dates in attrs data$attrs$Dates <- time_bounds$start @@ -203,6 +203,7 @@ CST_AccumulationExceedingThreshold <- function(data, threshold, op = '>', diff = #'GDD <- AccumulationExceedingThreshold(data, threshold = 0, start = list(1, 4), #' end = list(31, 10)) #'@import multiApply +#'@importFrom stats setNames #'@export AccumulationExceedingThreshold <- function(data, threshold, op = '>', diff = FALSE, dates = NULL, start = NULL, end = NULL, @@ -436,6 +437,7 @@ AccumulationExceedingThreshold <- function(data, threshold, op = '>', diff = FAL ncores = ncores)$output1 } } + dim(total) <- c(dim(total), setNames(1, time_dim)) return(total) } @@ -476,4 +478,4 @@ AccumulationExceedingThreshold <- function(data, threshold, op = '>', diff = FAL } return(res) -} \ No newline at end of file +} diff --git a/R/MergeRefToExp.R b/R/MergeRefToExp.R index 6d9fd6e88104d58c871254ac52171b4e4fce115d..69f722b7934de0df3185b4042304bd832946dc84 100644 --- a/R/MergeRefToExp.R +++ b/R/MergeRefToExp.R @@ -184,8 +184,10 @@ CST_MergeRefToExp <- function(data1, data2, start1 = NULL, end1 = NULL, names(dim(dates2)) <- time_dim } } - res <- Apply(list(dates1, dates2), target_dims = time_dim, - 'c', output_dims = time_dim, ncores = ncores)$output1 + res <- Apply(list(dates1, dates2), + target_dims = time_dim, + fun = function(x, ...) {c(x, ...)}, + output_dims = time_dim, ncores = ncores)$output1 if (inherits(dates1, 'Date')) { data1$attrs$Dates <- as.Date(res, origin = '1970-01-01') @@ -421,7 +423,8 @@ MergeRefToExp <- function(data1, data2, dates1 = NULL, dates2 = NULL, } } - data1 <- Apply(list(data1, data2), target_dims = time_dim, fun = 'c', + data1 <- Apply(list(data1, data2), target_dims = time_dim, + fun = function(x, ...) {c(x, ...)}, output_dims = time_dim, ncores = ncores)$output1 if (all(names(dim(data1)) %in% data1dims)) { diff --git a/R/PeriodAccumulation.R b/R/PeriodAccumulation.R index 8986e3ff97e070c87550fb8c4c840617f1d2b55f..2adf45f78da157b89aecaa4978a4f3090e2e9de4 100644 --- a/R/PeriodAccumulation.R +++ b/R/PeriodAccumulation.R @@ -129,12 +129,12 @@ CST_PeriodAccumulation <- function(data, start = NULL, end = NULL, data$attrs$Dates <- Dates } if (is.null(rollwidth)) { - data$coords[[time_dim]] <- NULL + data$coords[[time_dim]] <- 1 : length(data$dims[[time_dim]]) if (!is.null(dim(Dates))) { # Create time_bounds time_bounds <- NULL - time_bounds$start <- Subset(Dates, time_dim, 1, drop = 'selected') - time_bounds$end <- Subset(Dates, time_dim, dim(Dates)[time_dim], drop = 'selected') + time_bounds$start <- Subset(Dates, time_dim, 1, drop = FALSE) + time_bounds$end <- Subset(Dates, time_dim, dim(Dates)[time_dim], drop = FALSE) # Add Dates in attrs data$attrs$Dates <- time_bounds$start @@ -222,6 +222,7 @@ CST_PeriodAccumulation <- function(data, start = NULL, end = NULL, #' #'@import multiApply #'@importFrom zoo rollapply +#'@importFrom stats setNames #'@export PeriodAccumulation <- function(data, dates = NULL, start = NULL, end = NULL, time_dim = 'time', rollwidth = NULL, @@ -273,7 +274,7 @@ PeriodAccumulation <- function(data, dates = NULL, start = NULL, end = NULL, if (is.null(rollwidth)) { # period accumulation - total <- Apply(list(data), target_dims = time_dim, fun = sum, + total <- Apply(list(data), target_dims = time_dim, fun = function(...) 
{sum(...)}, na.rm = na.rm, ncores = ncores)$output1 } else { # rolling accumulation @@ -321,7 +322,7 @@ PeriodAccumulation <- function(data, dates = NULL, start = NULL, end = NULL, pos <- match(dimnames, names(dim(total))) total <- aperm(total, pos) } - + dim(total) <- c(dim(total), setNames(1, time_dim)) return(total) } @@ -337,7 +338,9 @@ PeriodAccumulation <- function(data, dates = NULL, start = NULL, end = NULL, } } - data_accum <- rollapply(data = data_vector, width = rollwidth, FUN = sum, na.rm = na.rm) + data_accum <- rollapply(data = data_vector, width = rollwidth, + FUN = function(...) {sum(...)}, + na.rm = na.rm) if (!forwardroll) { data_accum <- c(rep(NA, rollwidth-1), data_accum) } else { diff --git a/R/PeriodMax.R b/R/PeriodMax.R index 3ae23ec4d519572de99059183cc980fb5e40df57..46f09846517677865317337b5dce9beb6415df52 100644 --- a/R/PeriodMax.R +++ b/R/PeriodMax.R @@ -83,8 +83,8 @@ CST_PeriodMax <- function(data, start = NULL, end = NULL, data$data <- total data$dims <- dim(total) - data$coords[[time_dim]] <- NULL - + data$coords[[time_dim]] <- 1 : length(data$dims[[time_dim]]) + if (!is.null(Dates)) { if (!is.null(start) && !is.null(end)) { Dates <- SelectPeriodOnDates(dates = Dates, start = start, end = end, @@ -98,10 +98,10 @@ CST_PeriodMax <- function(data, start = NULL, end = NULL, # Create time_bounds time_bounds <- NULL time_bounds$start <- ClimProjDiags::Subset(x = Dates, along = time_dim, - indices = 1, drop = 'selected') + indices = 1, drop = FALSE) time_bounds$end <- ClimProjDiags::Subset(x = Dates, along = time_dim, indices = dim(Dates)[time_dim], - drop = 'selected') + drop = FALSE) # Add Dates in attrs data$attrs$Dates <- time_bounds$start @@ -163,6 +163,7 @@ CST_PeriodMax <- function(data, start = NULL, end = NULL, #'res <- PeriodMax(data, dates = Dates, start = list(01, 12), end = list(01, 01)) #' #'@import multiApply +#'@importFrom stats setNames #'@export PeriodMax <- function(data, dates = NULL, start = NULL, end = NULL, time_dim = 'time', na.rm = FALSE, ncores = NULL) { @@ -205,9 +206,10 @@ PeriodMax <- function(data, dates = NULL, start = NULL, end = NULL, } } } - total <- Apply(list(data), target_dims = time_dim, fun = max, + total <- Apply(list(data), target_dims = time_dim, fun = function(...) 
{max(...)}, na.rm = na.rm, ncores = ncores)$output1 + + dim(total) <- c(dim(total), setNames(1, time_dim)) return(total) } - diff --git a/R/PeriodMean.R b/R/PeriodMean.R index f58bbeb27c9fc1e12efd8de06b058efd3be737be..16752e4e4fdfda9adf964214afc37525fc9ec1cf 100644 --- a/R/PeriodMean.R +++ b/R/PeriodMean.R @@ -83,7 +83,7 @@ CST_PeriodMean <- function(data, start = NULL, end = NULL, data$data <- total data$dims <- dim(total) - data$coords[[time_dim]] <- NULL + data$coords[[time_dim]] <- 1 : length(data$dims[[time_dim]]) if (!is.null(Dates)) { if (!is.null(start) && !is.null(end)) { @@ -98,10 +98,10 @@ CST_PeriodMean <- function(data, start = NULL, end = NULL, # Create time_bounds time_bounds <- NULL time_bounds$start <- ClimProjDiags::Subset(x = Dates, along = time_dim, - indices = 1, drop = 'selected') + indices = 1, drop = FALSE) time_bounds$end <- ClimProjDiags::Subset(x = Dates, along = time_dim, indices = dim(Dates)[time_dim], - drop = 'selected') + drop = FALSE) # Add Dates in attrs data$attrs$Dates <- time_bounds$start @@ -162,6 +162,7 @@ CST_PeriodMean <- function(data, start = NULL, end = NULL, #'SA <- PeriodMean(data, dates = Dates, start = list(01, 12), end = list(01, 01)) #' #'@import multiApply +#'@importFrom stats setNames #'@export PeriodMean <- function(data, dates = NULL, start = NULL, end = NULL, time_dim = 'time', na.rm = FALSE, ncores = NULL) { @@ -206,6 +207,8 @@ PeriodMean <- function(data, dates = NULL, start = NULL, end = NULL, } total <- Apply(list(data), target_dims = time_dim, fun = mean, na.rm = na.rm, ncores = ncores)$output1 + + dim(total) <- c(dim(total), setNames(1, time_dim)) return(total) } diff --git a/R/PeriodMin.R b/R/PeriodMin.R index 99ce8016a76fca9d80920abb2b44cf89094ad79d..52fb10a91cef7d805d3ff0ac0863e07d6e35b2f9 100644 --- a/R/PeriodMin.R +++ b/R/PeriodMin.R @@ -83,7 +83,7 @@ CST_PeriodMin <- function(data, start = NULL, end = NULL, data$data <- total data$dims <- dim(total) - data$coords[[time_dim]] <- NULL + data$coords[[time_dim]] <- 1 : length(data$dims[[time_dim]]) if (!is.null(Dates)) { if (!is.null(start) && !is.null(end)) { @@ -98,10 +98,10 @@ CST_PeriodMin <- function(data, start = NULL, end = NULL, # Create time_bounds time_bounds <- NULL time_bounds$start <- ClimProjDiags::Subset(x = Dates, along = time_dim, - indices = 1, drop = 'selected') + indices = 1, drop = FALSE) time_bounds$end <- ClimProjDiags::Subset(x = Dates, along = time_dim, indices = dim(Dates)[time_dim], - drop = 'selected') + drop = FALSE) # Add Dates in attrs data$attrs$Dates <- time_bounds$start @@ -163,6 +163,7 @@ CST_PeriodMin <- function(data, start = NULL, end = NULL, #'res <- PeriodMin(data, dates = Dates, start = list(01, 12), end = list(01, 01)) #' #'@import multiApply +#'@importFrom stats setNames #'@export PeriodMin <- function(data, dates = NULL, start = NULL, end = NULL, time_dim = 'time', na.rm = FALSE, ncores = NULL) { @@ -205,8 +206,10 @@ PeriodMin <- function(data, dates = NULL, start = NULL, end = NULL, } } } - total <- Apply(list(data), target_dims = time_dim, fun = min, + total <- Apply(list(data), target_dims = time_dim, + fun = function(...) 
{min(...)}, na.rm = na.rm, ncores = ncores)$output1 + dim(total) <- c(dim(total), setNames(1, time_dim)) return(total) } diff --git a/R/PeriodPET.R b/R/PeriodPET.R index 27d6ecaeeed9d7a9e778426495348ebcd9f519ab..02ddb5f32c9e658a21d467b4b998fdefaa4dea11 100644 --- a/R/PeriodPET.R +++ b/R/PeriodPET.R @@ -91,8 +91,12 @@ CST_PeriodPET <- function(data, pet_method = 'hargreaves', source_files <- lapply(data, function(x) {x$attrs$source_files}) coords <- data[[1]]$coords Dates <- data[[1]]$attrs$Dates + metadata <- data[[1]]$attrs$Variable$metadata - metadata_names <- intersect(names(dim(res)), names(metadata)) + metadata[["PET"]]$longname <- "Potential evapotranspiration" + metadata[["PET"]]$units <- "mm" + + metadata_names <- c(intersect(names(dim(res)), names(metadata)), "PET") suppressWarnings( res <- s2dv_cube(data = res, coords = coords, varName = paste0('PET'), @@ -383,4 +387,4 @@ PeriodPET <- function(data, dates, lat, pet_method = 'hargreaves', pet <- array(pet[which(mask_dates == 1)], dim = dims) } return(pet) -} \ No newline at end of file +} diff --git a/R/PeriodStandardization.R b/R/PeriodStandardization.R index 6ab707e713174dc8ebb41ce8fce3e240946bd05f..dcef84706fc557b3c8802cb800a9e7144698939f 100644 --- a/R/PeriodStandardization.R +++ b/R/PeriodStandardization.R @@ -12,12 +12,12 @@ #'coordinates and therefore, the result will be filled with NA for the #'specific coordinates. When NAs are not removed, if the length of the data for #'a computational step is smaller than 4, there will not be enough data for -#'standarize and the result will be also filled with NAs for that coordinates. +#'standardization and the result will be also filled with NAs for those coordinates. #'About the distribution used to fit the data, there are only two possibilities: -#''log-logistic' and 'Gamma'. The 'Gamma' method only works when only -#'precipitation is provided and other variables are 0 because it is positive +#''log-logistic' and 'Gamma'. The 'Gamma' method works only when precipitation +#'is the sole variable provided, and all other variables are 0 because it is positive #'defined (SPI indicator). When only 'data' is provided ('data_cor' is NULL) the -#'standardization is computed with cross validation. This function is build to +#'standardization is computed with cross validation. This function is built to #'be compatible with other tools in that work with 's2dv_cube' object #'class. The input data must be this object class. If you don't work with #''s2dv_cube', see PeriodStandardization. 
For more information on the SPEI @@ -108,6 +108,7 @@ CST_PeriodStandardization <- function(data, data_cor = NULL, time_dim = 'syear', } } res <- PeriodStandardization(data = data$data, data_cor = data_cor$data, + dates = data$attrs$Dates, time_dim = time_dim, leadtime_dim = leadtime_dim, memb_dim = memb_dim, ref_period = ref_period, @@ -124,7 +125,10 @@ CST_PeriodStandardization <- function(data, data_cor = NULL, time_dim = 'syear', if (is.null(data_cor)) { data$data <- std - data$attrs$Variable$varName <- paste0(data$attrs$Variable$varName, ' standardized') + data_longname <- data$attrs$Variable$metadata[[data$attrs$Variable$varName]]$longname + if (!is.null(data_longname)) { + data$attrs$Variable$metadata[[data$attrs$Variable$varName]]$longname <- paste(data_longname, 'standardized') + } if (return_params) { return(list(spei = data, params = params)) } else { @@ -132,7 +136,10 @@ CST_PeriodStandardization <- function(data, data_cor = NULL, time_dim = 'syear', } } else { data_cor$data <- std - data_cor$attrs$Variable$varName <- paste0(data_cor$attrs$Variable$varName, ' standardized') + data_cor_longname <- data_cor$attrs$Variable$metadata[[data_cor$attrs$Variable$varName]]$longname + if (!is.null(data_cor_longname)) { + data_cor$attrs$Variable$metadata[[data_cor$attrs$Variable$varName]]$longname <- paste(data_cor_longname, 'standardized') + } data_cor$attrs$Datasets <- c(data_cor$attrs$Datasets, data$attrs$Datasets) data_cor$attrs$source_files <- c(data_cor$attrs$source_files, data$attrs$source_files) return(data_cor) @@ -337,7 +344,7 @@ PeriodStandardization <- function(data, data_cor = NULL, dates = NULL, "will not be used.") ref_period <- NULL } else if (!all(unlist(ref_period) %in% years_dates)) { - warning("Parameter 'ref_period' contain years outside the dates. ", + warning("Parameter 'ref_period' contains years outside the dates. 
", "It will not be used.") ref_period <- NULL } else { @@ -610,8 +617,8 @@ PeriodStandardization <- function(data, data_cor = NULL, dates = NULL, } if (handle_infinity) { # could also use "param_error" ?; we are giving it the min/max value of the grid point - spei_mod[is.infinite(spei_mod) & spei_mod < 0] <- min(spei_mod[!is.infinite(spei_mod)]) - spei_mod[is.infinite(spei_mod) & spei_mod > 0] <- max(spei_mod[!is.infinite(spei_mod)]) + spei_mod[is.infinite(spei_mod) & spei_mod < 0] <- min(spei_mod[!is.infinite(spei_mod)],na.rm = TRUE) + spei_mod[is.infinite(spei_mod) & spei_mod > 0] <- max(spei_mod[!is.infinite(spei_mod)],na.rm = TRUE) } if (return_params) { return(list(spei = spei_mod, params = params_result)) @@ -643,4 +650,4 @@ PeriodStandardization <- function(data, data_cor = NULL, dates = NULL, } else { return(NA) } -} \ No newline at end of file +} diff --git a/R/PeriodVariance.R b/R/PeriodVariance.R index 4e1e93e89181fe40b8aa903ec4df8d7fd6a371f4..cc1883b2e9ec0038b6d76c26a2dbe31083fbaba4 100644 --- a/R/PeriodVariance.R +++ b/R/PeriodVariance.R @@ -87,7 +87,7 @@ CST_PeriodVariance <- function(data, start = NULL, end = NULL, data$data <- total data$dims <- dim(total) - data$coords[[time_dim]] <- NULL + data$coords[[time_dim]] <- 1 : length(data$dims[[time_dim]]) if (!is.null(Dates)) { if (!is.null(start) && !is.null(end)) { @@ -102,10 +102,10 @@ CST_PeriodVariance <- function(data, start = NULL, end = NULL, # Create time_bounds time_bounds <- NULL time_bounds$start <- ClimProjDiags::Subset(x = Dates, along = time_dim, - indices = 1, drop = 'selected') + indices = 1, drop = FALSE) time_bounds$end <- ClimProjDiags::Subset(x = Dates, along = time_dim, indices = dim(Dates)[time_dim], - drop = 'selected') + drop = FALSE) # Add Dates in attrs data$attrs$Dates <- time_bounds$start @@ -171,6 +171,7 @@ CST_PeriodVariance <- function(data, start = NULL, end = NULL, #'res <- PeriodVariance(data, dates = Dates, start = list(01, 12), end = list(01, 01)) #' #'@import multiApply +#'@importFrom stats setNames #'@export PeriodVariance <- function(data, dates = NULL, start = NULL, end = NULL, time_dim = 'time', na.rm = FALSE, ncores = NULL) { @@ -216,6 +217,7 @@ PeriodVariance <- function(data, dates = NULL, start = NULL, end = NULL, total <- Apply(list(data), target_dims = time_dim, fun = .periodvariance, na.rm = na.rm, ncores = ncores)$output1 + dim(total) <- c(dim(total), setNames(1, time_dim)) return(total) } diff --git a/R/SelectPeriodOnData.R b/R/SelectPeriodOnData.R index 1cc2792ed9a1bb3cdd909fd99a4a1a1db6402559..731abb5c47320617b81e9b4816acf9b776708c78 100644 --- a/R/SelectPeriodOnData.R +++ b/R/SelectPeriodOnData.R @@ -121,7 +121,7 @@ SelectPeriodOnData <- function(data, dates, start, end, ncores = ncores)$output1 # when 29Feb is included the length of the output changes: regular <- Apply(list(res), target_dims = time_dim, - fun = sum, ncores = ncores)$output1 + fun = function(...) {sum(...)}, ncores = ncores)$output1 dims <- dim(data) dims[names(dims) == time_dim] <- max(regular) if (any(regular != max(regular))) { diff --git a/R/SelectPeriodOnDates.R b/R/SelectPeriodOnDates.R index 550aad94ff28525af253b13a68b80d785f10725f..b95c50912c7f7cacb7b26051957bdb9083dec03b 100644 --- a/R/SelectPeriodOnDates.R +++ b/R/SelectPeriodOnDates.R @@ -53,7 +53,7 @@ SelectPeriodOnDates <- function(dates, start, end, } # when 29Feb is included the length of the output changes: regular <- Apply(list(res), target_dims = time_dim, - fun = sum, ncores = ncores)$output1 + fun = function(...) 
{sum(...)}, ncores = ncores)$output1 dims <- dim(dates) dims[names(dims) == time_dim] <- max(regular) if (any(regular != max(regular))) { diff --git a/R/TotalSpellTimeExceedingThreshold.R b/R/TotalSpellTimeExceedingThreshold.R index 14507036558203064f334eeef869b8cc4dd51214..72a51e0a7d0cd06a85d5b86949c1517da2028768 100644 --- a/R/TotalSpellTimeExceedingThreshold.R +++ b/R/TotalSpellTimeExceedingThreshold.R @@ -115,7 +115,7 @@ CST_TotalSpellTimeExceedingThreshold <- function(data, threshold, spell, op = '> ncores = ncores) data$data <- total data$dims <- dim(total) - data$coords[[time_dim]] <- NULL + data$coords[[time_dim]] <- 1 : length(data$dims[[time_dim]]) if (!is.null(Dates)) { if (!is.null(start) && !is.null(end)) { @@ -130,10 +130,10 @@ CST_TotalSpellTimeExceedingThreshold <- function(data, threshold, spell, op = '> # Create time_bounds time_bounds <- NULL time_bounds$start <- ClimProjDiags::Subset(x = Dates, along = time_dim, - indices = 1, drop = 'selected') + indices = 1, drop = FALSE) time_bounds$end <- ClimProjDiags::Subset(x = Dates, along = time_dim, indices = dim(Dates)[time_dim], - drop = 'selected') + drop = FALSE) # Add Dates in attrs data$attrs$Dates <- time_bounds$start @@ -219,6 +219,7 @@ CST_TotalSpellTimeExceedingThreshold <- function(data, threshold, spell, op = '> #' end = list(21, 6)) #' #'@import multiApply +#'@importFrom stats setNames #'@export TotalSpellTimeExceedingThreshold <- function(data, threshold, spell, op = '>', dates = NULL, start = NULL, end = NULL, @@ -435,6 +436,7 @@ TotalSpellTimeExceedingThreshold <- function(data, threshold, spell, op = '>', ncores = ncores)$output1 } } + dim(total) <- c(dim(total), setNames(1, time_dim)) return(total) } @@ -484,4 +486,4 @@ TotalSpellTimeExceedingThreshold <- function(data, threshold, spell, op = '>', return(days) }))) return(total) -} \ No newline at end of file +} diff --git a/R/TotalTimeExceedingThreshold.R b/R/TotalTimeExceedingThreshold.R index 15b822c6ee37a8aea19c9f0c12d4cd91710cde44..31cbc585ddc3594a37c1da486b396da4b5c0ebb4 100644 --- a/R/TotalTimeExceedingThreshold.R +++ b/R/TotalTimeExceedingThreshold.R @@ -119,7 +119,7 @@ CST_TotalTimeExceedingThreshold <- function(data, threshold, op = '>', ncores = ncores) data$data <- total data$dims <- dim(total) - data$coords[[time_dim]] <- NULL + data$coords[[time_dim]] <- 1 : length(data$dims[[time_dim]]) if (!is.null(Dates)) { if (!is.null(start) && !is.null(end)) { @@ -134,10 +134,10 @@ CST_TotalTimeExceedingThreshold <- function(data, threshold, op = '>', # Create time_bounds time_bounds <- NULL time_bounds$start <- ClimProjDiags::Subset(x = Dates, along = time_dim, - indices = 1, drop = 'selected') + indices = 1, drop = FALSE) time_bounds$end <- ClimProjDiags::Subset(x = Dates, along = time_dim, indices = dim(Dates)[time_dim], - drop = 'selected') + drop = FALSE) # Add Dates in attrs data$attrs$Dates <- time_bounds$start @@ -221,6 +221,7 @@ CST_TotalTimeExceedingThreshold <- function(data, threshold, op = '>', #' start = list(21, 4), end = list(21, 6)) #' #'@import multiApply +#'@importFrom stats setNames #'@export TotalTimeExceedingThreshold <- function(data, threshold, op = '>', dates = NULL, start = NULL, end = NULL, @@ -431,6 +432,7 @@ TotalTimeExceedingThreshold <- function(data, threshold, op = '>', ncores = ncores)$output1 } } + dim(total) <- c(dim(total), setNames(1, time_dim)) return(total) } @@ -467,4 +469,4 @@ TotalTimeExceedingThreshold <- function(data, threshold, op = '>', } } return(res) -} \ No newline at end of file +} diff --git 
a/inst/CITATION b/inst/CITATION index af27d374775d31dab7fadd43ede5747e0e67fe31..5e2c82a00dab07e2cc251caa1ba4169414b6fd2e 100644 --- a/inst/CITATION +++ b/inst/CITATION @@ -17,7 +17,7 @@ bibentry( author = c(person("Núria", "Pérez-Zanón", email = "nuria.perez@bsc.es"), person("", "et al.")), title = "CSIndicators: Get tailored climate indicators for applications in your sector", doi = "10.1016/j.cliser.2023.100393", - url = "https://www.sciencedirect.com/science/article/pii/S2405880723000547", + url = URLencode("https://doi.org/10.1016/j.cliser.2023.100393"), journal = "Climate Services", publisher = "Elsevier", year = "2023" diff --git a/man/CST_PeriodStandardization.Rd b/man/CST_PeriodStandardization.Rd index fe0dd3b9f59e082ae322e884b94415ea4246c8af..95f6474d01337915052b2f731fd967f82b745dc4 100644 --- a/man/CST_PeriodStandardization.Rd +++ b/man/CST_PeriodStandardization.Rd @@ -105,12 +105,12 @@ parameter 'na.rm', the standardization cannot be carried out for those coordinates and therefore, the result will be filled with NA for the specific coordinates. When NAs are not removed, if the length of the data for a computational step is smaller than 4, there will not be enough data for -standarize and the result will be also filled with NAs for that coordinates. +standardization and the result will be also filled with NAs for those coordinates. About the distribution used to fit the data, there are only two possibilities: -'log-logistic' and 'Gamma'. The 'Gamma' method only works when only -precipitation is provided and other variables are 0 because it is positive +'log-logistic' and 'Gamma'. The 'Gamma' method works only when precipitation +is the sole variable provided, and all other variables are 0 because it is positive defined (SPI indicator). When only 'data' is provided ('data_cor' is NULL) the -standardization is computed with cross validation. This function is build to +standardization is computed with cross validation. This function is built to be compatible with other tools in that work with 's2dv_cube' object class. The input data must be this object class. If you don't work with 's2dv_cube', see PeriodStandardization. For more information on the SPEI diff --git a/tests/testthat/test-AccumulationExceedingThreshold.R b/tests/testthat/test-AccumulationExceedingThreshold.R index 2b557767f2bcf082de89fa72b19198c2c8b1e7fb..2e579b2ad551efcf1a755b9497f6bff807c0362f 100644 --- a/tests/testthat/test-AccumulationExceedingThreshold.R +++ b/tests/testthat/test-AccumulationExceedingThreshold.R @@ -141,36 +141,36 @@ test_that("1. Input checks", { test_that("2. 
Output checks", { expect_equal( AccumulationExceedingThreshold(dat1, 10), - 155 + array(155, c(time = 1)) ) expect_equal( AccumulationExceedingThreshold(dat2_1, 10, time_dim = 'time'), - array(c(375, 390), c(x = 2)) + array(c(375, 390), c(x = 2, time = 1)) ) expect_equal( AccumulationExceedingThreshold(dat2_1, thres2_1, time_dim = 'time'), - array(c(375, 390), c(x = 2)) + array(c(375, 390), c(x = 2, time = 1)) ) expect_equal( AccumulationExceedingThreshold(dat2_1, thres2_1, time_dim = 'x'), - array(c(rep(0,5), seq(23, 79, 4)), c(time = 20)) + array(c(rep(0, 5), seq(23, 79, 4)), c(time = 20, x = 1)) ) expect_equal( AccumulationExceedingThreshold(dat2_1, 10, time_dim = 'time'), - array(c(375, 390), c(x = 2)) + array(c(375, 390), c(x = 2, time = 1)) ) # dimensions expect_equal( dim(AccumulationExceedingThreshold(dat2_3, thres2_3)), - c(sdate = 2, lat = 2) + c(sdate = 2, lat = 2, time = 1) ) expect_equal( dim(AccumulationExceedingThreshold(dat2_3, thres2_4)), - c(sdate = 2, lat = 2) + c(sdate = 2, lat = 2, time = 1) ) expect_equal( dim(AccumulationExceedingThreshold(dat2_4, thres2_4, time_dim = 'time')), - c(fyear = 3, sdate = 2, lat = 2) + c(fyear = 3, sdate = 2, lat = 2, time = 1) ) }) @@ -180,27 +180,27 @@ test_that("3. Output checks", { expect_equal( dim(AccumulationExceedingThreshold(dat3_1, c(55,58), c('<', '>'))), - c(fyear = 3, sdate = 2, lat = 2) + c(fyear = 3, sdate = 2, lat = 2, time = 1) ) expect_equal( AccumulationExceedingThreshold(dat3_1, c(55,58), c(">", "<")), - array(c(rep(0,11),113), dim = c(fyear = 3, sdate = 2, lat = 2)) + array(c(rep(0, 11), 113), dim = c(fyear = 3, sdate = 2, lat = 2, time = 1)) ) expect_equal( AccumulationExceedingThreshold(dat3_1, c(55,58), c(">=", "<=")), - array(c(rep(0,10),55,171), dim = c(fyear = 3, sdate = 2, lat = 2)) + array(c(rep(0, 10), 55, 171), dim = c(fyear = 3, sdate = 2, lat = 2, time = 1)) ) expect_equal( AccumulationExceedingThreshold(dat3_2, c(46, 35), op = c("<", ">"), time_dim = 'time'), - array(c(76, 114), c(x = 2)) + array(c(76, 114), c(x = 2, time = 1)) ) expect_equal( AccumulationExceedingThreshold(dat3_2, c(7,11), op = c('>=', '<='), time_dim = 'time'), - array(c(27, 18), c(x = 2)) + array(c(27, 18), c(x = 2, time = 1)) ) expect_equal( dim(AccumulationExceedingThreshold(dat2_3, list(thres2_4, thres2_4+1), op = c('>=', '<'))), - c(sdate = 2, lat = 2) + c(sdate = 2, lat = 2, time = 1) ) }) @@ -211,7 +211,7 @@ test_that("4. Output checks", { expect_equal( dim(AccumulationExceedingThreshold(dat4, list(thres4_2, thres4_1), c('<=', '>'))), - c(fyear = 3, sdate = 2, lat = 2) + c(fyear = 3, sdate = 2, lat = 2, time = 1) ) expect_equal( as.vector(AccumulationExceedingThreshold(dat4, list(thres4_1, thres4_2), c(">", "<="))[1:3]), @@ -244,7 +244,7 @@ test_that("5. Seasonal forecasts", { res <- CST_AccumulationExceedingThreshold(exp, threshold = 280, time_dim = 'ftime') expect_equal( - round(res$data[, 2, 2, 2]), + round(res$data[, 2, 2, 2, 1]), c(0, 280, 281, 281) ) @@ -267,12 +267,12 @@ test_that("5. Seasonal forecasts", { start = list(1, 4), end = list(31, 10), na.rm = TRUE) expect_equal( - round(GDD[,1,1,1]), + round(GDD[, 1, 1, 1, 1]), c(549, 387, 125, 554, 245, 282) ) expect_equal( dim(GDD), - c(member = 6, sdate = 3, lat = 4, lon = 4) + c(member = 6, sdate = 3, lat = 4, lon = 4, time = 1) ) expect_error( AccumulationExceedingThreshold(exp - 17, threshold = 0, dates = Dates, start = list(1, 4), end = list(31, 10), time_dim = 'ftime'), @@ -291,19 +291,19 @@ test_that("5. 
Seasonal forecasts", { expect_equal( AccumulationExceedingThreshold(input_1, threshold_1, diff = TRUE), - 153 + array(153, c(time = 1)) ) expect_equal( AccumulationExceedingThreshold(input_1, threshold_1), - 204 + array(204, c(time = 1)) ) expect_equal( AccumulationExceedingThreshold(input_2, threshold_2, op = '<'), - -105 + array(-105, c(time = 1)) ) expect_equal( AccumulationExceedingThreshold(input_2, threshold_2, op = '<', diff = TRUE), - -55 + array(-55, c(time = 1)) ) }) @@ -321,7 +321,7 @@ test_that("6. Subset Dates and check time_bounds", { # Check Dates expect_equal( dim(res$data)['sdate'], - dim(res$attrs$Dates) + dim(res$attrs$Dates)['sdate'] ) # Check time_bounds expect_equal( diff --git a/tests/testthat/test-PeriodAccumulation.R b/tests/testthat/test-PeriodAccumulation.R index 9dcbcf9c44cb96b10cbe0c520e875e7ad70dfd8e..3aa47ea1a7da387fbaf5d35c5ea3f0b9f71a8485 100644 --- a/tests/testthat/test-PeriodAccumulation.R +++ b/tests/testthat/test-PeriodAccumulation.R @@ -72,12 +72,12 @@ test_that("1. Initial checks", { ) expect_equal( PeriodAccumulation(1:10), - 55 + array(55, c(time = 1)) ) data <- array(1:24, c(sdate = 2, time = 3, lon = 4)) expect_equal( PeriodAccumulation(data), - array(c(9, 12, 27, 30, 45, 48, 63, 66), c(sdate = 2, lon = 4)) + array(c(9, 12, 27, 30, 45, 48, 63, 66), c(sdate = 2, lon = 4, time = 1)) ) # Test dates warning expect_warning( @@ -104,7 +104,7 @@ test_that("2. Seasonal", { output$data <- array(c(sum(exp$data[1,1,21:82,1]), sum(exp$data[1,2,21:82,1]), sum(exp$data[1,3,21:82,1]), sum(exp$data[1,1,21:82,2]), sum(exp$data[1,2,21:82,2]), sum(exp$data[1,3,21:82,2])), - c(memb = 1, sdate = 3, lon = 2)) + c(memb = 1, sdate = 3, lon = 2, ftime = 1)) expect_equal( CST_PeriodAccumulation(exp, start = list(21, 4), end = list(21, 6), @@ -127,16 +127,16 @@ test_that("3. Subset Dates and check time_bounds", { ) expect_equal( dim(res2$data), - dim(exp$data)[-which(names(dim(exp$data)) == 'ftime')] + c(dim(exp$data)[-which(names(dim(exp$data)) == 'ftime')], ftime = 1) ) # Check Dates expect_equal( dim(res$data)['sdate'], - dim(res$attrs$Dates) + dim(res$attrs$Dates)['sdate'] ) expect_equal( dim(res2$data)['sdate'], - dim(res2$attrs$Dates) + dim(res2$attrs$Dates)['sdate'] ) # Check time_bounds expect_equal( @@ -196,29 +196,29 @@ test_that("4. 
Rolling", { # Output checks expect_equal( PeriodAccumulation(data = dat2, rollwidth = -2, dates = dates2, frequency = 'daily'), - array(c(4,6,8, 10, NA, NA), dim = c(sdate = 2, time = 3, member = 1)) + array(c(4, 6, 8, 10, NA, NA), dim = c(sdate = 2, time = 3, member = 1, time = 1)) ) expect_equal( PeriodAccumulation(data = dat2, rollwidth = 3, dates = dates2, frequency = 'daily'), - array(c(rep(NA, 4), 9, 12), dim = c(sdate = 2, time = 3, member = 1)) + array(c(rep(NA, 4), 9, 12), dim = c(sdate = 2, time = 3, member = 1, time = 1)) ) dat2_1 <- dat2 dat2_1[1,1,1] <- NA expect_equal( PeriodAccumulation(data = dat2_1, rollwidth = 2, dates = dates2, na.rm = FALSE, frequency = 'daily'), - array(c(rep(NA, 3), 6,8,10), dim = c(sdate = 2, time = 3, member = 1)) + array(c(rep(NA, 3), 6, 8, 10), dim = c(sdate = 2, time = 3, member = 1, time = 1)) ) # Test rolling with start and end expect_equal( PeriodAccumulation(data = dat2, rollwidth = 1, dates = dates2, start = list(1, 4), end = list(2, 4), frequency = 'daily'), - array(c(1, 2, 3, 4), dim = c(sdate = 2, time = 2, member = 1)) + array(c(1, 2, 3, 4), dim = c(sdate = 2, time = 2, member = 1, time = 1)) ) expect_equal( PeriodAccumulation(data = dat2, rollwidth = 2, dates = dates2, start = list(1, 4), end = list(2, 4), frequency = 'daily'), - array(c(NA, NA, 4, 6), dim = c(sdate = 2, time = 2, member = 1)) + array(c(NA, NA, 4, 6), dim = c(sdate = 2, time = 2, member = 1, time = 1)) ) }) diff --git a/tests/testthat/test-PeriodMax.R b/tests/testthat/test-PeriodMax.R index 1d7437a40f90f272e7740ef0c4a838bb77cb72ea..ae29bb58e6c6b934f38a0168d8c2db5598050b3a 100644 --- a/tests/testthat/test-PeriodMax.R +++ b/tests/testthat/test-PeriodMax.R @@ -23,7 +23,7 @@ test_that("1. Sanity Checks", { suppressWarnings( expect_equal( PeriodMax(array(1, c(x = 1)), time_dim = 'x'), - 1 + array(1, c(x = 1)) ) ) expect_error( @@ -33,7 +33,7 @@ test_that("1. Sanity Checks", { suppressWarnings( expect_equal( PeriodMax(array(1:10, c(time = 10))), - 10 + array(10, c(time = 1)) ) ) data <- array(1:24, c(sdate = 2, time = 3, lon = 4)) @@ -41,7 +41,7 @@ test_that("1. Sanity Checks", { expect_equal( PeriodMax(data), array(c(5, 6, 11, 12, 17, 18, 23, 24), - c(sdate = 2, lon = 4)) + c(sdate = 2, lon = 4, time = 1)) ) ) # Test dates warning @@ -81,7 +81,7 @@ test_that("2. Seasonal", { output$data <- array(c(max(exp$data[1,1,21:82,1]), max(exp$data[1,2,21:82,1]), max(exp$data[1,3,21:82,1]), max(exp$data[1,1,21:82,2]), max(exp$data[1,2,21:82,2]), max(exp$data[1,3,21:82,2])), - c(memb = 1, sdate = 3, lon = 2)) + c(memb = 1, sdate = 3, lon = 2, time = 1)) expect_equal( CST_PeriodMax(exp, start = list(21, 4), end = list(21, 6))$data, output$data @@ -101,7 +101,7 @@ test_that("3. Subset Dates and check time_bounds", { # Check Dates expect_equal( dim(res$data)['sdate'], - dim(res$attrs$Dates) + dim(res$attrs$Dates)['sdate'] ) # Check time_bounds expect_equal( diff --git a/tests/testthat/test-PeriodMean.R b/tests/testthat/test-PeriodMean.R index 9f8c4cf15faa16229c2ccf223ee3df350c1e72ee..6eab8eed610afc529a1d8d62caf6ac07c00afa87 100644 --- a/tests/testthat/test-PeriodMean.R +++ b/tests/testthat/test-PeriodMean.R @@ -10,7 +10,7 @@ test_that("1. Sanity Checks", { suppressWarnings( expect_equal( PeriodMean(array(1, c(x = 1)), time_dim = 'x'), - 1 + array(1, c(x = 1)) ) ) expect_error( @@ -33,7 +33,7 @@ test_that("1. 
Sanity Checks", { suppressWarnings( expect_equal( PeriodMean(array(1:10, c(time = 10))), - 5.5 + array(5.5, c(time = 1)) ) ) data <- array(1:24, c(sdate = 2, time = 3, lon = 4)) @@ -41,7 +41,7 @@ test_that("1. Sanity Checks", { expect_equal( PeriodMean(data), array(c(3, 4, 9, 10, 15, 16, 21, 22), - c(sdate = 2, lon = 4)) + c(sdate = 2, lon = 4, time = 1)) ) ) # Test dates warning @@ -81,7 +81,7 @@ test_that("2. Seasonal", { output$data <- array(c(mean(exp$data[1,1,21:82,1]), mean(exp$data[1,2,21:82,1]), mean(exp$data[1,3,21:82,1]), mean(exp$data[1,1,21:82,2]), mean(exp$data[1,2,21:82,2]), mean(exp$data[1,3,21:82,2])), - c(memb = 1, sdate = 3, lon = 2)) + c(memb = 1, sdate = 3, lon = 2, time = 1)) expect_equal( CST_PeriodMean(exp, start = list(21, 4), end = list(21, 6))$data, output$data @@ -101,7 +101,7 @@ test_that("3. Subset Dates and check time_bounds", { # Check Dates expect_equal( dim(res$data)['sdate'], - dim(res$attrs$Dates) + dim(res$attrs$Dates)['sdate'] ) # Check time_bounds expect_equal( diff --git a/tests/testthat/test-PeriodMin.R b/tests/testthat/test-PeriodMin.R index fb97fc206681fcbcb773e42dab4aa5f0af3b9449..51e671b7b87c131034e0e391a42db13712045df7 100644 --- a/tests/testthat/test-PeriodMin.R +++ b/tests/testthat/test-PeriodMin.R @@ -10,7 +10,7 @@ test_that("1. Sanity Checks", { suppressWarnings( expect_equal( PeriodMin(array(1, c(x = 1)), time_dim = 'x'), - 1 + array(1, c(x = 1)) ) ) expect_error( @@ -32,13 +32,13 @@ test_that("1. Sanity Checks", { ) expect_equal( PeriodMin(array(1:10, c(time = 10))), - 1 + array(1, c(time = 1)) ) data <- array(1:24, c(sdate = 2, time = 3, lon = 4)) expect_equal( PeriodMin(data), array(c(1, 2, 7, 8, 13, 14, 19, 20), - c(sdate = 2, lon = 4)) + c(sdate = 2, lon = 4, time = 1)) ) # Test dates warning expect_warning( @@ -77,7 +77,7 @@ test_that("2. Seasonal", { output$data <- array(c(min(exp$data[1,1,21:82,1]), min(exp$data[1,2,21:82,1]), min(exp$data[1,3,21:82,1]), min(exp$data[1,1,21:82,2]), min(exp$data[1,2,21:82,2]), min(exp$data[1,3,21:82,2])), - c(memb = 1, sdate = 3, lon = 2)) + c(memb = 1, sdate = 3, lon = 2, time = 1)) expect_equal( CST_PeriodMin(exp, start = list(21, 4), end = list(21, 6))$data, output$data @@ -97,7 +97,7 @@ test_that("3. Subset Dates and check time_bounds", { # Check Dates expect_equal( dim(res$data)['sdate'], - dim(res$attrs$Dates) + dim(res$attrs$Dates)['sdate'] ) # Check time_bounds expect_equal( diff --git a/tests/testthat/test-PeriodStandardization.R b/tests/testthat/test-PeriodStandardization.R index 7673db2d070cef7f71506bf80acbef78694ae42e..4def61da1641df7e5fe61f82c4384f647e8e7252 100644 --- a/tests/testthat/test-PeriodStandardization.R +++ b/tests/testthat/test-PeriodStandardization.R @@ -38,7 +38,15 @@ dims3 <- c(syear = 6, time = 2, lat = 2, ensemble = 25) set.seed(1) dat3 <- array(abs(rnorm(600, 21.19, 25.64)), dim = dims) - +# dat4 +test <- NULL +test$data <- array(rnorm(600, -204.1, 78.1), dim = dims) +test$dims <- dims +test$coords <- setNames(as.list(as.numeric(test$dims)), names(test$dims)) +test$attrs <- list(Variable = list(varName = 'prlr', + metadata = list(units = "m s-1", + longname = "Total precipitation"))) +class(test) <- 's2dv_cube' ############################################## test_that("1. Initial checks CST_PeriodStandardization", { @@ -130,7 +138,7 @@ test_that("1. Initial checks PeriodStandardization", { expect_warning( PeriodStandardization(data = dat1, ref_period = list(2020, 2021), dates = dates1), - paste0("Parameter 'ref_period' contain years outside the dates. 
", + paste0("Parameter 'ref_period' contains years outside the dates. ", "It will not be used.") ) # handle_infinity @@ -152,11 +160,18 @@ test_that("1. Initial checks PeriodStandardization", { test_that("2. Output checks", { # CST_PeriodStandardization + prlr <- CST_PeriodStandardization(data = test) + expect_equal( + names(prlr), + c("data", "dims", "coords", "attrs") + ) + SPEI_s2dv_cube <- CST_PeriodStandardization(data = cube1) expect_equal( names(SPEI_s2dv_cube), - c('data', 'attrs') + c('data') ) + # PeriodStandardization SPEI <- PeriodStandardization(data = dat1) expect_equal( diff --git a/tests/testthat/test-PeriodVariance.R b/tests/testthat/test-PeriodVariance.R index e1de0327cd029d4283e443a5e31780939c50b074..e5c72754a8ad832a5c08939814ead2cb52710a6f 100644 --- a/tests/testthat/test-PeriodVariance.R +++ b/tests/testthat/test-PeriodVariance.R @@ -9,7 +9,7 @@ test_that("1. Sanity Checks", { ) expect_equal( PeriodVariance(array(1:2, c(x = 2)), time_dim = 'x'), - 0.5 + array(0.5, c(x = 1)) ) expect_error( PeriodVariance(data = NULL), @@ -30,14 +30,14 @@ test_that("1. Sanity Checks", { ) expect_equal( PeriodVariance(array(1:10, c(time = 10))), - 9.166667, + array(9.166667, c(time = 1)), tolerance = 0.001 ) data <- array(1:24, c(sdate = 2, time = 3, lon = 4)) expect_equal( PeriodVariance(data), array(rep(4, 8), - c(sdate = 2, lon = 4)) + c(sdate = 2, lon = 4, time = 1)) ) # Test dates warning expect_warning( @@ -76,7 +76,7 @@ test_that("2. Seasonal", { output$data <- array(c(var(exp$data[1,1,21:82,1]), var(exp$data[1,2,21:82,1]), var(exp$data[1,3,21:82,1]), var(exp$data[1,1,21:82,2]), var(exp$data[1,2,21:82,2]), var(exp$data[1,3,21:82,2])), - c(memb = 1, sdate = 3, lon = 2)) + c(memb = 1, sdate = 3, lon = 2, time = 1)) expect_equal( CST_PeriodVariance(exp, start = list(21, 4), end = list(21, 6))$data, output$data @@ -96,7 +96,7 @@ test_that("3. Subset Dates and check time_bounds", { # Check Dates expect_equal( dim(res$data)['sdate'], - dim(res$attrs$Dates) + dim(res$attrs$Dates)['sdate'] ) # Check time_bounds expect_equal( diff --git a/tests/testthat/test-TotalSpellTimeExceedingThreshold.R b/tests/testthat/test-TotalSpellTimeExceedingThreshold.R index e65ec1e8523a3be3317378d895dc2643465d8719..d9a226e47e9710c9f4ef57f04023b78c2fe83d1f 100644 --- a/tests/testthat/test-TotalSpellTimeExceedingThreshold.R +++ b/tests/testthat/test-TotalSpellTimeExceedingThreshold.R @@ -156,36 +156,36 @@ test_that("1. Sanity checks", { test_that("2. 
Output checks", { expect_equal( TotalSpellTimeExceedingThreshold(dat1, thres1, spell = 2, time_dim = 'time'), - array(c(0,rep(2,9)), c(lat = 10)) + array(c(0, rep(2, 9)), c(lat = 10, time = 1)) ) expect_equal( TotalSpellTimeExceedingThreshold(dat1_2, 10, spell = 2), - array(c(15, 15), c(x = 2)) + array(c(15, 15), c(x = 2, time = 1)) ) expect_equal( TotalSpellTimeExceedingThreshold(dat1_2,threshold1_2, spell = 2), - array(c(15, 15), c(x = 2, member = 1)) + array(c(15, 15), c(x = 2, member = 1, time = 1)) ) expect_equal( TotalSpellTimeExceedingThreshold(dat2_1, thres2_1, spell = 10), - array(c(15, 15), c(x = 2)) + array(c(15, 15), c(x = 2, time = 1)) ) expect_equal( TotalSpellTimeExceedingThreshold(dat2_1, thres2_1, spell = 2, time_dim = 'x'), - array(c(rep(0,5), rep(2,15)), c(time = 20)) + array(c(rep(0, 5), rep(2, 15)), c(time = 20, x = 1)) ) # dimensions expect_equal( dim(TotalSpellTimeExceedingThreshold(dat2_3, thres2_3, spell = 3)), - c(sdate = 2, lat = 2) + c(sdate = 2, lat = 2, time = 1) ) expect_equal( dim(TotalSpellTimeExceedingThreshold(dat2_3, thres2_4, spell = 3)), - c(sdate = 2, lat = 2) + c(sdate = 2, lat = 2, time = 1) ) expect_equal( dim(TotalSpellTimeExceedingThreshold(dat2_4, thres2_4, spell = 3, time_dim = 'time')), - c(fyear = 3, sdate = 2, lat = 2) + c(fyear = 3, sdate = 2, lat = 2, time = 1) ) }) @@ -194,28 +194,28 @@ test_that("2. Output checks", { test_that("3. Output checks", { expect_equal( - dim(TotalSpellTimeExceedingThreshold(dat3_1, c(55,58), spell = 3, c('<', '>'))), - c(fyear = 3, sdate = 2, lat = 2) + dim(TotalSpellTimeExceedingThreshold(dat3_1, c(55, 58), spell = 3, c('<', '>'))), + c(fyear = 3, sdate = 2, lat = 2, time = 1) ) expect_equal( - TotalSpellTimeExceedingThreshold(dat3_1, c(30,60), spell = 3, c(">", "<")), - array(c(rep(0,6),rep(5, 5), 4), dim = c(fyear = 3, sdate = 2, lat = 2)) + TotalSpellTimeExceedingThreshold(dat3_1, c(30, 60), spell = 3, c(">", "<")), + array(c(rep(0, 6),rep(5, 5), 4), dim = c(fyear = 3, sdate = 2, lat = 2, time = 1)) ) expect_equal( - TotalSpellTimeExceedingThreshold(dat3_1, c(55,58), spell = 3, c(">=", "<=")), - array(c(rep(0,11),3), dim = c(fyear = 3, sdate = 2, lat = 2)) + TotalSpellTimeExceedingThreshold(dat3_1, c(55, 58), spell = 3, c(">=", "<=")), + array(c(rep(0, 11),3), dim = c(fyear = 3, sdate = 2, lat = 2, time = 1)) ) expect_equal( TotalSpellTimeExceedingThreshold(dat3_2, c(46, 35), spell = 3, op = c("<", ">"), time_dim = 'time'), - array(c(0, 3), c(x = 2)) + array(c(0, 3), c(x = 2, time = 1)) ) expect_equal( TotalSpellTimeExceedingThreshold(dat3_2, c(7,11), spell = 3, op = c('>=', '<='), time_dim = 'time'), - array(c(3, 0), c(x = 2)) + array(c(3, 0), c(x = 2, time = 1)) ) expect_equal( dim(TotalSpellTimeExceedingThreshold(dat2_3, list(thres2_4, thres2_4+1), spell = 3, op = c('>=', '<'))), - c(sdate = 2, lat = 2) + c(sdate = 2, lat = 2, time = 1) ) }) @@ -226,7 +226,7 @@ test_that("4. Output checks", { expect_equal( dim(TotalSpellTimeExceedingThreshold(dat4, list(thres4_2, thres4_1), spell = 3, c('<=', '>'))), - c(fyear = 3, sdate = 2, lat = 2) + c(fyear = 3, sdate = 2, lat = 2, time = 1) ) expect_equal( as.vector(TotalSpellTimeExceedingThreshold(dat4, list(thres4_1, thres4_2), spell = 3, c(">", "<="))[1:3]), @@ -254,20 +254,20 @@ test_that("5. 
Seasonal Forecasts", { exp$data <- exp$data[1,1:3,1:3,,,] res <- CST_TotalSpellTimeExceedingThreshold(exp, threshold = 260, spell = 2, time_dim = 'ftime') expect_equal( - res$data[,,1,1], + res$data[, , 1, 1, 1], array(c(3,3,3,3,3,3,3,3,3), c(member = 3, sdate = 3)) ) # compare with percentile thresholdP <- Threshold(exp$data, threshold = 0.9) WSDI <- CST_TotalSpellTimeExceedingThreshold(exp, threshold = thresholdP, spell = 2, time_dim = 'ftime') expect_equal( - WSDI$data[3,3,3,], + WSDI$data[3, 3, 3, ,1], c(rep(0, 6), rep(2, 2), 0, 2, 0, 2, rep(0, 29), 2, rep(0, 11)) ) thresholdP1 <- thresholdP[1,,] WSDI1 <- CST_TotalSpellTimeExceedingThreshold(exp, threshold = thresholdP1, spell = 2, time_dim = 'ftime') expect_equal( - WSDI1$data[3,3,3,], + WSDI1$data[3, 3, 3, ,1], c(rep(0, 53))) }) @@ -285,7 +285,7 @@ test_that("6. Subset Dates and check time_bounds", { # Check Dates expect_equal( dim(res$data)['sdate'], - dim(res$attrs$Dates) + dim(res$attrs$Dates)['sdate'] ) # Check time_bounds expect_equal( diff --git a/tests/testthat/test-TotalTimeExceedingThreshold.R b/tests/testthat/test-TotalTimeExceedingThreshold.R index cba27ae03b2ddb5be192a701453f7a8e2cf6b336..53c8a84954f34c8bbb4547a8c00e4e0461c4a33a 100644 --- a/tests/testthat/test-TotalTimeExceedingThreshold.R +++ b/tests/testthat/test-TotalTimeExceedingThreshold.R @@ -149,36 +149,36 @@ test_that("1. Sanity checks", { test_that("2. Output checks", { expect_equal( TotalTimeExceedingThreshold(dat1, thres1, time_dim = 'time'), - array(c(0,rep(2,9)), c(lat = 10)) + array(c(0, rep(2, 9)), c(lat = 10, time = 1)) ) expect_equal( TotalTimeExceedingThreshold(dat1_2, 10), - array(c(15, 15), c(x = 2)) + array(c(15, 15), c(x = 2, time = 1)) ) expect_equal( TotalTimeExceedingThreshold(dat1_2,threshold1_2), - array(c(15, 15), c(x = 2, member = 1)) + array(c(15, 15), c(x = 2, member = 1, time = 1)) ) expect_equal( TotalTimeExceedingThreshold(dat2_1, thres2_1), - array(c(15, 15), c(x = 2)) + array(c(15, 15), c(x = 2, time = 1)) ) expect_equal( TotalTimeExceedingThreshold(dat2_1, thres2_1, time_dim = 'x'), - array(c(rep(0,5), rep(2,15)), c(time = 20)) + array(c(rep(0, 5), rep(2, 15)), c(time = 20, x = 1)) ) # dimensions expect_equal( dim(TotalTimeExceedingThreshold(dat2_3, thres2_3)), - c(sdate = 2, lat = 2) + c(sdate = 2, lat = 2, time = 1) ) expect_equal( dim(TotalTimeExceedingThreshold(dat2_3, thres2_4)), - c(sdate = 2, lat = 2) + c(sdate = 2, lat = 2, time = 1) ) expect_equal( dim(TotalTimeExceedingThreshold(dat2_4, thres2_4, time_dim = 'time')), - c(fyear = 3, sdate = 2, lat = 2) + c(fyear = 3, sdate = 2, lat = 2, time = 1) ) }) @@ -187,27 +187,27 @@ test_that("2. Output checks", { test_that("3. 
Output checks", { expect_equal( dim(TotalTimeExceedingThreshold(dat3_1, c(55,58), c('<', '>'))), - c(fyear = 3, sdate = 2, lat = 2) + c(fyear = 3, sdate = 2, lat = 2, time = 1) ) expect_equal( TotalTimeExceedingThreshold(dat3_1, c(30,60), c(">", "<")), - array(c(rep(0, 6), rep(5, 5), 4), dim = c(fyear = 3, sdate = 2, lat = 2)) + array(c(rep(0, 6), rep(5, 5), 4), dim = c(fyear = 3, sdate = 2, lat = 2, time = 1)) ) expect_equal( TotalTimeExceedingThreshold(dat3_1, c(55, 58), c(">=", "<=")), - array(c(rep(0, 10), 1, 3), dim = c(fyear = 3, sdate = 2, lat = 2)) + array(c(rep(0, 10), 1, 3), dim = c(fyear = 3, sdate = 2, lat = 2, time = 1)) ) expect_equal( TotalTimeExceedingThreshold(dat3_2, c(46, 35), op = c("<", ">"), time_dim = 'time'), - array(c(2, 3), c(x = 2)) + array(c(2, 3), c(x = 2, time = 1)) ) expect_equal( TotalTimeExceedingThreshold(dat3_2, c(7, 11), op = c('>=', '<='), time_dim = 'time'), - array(c(3, 2), c(x = 2)) + array(c(3, 2), c(x = 2, time = 1)) ) expect_equal( dim(TotalTimeExceedingThreshold(dat2_3, list(thres2_4, thres2_4+1), op = c('>=', '<'))), - c(sdate = 2, lat = 2) + c(sdate = 2, lat = 2, time = 1) ) }) @@ -216,7 +216,7 @@ test_that("3. Output checks", { test_that("4. Output checks", { expect_equal( dim(TotalTimeExceedingThreshold(dat4, list(thres4_2, thres4_1), c('<=', '>'))), - c(fyear = 3, sdate = 2, lat = 2) + c(fyear = 3, sdate = 2, lat = 2, time = 1) ) expect_equal( as.vector(TotalTimeExceedingThreshold(dat4, list(thres4_1, thres4_2), c(">", "<="))[1:3]), @@ -245,7 +245,7 @@ test_that("5. Seasonal forecasts", { exp$data <- exp$data[1, 1:3, , , , ] - 247 SU35_NoP <- CST_TotalTimeExceedingThreshold(exp, threshold = 35, time_dim = 'ftime')$data expect_equal( - SU35_NoP[1, , 15, 3], + SU35_NoP[1, , 15, 3, 1], c(0, 1, 1, 1, 0, 0) ) # convert to percentile @@ -255,7 +255,7 @@ test_that("5. Seasonal forecasts", { data$data <- exp_percentile SU35_P <- CST_TotalTimeExceedingThreshold(data, threshold = obs_percentile, time_dim = 'ftime')$data expect_equal( - SU35_P[2, , 5, 5], + SU35_P[2, , 5, 5, 1], c(3, 3, 3, 3, 3, 3) ) }) @@ -274,7 +274,7 @@ test_that("6. Subset Dates and check time_bounds", { # Check Dates expect_equal( dim(res$data)['sdate'], - dim(res$attrs$Dates) + dim(res$attrs$Dates)['sdate'] ) # Check time_bounds expect_equal(