diff --git a/.Rbuildignore b/.Rbuildignore index 0954c2a0092c62f36e7508d2d58568d39b20cdf1..dde50c4ed94c4e248b7326787171e0c149ff9d42 100644 --- a/.Rbuildignore +++ b/.Rbuildignore @@ -1,6 +1,9 @@ -.git -.gitignore -.tar.gz -.pdf -./.nc +.*\.git$ +.*\.gitignore$ +.*\.tar.gz$ +.*\.pdf$ +./.nc$ +.*\.gitlab-ci.yml$ +# Ignore tests when submitting to CRAN +^tests$ diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 0000000000000000000000000000000000000000..049daf74c688c9cf187f54b92f053aaa4b415589 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,10 @@ +stages: + - build +build: + stage: build + script: + - module load R/4.1.2-foss-2015a-bare + - module load CDO/1.9.8-foss-2015a + - R CMD build --resave-data . + - R CMD check --as-cran --no-manual --run-donttest ClimProjDiags_*.tar.gz + - R -e 'covr::package_coverage()' diff --git a/DESCRIPTION b/DESCRIPTION index cbcbdb22375b4d81c1b5604a71730bb9ea718172..26f5f0e8402c732ce51e50731969413c628e89e5 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,13 +1,14 @@ Package: ClimProjDiags Title: Set of Tools to Compute Various Climate Indices -Version: 0.1.3 +Version: 0.2.0 Authors@R: c( person("BSC-CNS", role = c("aut", "cph")), - person("Nuria", "Perez-Zanon", , "nuria.perez@bsc.es", role = c("aut", "cre"), comment = c(ORCID = "0000-0001-8568-3071")), - person("An-Chi", "Ho", , "an.ho@bsc.es", role = "ctb"), + person("Nuria", "Perez-Zanon", , "nuria.perez@bsc.es", role = c("aut"), comment = c(ORCID = "0000-0001-8568-3071")), + person("An-Chi", "Ho", , "an.ho@bsc.es", role = c("cre", "ctb")), person("Nicolau", "Manubens", , "nicolau.manubens@bsc.es", role = "ctb"), person("Alasdair", "Hunter", , "alasdair.hunter@bsc.es", role = "aut"), - person("Louis-Philippe", "Caron", , "louis-philippe.caron@bsc.es", role = "ctb")) + person("Louis-Philippe", "Caron", , "louis-philippe.caron@bsc.es", role = "ctb"), + person("Eva", "Rifà", , "eva.rifarovira@bsc.es", role = "ctb")) Description: Set of tools to compute metrics and indices for climate analysis. The package provides functions to compute extreme indices, evaluate the agreement between models and combine these models into an ensemble. Multi-model @@ -23,14 +24,14 @@ Imports: plyr, climdex.pcic, stats -License: Apache License 2.0 -URL: https://earth.bsc.es/gitlab/es/ClimProjDiags -BugReports: https://earth.bsc.es/gitlab/es/ClimProjDiags/-/issues -Encoding: UTF-8 -RoxygenNote: 5.0.0 Suggests: knitr, testthat, markdown, rmarkdown +License: Apache License 2.0 +URL: https://earth.bsc.es/gitlab/es/ClimProjDiags +BugReports: https://earth.bsc.es/gitlab/es/ClimProjDiags/-/issues +Encoding: UTF-8 +RoxygenNote: 7.2.0 VignetteBuilder: knitr diff --git a/NAMESPACE b/NAMESPACE index 2a5d24344ef87c7cf0e3079730d01696e007d82d..e3226a5a05d98a2802001d21c5e6708f9d79b700 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -11,9 +11,11 @@ export(Extremes) export(Lon2Index) export(SeasonSelect) export(SelBox) +export(ShiftLon) export(Subset) export(Threshold) export(WaveDuration) +export(WeightedCells) export(WeightedMean) import(PCICt) import(climdex.pcic) diff --git a/NEWS.md b/NEWS.md new file mode 100644 index 0000000000000000000000000000000000000000..461ff84550e8216eefd6508bc2c6e6c3931eaf4a --- /dev/null +++ b/NEWS.md @@ -0,0 +1,8 @@ +# 0.2.0 (Release date: 2022-11-04) +- New functions: ShiftLon, WeightedCells +- Bugfix of Subset() when only one dimension is left after subsetting and when +parameter "indices" is not a list. +- Bugfix of WeightedMean() at the grid points that are across positive and +negative longitudes.
+ + diff --git a/R/Extremes.R b/R/Extremes.R index f3e7b16b7e187e6cb20c705bd37b1901bf25c57e..facb6cf4a568356588948eacf246a706ec4dc446 100644 --- a/R/Extremes.R +++ b/R/Extremes.R @@ -25,19 +25,19 @@ #'@import climdex.pcic #'@examples #'##Example synthetic data: -#'data <- 1:(2 * 3 * 372 * 1) -#'dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) -#'time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") +#'data <- 1:(2 * 3 * 310 * 1) +#'dim(data) <- c(time = 310, lon = 2, lat = 3, model = 1) +#'time <- as.POSIXct(paste(sort(rep(1902:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") #'metadata <- list(time = list(standard_name = 'time', long_name = 'time', calendar = 'noleap', #' units = 'days since 1970-01-01 00:00:00', prec = 'double', #' dim = list(list(name = 'time', unlim = FALSE)))) -#'threshold = rep(40, 31) #'attr(time, "variables") <- metadata #'attr(data, 'Variables')$dat1$time <- time +#'threshold <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL) +#'res <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can.span.years = TRUE, +#' max.missing.days = 5, ncores = NULL) +#'str(res) #' -#'a <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can.span.years = TRUE, -#' max.missing.days = 5, ncores = NULL) -#'str(a) #'@export Extremes <- function(data, threshold, op = ">", min.length = 6, spells.can.span.years = TRUE, max.missing.days = 5, dates = NULL, timedim = NULL, calendar = NULL, ncores = NULL) { diff --git a/R/ShiftLon.R b/R/ShiftLon.R new file mode 100644 index 0000000000000000000000000000000000000000..88b034d929f246dc677ab80af4fac430d80e527b --- /dev/null +++ b/R/ShiftLon.R @@ -0,0 +1,136 @@ +#'Shift longitudes of a data array +#' +#'@description Shift the longitudes of a data array. Only reasonable for global +#' longitude shifting. It is useful for map plotting or aligning datasets. +#' +#'@param data A named multidimensional array with at least 'lon_dim' dimension. +#'@param lon A numeric vector of longitudes. The values are expected to be +#' monotonic increasing. +#'@param westB A number indicating the west boundary of the new longitudes. +#'@param lon_dim A character string indicating the name of the longitude +#' dimension in 'data'. The default value is 'lon'. +#'@param ncores An integer indicating the number of cores used for computation. +#' The default value is NULL (use only one core). +#' +#'@return +#'A list of 2: +#'\item{data}{ +#' Array of the shifted data with the same dimensions as parameter 'data'. +#'} +#'\item{lon}{ +#' The monotonic increasing new longitudes with the same length as parameter +#' 'lon' and start at 'westB'. 
+#'} +#' +#'@examples +#'data <- array(data = 1:50, dim = c(lon = 360, lat = 181)) +#'lon <- array(data = 0:359, dim = c(lon = 360)) +#'lat <- -90:90 ## lat does not change +#'shifted <- ShiftLon(data = data, lon = lon, westB = -180, ncores = 1) +#' +#' \dontrun{ +#'s2dv::PlotEquiMap(var = data, lon = lon, lat = lat, filled.continents = FALSE) +#'s2dv::PlotEquiMap(var = shifted$data, lon = shifted$lon, lat = lat, filled.continents = FALSE) +#' } +#' +#'@import multiApply +#'@export +ShiftLon <- function(data, lon, westB, lon_dim = 'lon', ncores = NULL) { + + # Check inputs + ## data + if (is.null(data)) { + stop("Parameter 'data' cannot be NULL.") + } + if (!is.array(data) | !is.numeric(data)) { + stop("Parameter 'data' must be a numeric array.") + } + if(any(is.null(names(dim(data))))| any(nchar(names(dim(data))) == 0)) { + stop("Parameter 'data' must have dimension names.") + } + ## lon_dim + if (!is.character(lon_dim) | length(lon_dim) > 1) { + stop("Parameter 'lon_dim' must be a character string.") + } + if (!(lon_dim %in% names(dim(data)))) { + stop("Parameter 'lon_dim' is not found in 'data' dimensions.") + } + ## lon + if (!is.numeric(lon)) { + stop("Parameter 'lon' must be numeric.") + } + if (!(length(lon) == as.numeric(dim(data)[lon_dim]))) { + stop("The length of 'lon' must be the same as the length of 'lon_dim' in 'data'.") + } + if (any(diff(lon) < 0)) { + stop("Parameter 'lon' must be monotonic increasing.") + } + ## westB + if (!is.numeric(westB)) { + stop("Parameter 'westB' must be numeric.") + } + if (length(westB) != 1) { + westB <- westB[1] + warning("Parameter 'westB' should be a number. Use the first element only.") + } + ## ncores + if (!is.null(ncores)) { + if (!is.numeric(ncores)) { + stop("Parameter 'ncores' must be a positive integer.") + } else if (any(ncores %% 1 != 0) | any(ncores <= 0) | length(ncores) > 1) { + stop("Parameter 'ncores' must be a positive integer.") + } + } + + ############################################# + + # Shifting the longitudes (only needed to do it once) + ## Adjust western boundary of lon vector if necessary + ## If westB is not within lon, adjust it to find 'first' below + shft_westB_back <- 0 + if (westB < min(lon)) { + westB <- westB + 360 + shft_westB_back <- -360 + } else if (westB > max(lon)) { + westB <- westB - 360 + shft_westB_back <- 360 + } + if (westB < min(lon) | westB > (min(lon) + 360)) { + stop("westB is too far away from the 'lon' range.") + } + + ## Find closest index in lon vector + first <- which.min(abs(westB - lon))[1] + if (first == 1) { + new.lon <- lon + } else { + new.lon <- c(lon[first:length(lon)],lon[1:(first-1)]) + } + ## Order to monotonically increasing + if (!all(diff(new.lon) > 0)) { + new.lon[(which(diff(new.lon) < 0) + 1):length(new.lon)] <- new.lon[(which(diff(new.lon) < 0) + 1):length(new.lon)] + 360 + + } + + # Shifting the data + output <- Apply(data = data, + target_dims = lon_dim, + fun = .ShiftLon, + lon = lon, + new.lon = new.lon, + ncores = ncores)$output1 + + # Shift new.lon back to start at westB + new.lon <- new.lon + shft_westB_back + + return(list(data = output, lon = new.lon)) +} + +.ShiftLon <- function(data, lon, new.lon) { + # data: [lon] + new.data <- NA * data + new.data[new.lon %in% lon] <- data[lon %in% new.lon, drop = F] + new.data[!new.lon %in% lon] <- data[!lon %in% new.lon, drop = F] + + return(new.data) +} diff --git a/R/Subset.R b/R/Subset.R index bad89e7b4a200e2e1591155141a177f629dd6b1a..6c2321519dc55a32aa8226cd136c90bc2b7b3667 100644 --- a/R/Subset.R +++ b/R/Subset.R @@ 
-1,22 +1,44 @@ #'Subset a Data Array #' -#'This function allows to subset (i.e. slice, take a chunk of) an array, in a similar way as done in the function \code{take()} in the package plyr. There are two main inprovements:\cr\cr The input array can have dimension names, either in \code{names(dim(x))} or in the attribute 'dimensions', and the dimensions to subset along can be specified via the parameter \code{along} either with integer indices or either by their name.\cr\cr There are additional ways to adjust which dimensions are dropped in the resulting array: either to drop all, to drop none, to drop only the ones that have been sliced or to drop only the ones that have not been sliced.\cr\cr If an array is provided without dimension names, dimension names taken from the parameter \code{dim_names} will be added to the array. -#'This function computes the threshold based on a quantile value for each day of the year of the daily data input. +#'This function allows to subset (i.e. slice, take a chunk of) an array, in a +#'similar way as done in the function \code{take()} in the package plyr. There +#'are two main improvements:\cr\cr First, the input array can have dimension +#'names, either in \code{names(dim(x))} or in the attribute 'dimensions'. If +#'both exist, the attribute 'dimensions' is prioritized. The dimensions to +#'subset along can be specified via the parameter \code{along} either with +#'integer indices or by their name.\cr\cr Second, there are additional +#'ways to adjust which dimensions are dropped in the resulting array: either to +#'drop all, to drop none, to drop only the ones that have been sliced or to drop +#'only the ones that have not been sliced.\cr\cr #' -#'@param x A multidimensional array to be sliced. It can have dimension names either in \code{names(dim(x))} or either in the attribute 'dimensions'. -#'@param along Vector with references to the dimensions to take the subset from: either integers or dimension names. -#'@param indices List of indices to take from each dimension specified in 'along'. If a single dimension is specified in 'along' the indices can be directly provided as a single integer or as a vector. -#'@param drop Whether to drop all the dimensions of length 1 in the resulting array, none, only those that are specified in 'along', or only those that are not specified in 'along'. The possible values are, respectively: 'all' or TRUE, 'none' or FALSE, 'selected', and 'non-selected'. +#'@param x A named multidimensional array to be sliced. It can have dimension +#' names either in \code{names(dim(x))} or in the attribute 'dimensions'. +#'@param along A vector with references to the dimensions to take the subset +#' from: either integers or dimension names. +#'@param indices A list of indices to take from each dimension specified in +#' 'along'. If a single dimension is specified in 'along', it can be directly +#' provided as an integer or a vector. +#'@param drop Whether to drop all the dimensions of length 1 in the resulting +#' array, none, only those that are specified in 'along', or only those that +#' are not specified in 'along'. The possible values are: 'all' or TRUE, 'none' +#' or FALSE, 'selected', and 'non-selected'. The default value is FALSE. #' -#'@return An array with similar dimensions as the \code{x} input, but with trimmed or dropped dimensions. +#'@return An array with similar dimensions as the \code{x} input, but with +#' trimmed or dropped dimensions.
#' #'@examples -#'##Example synthetic data: +#'#Example synthetic data: +#'# Dimension has name already #'data <- 1:(2 * 3 * 372 * 1) #'dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) #'data_subset <- Subset(data, c('time', 'model'), #' list(1:10, TRUE), drop = 'selected') #'dim(data_subset) +#'# Use attributes 'dimensions' +#'data <- array(1:(2 * 3 * 372 * 1), dim = c(2, 3, 372, 1)) +#'attributes(data)[['dimensions']] <- c('lat', 'lon', 'time', 'model') +#'data_subset <- Subset(data, c('lon', 'lat'), list(1, 1), drop = TRUE) +#'dim(data_subset) #' #'@export Subset <- function(x, along, indices, drop = FALSE) { @@ -29,7 +51,10 @@ Subset <- function(x, along, indices, drop = FALSE) { dim_names <- attr(x, 'dimensions') if (!is.character(dim_names)) { dim_names <- names(dim(x)) + } else { + names(dim(x)) <- dim_names } + if (!is.character(dim_names)) { if (any(sapply(along, is.character))) { stop("The input array 'x' doesn't have labels for the dimensions but the parameter 'along' contains dimension names.") @@ -37,7 +62,8 @@ Subset <- function(x, along, indices, drop = FALSE) { } # Check along - if (any(sapply(along, function(x) !is.numeric(x) && !is.character(x)))) { + if (any(sapply(along, function(x) !is.numeric(x) && !is.character(x))) | + length(along) == 0) { stop("All provided dimension indices in 'along' must be integers or character strings.") } if (any(sapply(along, is.character))) { @@ -54,7 +80,14 @@ Subset <- function(x, along, indices, drop = FALSE) { # Check indices if (!is.list(indices)) { - indices <- list(indices) + if (length(along) == 1) { + indices <- list(indices) + } else { + stop("Parameter 'indices' should be a list.") + } + } + if (length(indices) != length(along)) { + stop("Parameter 'along' and 'indices' should have the same length.") } # Check parameter drop @@ -90,25 +123,42 @@ Subset <- function(x, along, indices, drop = FALSE) { if (length(dim_names_to_remove) > 0) { dim_names <- dim_names[-dim_names_to_remove] } + # If there is one dim left, subset won't have dimension (but it should have one). Add it back + if (is.null(dim(subset))) { + if (!identical(dim_names, character(0))) { + # If there is one dim left, subset won't have dimension (but it should + # have one). Add it back. 
+ dim(subset) <- dim(x)[dim_names] + } else { # a number left + dim(subset) <- 1 + } + } } # Amend the final dimensions and put dimnames and attributes metadata <- attributes(x) metadata[['dim']] <- dim(subset) if (length(dims_to_drop) > 0) { - metadata[['dim']] <- metadata[['dim']][-dims_to_drop] + if (length(dims_to_drop) == length(metadata[['dim']])) { # a number left + metadata[['dim']] <- 1 + } else { + metadata[['dim']] <- metadata[['dim']][-dims_to_drop] + } if (is.character(dim_names)) { - names(metadata[['dim']]) <- dim_names[-dims_to_drop] + if (!identical(dim_names[-dims_to_drop], character(0))) { + names(metadata[['dim']]) <- dim_names[-dims_to_drop] + } if ('dimensions' %in% names(attributes(x))) { metadata[['dimensions']] <- dim_names[-dims_to_drop] } } - } else if (is.character(dim_names)) { + } else if (is.character(dim_names) & !identical(dim_names, character(0))) { names(metadata[['dim']]) <- dim_names if ('dimensions' %in% names(attributes(x))) { metadata[['dimensions']] <- dim_names } } + attributes(subset) <- metadata subset } diff --git a/R/Threshold.R b/R/Threshold.R index 181a5f04fd0a9c2385f8d3069de0e0a16babf6d8..c6f4298742096dc2fb31da533de2470b208ca9c4 100644 --- a/R/Threshold.R +++ b/R/Threshold.R @@ -28,6 +28,7 @@ #' #'a <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL) #'str(a) +#' #'@export Threshold <- function(data, dates = NULL, calendar = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL, na.rm = FALSE) { if (is.null(data)) { @@ -102,7 +103,7 @@ Threshold <- function(data, dates = NULL, calendar = NULL, base.range = NULL, qt "element will be used.") } dates <- as.PCICt(dates, cal = calendar) - dates = as.character(dates) + dates <- as.POSIXlt(as.character(dates), format = "%Y-%m-%d") jdays <- as.numeric(strftime(dates, format = "%j")) if (calendar == "gregorian" | calendar == "standard" | calendar == "proleptic_gregorian") { year <- as.numeric(strftime(dates, format = "%Y")) diff --git a/R/WeightedCells.R b/R/WeightedCells.R new file mode 100644 index 0000000000000000000000000000000000000000..884870fa21247d66169666bc3491de63fccc1ff9 --- /dev/null +++ b/R/WeightedCells.R @@ -0,0 +1,103 @@ +#'Compute the cosine of the latitude weighting, or its square root, on the +#'given array. +#' +#'This function performs cosine of the latitude weighting (or square root of +#'the cosine) on the given array. +#' +#'@param data A numeric array with named dimensions, representing the data to +#' which the weights will be applied. It should have at least the latitude +#' dimension and it can have other dimensions. +#'@param lat A numeric vector or array with one dimension containing the +#' latitudes (in degrees). +#'@param lat_dim A character string indicating the name of the latitudinal +#' dimension. The default value is 'lat'. +#'@param method A character string indicating the type of weighting applied: +#' 'cos' (cosine of the latitude) or 'sqrtcos' (square-root of the +#' cosine of the latitude). The default value is 'cos'. +#'@param ncores An integer indicating the number of cores to use for parallel +#' computation. The default value is NULL. +#' +#'@return An array containing the latitude weighted data with the same +#'dimensions as parameter 'data'.
#' +#'@examples +#'exp <- array(rnorm(1:30), dim = c(lat = 3, lon = 5, sdate = 2)) +#'lat <- c(10, 15, 20) +#'res <- WeightedCells(data = exp, lat = lat) +#'@import multiApply +#'@export +WeightedCells <- function(data, lat, lat_dim = 'lat', method = 'cos', ncores = NULL) { + + # Check inputs + ## data + if (is.null(data)) { + stop("Parameter 'data' cannot be NULL.") + } + if (!is.numeric(data)) { + stop("Parameter 'data' must be a numeric array.") + } + if (is.null(dim(data))) { + stop("Parameter 'data' must have at least the latitude dimension.") + } + if(any(is.null(names(dim(data)))) | any(nchar(names(dim(data))) == 0)) { + stop("Parameter 'data' must have dimension names.") + } + + ## lat_dim + if (!is.character(lat_dim) | length(lat_dim) > 1) { + stop("Parameter 'lat_dim' must be a character string.") + } + if (!lat_dim %in% names(dim(data))) { + stop("Parameter 'lat_dim' is not found in 'data'.") + } + + ## lat + if (is.null(lat)) { + stop("Parameter 'lat' cannot be NULL.") + } + if (!is.numeric(lat)) { + stop("Parameter 'lat' must be a numeric vector or array.") + } + if (dim(data)[lat_dim] != length(lat)) { + stop("Length of parameter 'lat' doesn't match the length of ", + "latitudinal dimension in parameter 'data'.") + } + + ## method + if (!method %in% c('cos', 'sqrtcos')) { + stop("Parameter 'method' must be 'cos' or 'sqrtcos'.") + } + + ## ncores + if (!is.null(ncores)) { + if (!is.numeric(ncores) | ncores %% 1 != 0 | ncores <= 0 | + length(ncores) > 1) { + stop("Parameter 'ncores' must be either NULL or a positive integer.") + } + } + + namedims <- names(dim(data)) + lat <- as.vector(lat) + + if (method == 'cos') { + wt <- cos(lat * pi / 180) + } else { + wt <- sqrt(cos(lat * pi / 180)) + } + + res <- Apply(data = data, + target_dims = c(lat_dim), + fun = .WeightedCells, + wt = wt, + ncores = ncores)$output1 + + order <- match(namedims, names(dim(res))) + res <- aperm(res, order) + + return(res) +} + +.WeightedCells <- function(data, wt) { + data <- wt * data + return(data) +} diff --git a/R/WeightedMean.R b/R/WeightedMean.R index 81077ca4b92ddbf5b5cb5d1520cc12063da69cd1..393f56f680ffba9c459d69884043ed55f79a036b 100644 --- a/R/WeightedMean.R +++ b/R/WeightedMean.R @@ -124,11 +124,10 @@ WeightedMean <- function(data, lon, lat, region = NULL, mask = NULL, londim = NU cosphi <- t(array(cos(lat * pi / 180), dim = c(length(lat), length(lon)))) nblat <- length(lat) nblon <- length(lon) - lon[lon > 180] = lon[lon > 180] - 360 - dlon <- abs(c(abs(lon[2 : nblon]) - abs(lon[1 : nblon - 1]))) * pi / 180 + dlon <- diff(lon) * pi / 180 dlon <- c(dlon, dlon[1]) dlon <- array(dlon, dim = c(nblon, nblat)) - dlat <- abs(c(lat[2 : nblat] - lat[1 : nblat - 1])) * pi / 180 + dlat <- diff(lat) * pi / 180 dlat <- c(dlat, dlat[1]) dlat <- t(array(dlat, dim = c(nblat, nblon))) weight <- (dlon * dlat * cosphi) diff --git a/inst/doc/anomaly_agreement.html b/inst/doc/anomaly_agreement.html deleted file mode 100644 index a23f4d8ba46b0eb2c2d4bb04304c939ace0fb4ce..0000000000000000000000000000000000000000 --- a/inst/doc/anomaly_agreement.html +++ /dev/null @@ -1,474 +0,0 @@

Multi-model agreement

Multi-model agreement performs a comparison of climate model projection anomalies. This vignette illustrates step-by-step how to perform a multi-model agreement assessment using ClimProjDiags package functionalities. The following example simulates the case of summer projection temperature anomalies for different models.

1- Load dependencies

This example requires the following system libraries:

The ClimProjDiags R package should be loaded by running the following lines in R, once it is integrated into the CRAN mirror.

library(ClimProjDiags)

All the other R packages involved can be installed directly from CRAN and loaded as follows:

library(abind)
library(s2dverification)
library(ggplot2)

2- Define the problem and the corresponding data and parameters


The aim is to know, compared with a reference period, where and when the models agree on the sign of the projected anomalies.

The illustrative problem is to compare the monthly mean air temperature at 2 m in summer between four different models. The reference period used from the historical simulations to compute the anomalies is 1961 - 1990, while the future scenario chosen is rcp2.6 during the period 2006 - 2100. Finally, the region selected in the northern hemisphere is between -40 - 20 ºE and 25 - 60 ºN.


The parameters are defined by running the next lines in R:

var <- 'tas'

start_climatology <- '1961'
end_climatology <- '1990'

start_projection <- '2006'
end_projection <- '2100'

lat <- seq(25, 60, 5)
lon <- seq(-35, 20, 5)

A synthetic sample of data for the reference period is built by adding random perturbations to a sinusoidal function. The latitudinal behavior of the temperature is included by randomly subtracting a value proportional to the latitude. Furthermore, attributes of time and dimensions are added.

multimodel_historical <- NULL
for (k in 1 : 4) {
  grid1 <- 293 - 10 * cos(2 * pi / 12 * (1 : 360)) + rnorm(360)
  gridlon <- NULL
  for (i in 1 : 12) {
    gridlon <- cbind(gridlon, 
                     grid1 + rnorm(360, sd = 5) * cos(2 * pi / 12 * (1 : 360)))
  }
  gridpoint <- NULL
  for (j in 1 : 8) {
    gridnew <- apply(gridlon, 2, function(x) {x - rnorm(360, mean = j * 0.5, 
                                                        sd = 3)})
    gridpoint <- abind(gridpoint, gridnew, along = 3)
  }
  multimodel_historical <- abind(multimodel_historical, gridpoint, along = 4)
}
multimodel_historical <- InsertDim(multimodel_historical, posdim = 5, lendim = 1)
multimodel_historical <- aperm(multimodel_historical, c(4, 5, 1, 2, 3))
names(dim(multimodel_historical)) <- c("model", "var", "time", "lon", "lat")

time <- seq(ISOdate(1961, 1, 15), ISOdate(1990, 12, 15), "month")
metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
                             calendar = 'proleptic_gregorian',
                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
                             dim = list(list(name = 'time', unlim = FALSE))))
attr(time, "variables") <- metadata
attr(multimodel_historical, 'Variables')$dat1$time <- time

A similar procedure is considered to build the synthetic data for the future projections. However, a small trend is added to make the data somewhat more realistic.

multimodel_projection <- NULL
for (k in 1 : 4) {
  grid1 <- 293 - 10 * cos(2 * pi / 12 * (1 : 1140)) + rnorm(1140) + 
                 (1 : 1140) * rnorm(1, mean = 1.5) / 1140
  gridlon <- NULL
  for (i in 1 : 12) {
    gridlon <- cbind(gridlon, 
                     grid1 + rnorm(1140, sd = 5) * cos(2 * pi / 12 * (1 : 1140)))
  }
  gridpoint <- NULL
  for (j in 1 : 8) {
    gridnew <- apply(gridlon, 2, function(x) {x - rnorm(1140, mean = j * 0.5, 
                                                        sd = 3)})
    gridpoint <- abind(gridpoint, gridnew, along = 3)
  }
  multimodel_projection <- abind(multimodel_projection, gridpoint, along = 4)
}
multimodel_projection <- InsertDim(multimodel_projection, posdim = 5, lendim = 1)
multimodel_projection <- aperm(multimodel_projection, c(4, 5, 1, 2, 3))
names(dim(multimodel_projection)) <- c("model", "var", "time", "lon", "lat")

time <- seq(ISOdate(2006, 1, 15), ISOdate(2100, 12, 15), "month")
metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
                             calendar = 'proleptic_gregorian',
                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
                             dim = list(list(name = 'time', unlim = FALSE))))
attr(time, "variables") <- metadata
attr(multimodel_projection, 'Variables')$dat1$time <- time

Now, two objects called multimodel_historical and multimodel_projection are available in the R environment. The created data can be checked against the next lines (due to the random functions, the results may differ between executions):

> dim(multimodel_historical)
model   var  time   lon   lat 
    4     1   360    12    8 

> summary(multimodel_historical)
   Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
   251.2   281.7   287.9   287.8   294.0   321.6 

> dim(multimodel_projection)
model   var  time   lon   lat 
    4     1  1140    12     8 

> summary(multimodel_projection)
   Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
  254.8   282.8   288.8   288.8   294.9   322.6 

3- Multi-model agreement based on seasonal analysis

The multi-model agreement is a comparison based on seasonal anomalies, which are computed by following the next steps. First, the summer season is selected for the historical data by applying the SeasonSelect() function from the ClimProjDiags package:

summer_historical <- SeasonSelect(multimodel_historical, season = 'JJA')

The new subsets are lists of two elements. The first element is the selected data and the second element contains the corresponding dates.

> str(summer_historical)
List of 2
 $ data : num [1:4, 1:90, 1:12, 1:8] 314 293 305 300 300 ...
 $ dates: chr [1:90] "1961-06-15 12:00:00" "1961-07-15 12:00:00" ...

> dim(summer_historical$data)
model  time   lon   lat 
    4    90    12     8 

Then, the summer climatology of the historical period is computed by averaging over the time dimension:

climatology <- Mean1Dim(summer_historical$data, posdim = 2) 

The same summer season is selected for the future projection data with the Season() function:

summer_projection <- Season(multimodel_projection, posdim = 3, 
-                            monini = 1, moninf = 6, monsup = 8)

By running the next lines, it is possible to check the dimensions of the data:

> dim(climatology)
model   lon   lat 
    4    12     8 
> dim(summer_projection)
model   var  time   lon   lat 
    4     1   95    12     8 

The climatology dimensions are adjusted to match those of the projection data:

climatology <- InsertDim(InsertDim(climatology, posdim = 2, lendim = 1), 
                         posdim = 3, lendim = 95)

Finally, the anomaly is computed by subtracting the climatology from the projection data:

anomaly <- summer_projection - climatology

4- Multi-model agreement spatial visualization


In order to obtain a spatial visualization, the temporal mean is computed, so the time-averaged anomalies for all models are saved in the average object. The AnoAgree() function from the ClimProjDiags package calculates the percentage of models that agree with a positive or negative mean in each grid point.

average <- Mean1Dim(anomaly,  which(names(dim(anomaly)) == "time"))
agreement <- AnoAgree(average, membersdim = which(names(dim(average)) == "model"))


So, in a case when four models are being compared, the agreement object can take the following values: 100 (all models agree), 75 (only one model has the opposite sign), 50 (only two models agree with the mean signal) and 25 (only one model agrees with the sign of the mean signal because its magnitude is higher than that of the other three models). These values will change with the number of compared models.

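The following toy example illustrates this computation at a single grid point (a hypothetical sketch, not the AnoAgree() implementation): the agreement is the percentage of members whose anomaly has the same sign as the multi-model mean anomaly.

# Hypothetical illustration of the agreement percentage at one grid point
anomalies <- c(0.8, 0.3, -0.2, 0.5)       # four models' mean anomalies
ensemble_sign <- sign(mean(anomalies))    # sign of the multi-model mean
agree_pct <- 100 * mean(sign(anomalies) == ensemble_sign)
agree_pct                                 # 75: one model has the opposite sign
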

The next question will be answered by the example plot: where do 80 % or more of the models agree on the signal? To obtain this plot, the next lines should be run in R. Notice that you can modify the threshold through the parameter agreement_threshold. The colour map shows the mean temperature anomaly and the dots the model agreement. The plot will be saved with the name “SpatialSummerAgreement.png”.

agreement_threshold <- 80
colorbar_lim <- ceiling(max(abs(max(average)), abs(min(average))))
brks <- seq(-colorbar_lim, colorbar_lim, length.out = 11)

PlotEquiMap(drop(Mean1Dim(average, which(names(dim(average)) == "model"))), 
            lat = lat, lon = lon, units = "K", brks = brks, 
            toptitle = paste(var, "- climatology:", start_climatology, "to", 
                             end_climatology, "and future simulation:", 
                             start_projection, "to", end_projection), 
            filled.continents = FALSE, title_scale = 0.6,
            dots = drop(agreement) >= agreement_threshold, 
            fileout = "SpatialSummerAgreement.png")


Multimodel Agreement for temperature in summer


5- Multi-model agreement temporal visualization

To visualize the time evolution of the multi-model agreement, the spatial average is computed, weighting by grid-pixel size, using the WeightedMean function from the ClimProjDiags package. Also, a smoothing filter is applied with the Smoothing() function from the s2dverification package. In this example, a 5-year moving window filter is applied by defining the parameter runmeanlen = 5.

temporal <- drop(WeightedMean(anomaly, lon = lon, lat = lat, mask = NULL))
temporal <- Smoothing(temporal, runmeanlen = 5, numdimt = 2)


Before visualizing, a data frame with the proper format is created.

data_frame <- as.data.frame.table(t(temporal))
years <- rep(start_projection : end_projection, 4)
data_frame$Year <- c(years)
names(data_frame)[2] <- "Model"
for (i in 1 : length(levels(data_frame$Model))) {
  levels(data_frame$Model)[i] <- paste0("model", i)
}


A new png file will be saved in the working directory with the name “TemporalSummerAgreement.png”.

g <- ggplot(data_frame, aes(x = Year, y = Freq)) + theme_bw() + 
            ylab("tas") +  xlab("Year") + theme(text=element_text(size = 12),
            legend.text=element_text(size = 12), 
            axis.title=element_text(size = 12)) +
            stat_summary(data =  data_frame, fun.y= "mean", 
                         mapping = aes(x = data_frame$Year, y = data_frame$Freq, 
                         group = interaction(data_frame[2,3]), 
                                      color = data_frame$Model),
                         geom = "line", size = 0.8) + 
            stat_summary(data =  data_frame, geom = "ribbon", 
                         fun.ymin = "min", fun.ymax = "max", 
                         mapping = aes(x = data_frame$Year, y = data_frame$Freq, 
                         group = interaction(data_frame[2,3])), 
                         alpha = 0.3, color = "red", fill = "red") +
           ggtitle("Temporal Summer Agreement")
ggsave(filename = "TemporalSummerAgreement.png", g, device = NULL, width = 8, 
       height = 5, units = 'in', dpi = 100) 


Note: if a warning appears when plotting the temporal time series, it might be due to the NA values introduced when smoothing the time series.


Temporal evolution of the mean summer agreement of air temperature

diff --git a/inst/doc/diurnaltemp.html b/inst/doc/diurnaltemp.html deleted file mode 100644 index 8ae3e7932cedf70a9b2197420f2bf8bf74e591f6..0000000000000000000000000000000000000000 --- a/inst/doc/diurnaltemp.html +++ /dev/null @@ -1,417 +0,0 @@

Diurnal Temperature Variation (DTR) Indicator


The diurnal temperature variation (DTR) indicator is a proxy for energy demand. It is defined as the number of days in a season when the daily temperature variation (tasmax - tasmin) exceeds a vulnerability threshold. Here, the vulnerability threshold is the mean daily temperature variation for a reference period plus 5 degrees.

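As a minimal sketch of this definition at a single grid point (hypothetical values, not the package implementation), the count for one season could be obtained as follows:

# Hypothetical single-point illustration of the DTR indicator definition
dtr_ref <- 9                                   # mean reference DTR (degrees)
tasmax <- c(25, 31, 28, 33, 30)                # daily maxima for one season
tasmin <- c(15, 14, 16, 17, 18)                # daily minima for one season
sum((tasmax - tasmin) > (dtr_ref + 5))         # days exceeding the threshold: 2
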

1- Load dependencies

This example requires the following system libraries:


The ClimProjDiags R package should be loaded by running the following lines in R, once it is integrated into the CRAN mirror.

library(ClimProjDiags)

All the other R packages involved can be installed directly from CRAN and loaded as follows:

library(s2dverification)
library(abind)
library(parallel)

2- Problem, parameters and data definition


To illustrate the problem, daily maximum and minimum air temperature at 2 m for the reference period (1971 - 2000) and the future scenario rcp2.6 (2006 - 2100) in the northern hemisphere (between -40 - 20 ºE and 25 - 60 ºN) will be created artificially.


A grid of 5 degrees is defined by running the next lines in R:

lat <- seq(25, 60, 5)
lon <- seq(-35, 20, 5)


The synthetic sample of maximum and minimum temperature for the historical period can be obtained by running the following lines. The maximum temperature data is built by adding random perturbations to a sinusoidal function. The latitudinal behavior of the temperature is obtained by randomly subtracting a value proportional to the latitude. The minimum temperature is built by subtracting a random value (with annual cycle included) from the synthetic maximum temperature. Furthermore, attributes of time and dimensions are added to both samples.

tmax_historical <- NULL
tmin_historical <- NULL
grid1 <- 293 - 10 * cos(2 * pi / 365 * (1 : 10958)) + rnorm(10958)
grid2 <- 289 - 8 * cos(2 * pi / 365 * (1 : 10958)) - abs(rnorm(10958))


gridlon1 <- NULL
gridlon2 <- NULL
for (i in 1 : 12) {
  gridlon1 <- cbind(gridlon1, 
                    grid1 + abs(rnorm(10958, mean = 2, sd = 1) * 
                                cos(2 * pi / 365 * (1 : 10958))))
  gridlon2 <- cbind(gridlon2, 
                    grid2 - abs(rnorm(10958, mean = 2, sd = 1) * 
                                cos(2 * pi / 365 * (1 : 10958))))
}

for (j in 1 : 8) {
  gridnew1 <- apply(gridlon1, 2, function(x) {x - rnorm(10958, mean = j * 0.001,
                                                        sd = 0.1)})
  gridnew2 <- apply(gridlon2, 2, function(x) {x - 
                                 abs(rnorm(10958, mean = j * 0.75, sd = 0.5))})
  tmax_historical <- abind(tmax_historical, gridnew1, along = 3)
  tmin_historical <- abind(tmin_historical, gridnew2, along = 3)
}

tmax_historical <- InsertDim(InsertDim(tmax_historical, posdim = 1, lendim = 1), 
                             posdim = 1, lendim = 1)
tmin_historical <- InsertDim(InsertDim(tmin_historical, posdim = 1, lendim = 1), 
                             posdim = 1, lendim = 1)
names(dim(tmax_historical)) <- c("model", "var", "time", "lon", "lat")
names(dim(tmin_historical)) <- c("model", "var", "time", "lon", "lat")
time <- seq(ISOdate(1971, 1, 1), ISOdate(2000, 12, 31), "day")
metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
                             calendar = 'proleptic_gregorian',
                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
                             dim = list(list(name = 'time', unlim = FALSE))))
attr(time, "variables") <- metadata
attr(tmax_historical, 'Variables')$dat1$time <- time
attr(tmin_historical, 'Variables')$dat1$time <- time


A similar procedure is done to build the synthetic data for the future projections. However, a trend is added.

tmax_projection <- NULL
tmin_projection <- NULL
grid1 <- 293 - 10 * cos(2 * pi / 365 * (1 : 34698)) + rnorm(34698) + 
         (1 : 34698) * rnorm(1, mean = 4) / 34698 

grid2 <- 289 - 8 * cos(2 * pi / 365 * (1 : 34698)) - abs(rnorm(34698)) + 
         (1 : 34698) * rnorm(1, mean = 4) / 40000
gridlon1 <- NULL
gridlon2 <- NULL
for (i in 1 : 12) {
  gridlon1 <- cbind(gridlon1, 
                    grid1 + abs(rnorm(34698, mean = 2, sd = 1) * 
                            cos(2 * pi / 365 * (1 : 34698))))
  gridlon2 <- cbind(gridlon2, 
                    grid2 - abs(rnorm(34698, mean = 2, sd = 1) * 
                            cos(2 * pi / 365 * (1 : 34698))))
}

for (j in 1 : 8) {
  gridnew1 <- apply(gridlon1, 2, function(x) {x - rnorm(34698, mean = j * 0.01, 
                                                        sd = 0.1)})
  gridnew2 <- apply(gridlon2, 2, function(x) {x - 
                                 abs(rnorm(34698, mean = j * 0.75, sd = 0.5))})
  tmax_projection <- abind(tmax_projection, gridnew1, along = 3)
  tmin_projection <- abind(tmin_projection, gridnew2, along = 3)
}

tmax_projection <- InsertDim(InsertDim(tmax_projection, posdim = 1, lendim = 1), 
                             posdim = 1, lendim = 1)
tmin_projection <- InsertDim(InsertDim(tmin_projection, posdim = 1, lendim = 1), 
                             posdim = 1, lendim = 1)
names(dim(tmax_projection)) <- c("model", "var", "time", "lon", "lat")
names(dim(tmin_projection)) <- c("model", "var", "time", "lon", "lat")
time <- seq(ISOdate(2006, 1, 1), ISOdate(2100, 12, 31), "day")
metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
                             calendar = 'proleptic_gregorian',
                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
                             dim = list(list(name = 'time', unlim = FALSE))))
attr(time, "variables") <- metadata
attr(tmax_projection, 'Variables')$dat1$time <- time
attr(tmin_projection, 'Variables')$dat1$time <- time


3- Reference diurnal temperature variation


The function DTRRef, from the ClimProjDiags package, computes the mean daily temperature range (the difference between maximum and minimum temperature) for each season during the selected reference period:

dtr_reference <- DTRRef(tmax = tmax_historical, tmin = tmin_historical, by.seasons = TRUE, ncores = NULL)

The object dtr_reference contains the output for each season and gridpoint:

> str(dtr_reference)
List of 2
 $ dtr.ref: num [1:4, 1, 1, 1:12, 1:8] 5.09 2.9 5.04 2.91 5.09 ...
 $ season : chr [1:4] "DJF" "MAM" "JJA" "SON"


4- Computing the diurnal temperature variation indicator


The diurnal temperature variation indicator is computed with the function DTRIndicator by providing the maximum and minimum temperature for the future projection and the reference temperature variation:

dtr_indicator <- DTRIndicator(tmax = tmax_projection, tmin = tmin_projection, 
                              ref = dtr_reference, 
                              by.seasons = TRUE, ncores = NULL)


The function returns a list of three elements, the element indicator being the desired output:

> str(dtr_indicator)
List of 3
 $ indicator: int [1:96, 1:4, 1, 1, 1:12, 1:8] 0 1 0 0 0 2 0 1 1 0 ...
 $ year     : chr [1:96] "2006" "2007" "2008" "2009" ...
 $ season   : chr [1:4] "DJF" "JJA" "MAM" "SON"


Note: the total number of years in dtr_indicator$year (96) is greater than the period examined (in this case, 95 years of future projection). This is because the last December of the time series belongs to the subsequent winter (in this example, the 2101 winter).


5- Visualizing the diurnal temperature variation indicator


A four panel plot can be generated to visualize the seasonal indicator by running the following lines:

dtr_rcp <- array(dim = c(length(lon), length(lat), 4))             
for (j in 1:4){
              dtr_rcp[,,j] <- Mean1Dim(dtr_indicator$indicator[,j,,,,], 1)
}            
breaks <- 0 : (max(dtr_rcp) + 1)         

PlotLayout(PlotEquiMap, c(1, 2), lon = lon, lat = lat, var = dtr_rcp, 
           titles = c('DJF', 'MAM', 'JJA', 'SON'), 
           toptitle = "DTR", filled.continents = FALSE, 
           units = "Days", title_scale = 0.5, axelab = FALSE, 
           draw_separators = TRUE, subsampleg = 1, 
           brks = breaks, color_fun = clim.palette("yellowred"),
           bar_extra_labels = c(2, 0, 0, 0), 
           fileout = "SpatialDTR.png")


Diurnal Temperature Range Indicator


Furthermore, the future diurnal temperature variation can be compared with the one observed during the reference period. So, the diurnal temperature variation indicator is computed for the reference period by running:

dtr_indicator_reference <- DTRIndicator(tmax = tmax_historical, 
                                        tmin = tmin_historical, 
                                        ref = dtr_reference, by.seasons = TRUE, 
                                        ncores = NULL)


The comparison between the reference and the future projection diurnal temperature variation indicator can be computed with a simple subtraction. To visualize the result, the PlotLayout function will be applied again. The resulting plot will be saved in the working directory.

dtr_diff <- array(dim=c(length(lat), length(lon), 4))
for (i in 1:4){
  dtr_diff[,,i] <- Mean1Dim(dtr_indicator$indicator[,i,1,1,,], 1) - 
                   Mean1Dim(dtr_indicator_reference$indicator[,i,1,1,,], 1)
}

PlotLayout(PlotEquiMap, c(1, 2), lon = lon, lat = lat, var = dtr_diff,
           titles = c('DJF', 'MAM', 'JJA', 'SON'), 
           toptitle = "Change in DTR indicator",
           filled.continents = FALSE, units = "Days",
           axelab = FALSE, draw_separators = TRUE, subsampleg = 1, 
           brks = -2 : 2, bar_extra_labels = c(2, 0, 0, 0), 
           fileout = "SpatialComparisonDTR.png")


Spatial Comparison of DTR


Note: the outputs of this example, including the figures, are artificial.

diff --git a/inst/doc/extreme_indices.html b/inst/doc/extreme_indices.html deleted file mode 100644 index 2d064b97d9bcd51b49abb619d177ef48c096bb8f..0000000000000000000000000000000000000000 --- a/inst/doc/extreme_indices.html +++ /dev/null @@ -1,348 +0,0 @@

Extreme Indices

The extreme indices are an ensemble of indices based on the Expert Team on Climate Change Detection Indices (ETCCDI). There are currently 5 available indices to be computed: extreme heat (tx90p), extreme cold (tn10p), extreme wind (wx), drought (cdd) and flooding (rx5day). The individual indices can be combined into a single index with or without weighting for each component.


1- Load dependencies


This example requires the following system libraries:


The ClimProjDiags R package should be loaded by running the following lines in R once it is integrated into the CRAN mirror.

library(ClimProjDiags)

All the other R packages involved can be installed directly from CRAN and loaded as follows:

library(s2dverification)
library(abind)
library(multiApply)
library(ggplot2)
library(parallel)

2- Synthetic data


Daily maximum and minimum temperature, wind speed and precipitation are necessary to compute the different indices in both the reference period (1971 - 2000) and the future projection (2006 - 2100). The defined region will be in the northern hemisphere between -40 - 20 ºE and 25 - 60 ºN.


Maximum temperature is generated considering the annual cycle:

lat <- seq(25, 60, 5)
lon <- seq(-35, 20, 5)
tmax_historical <- NULL
grid1 <- 293 - 10 * cos(2 * pi / 365 * (1 : 10958)) + rnorm(10958)
gridlon <- NULL
for (i in 1 : 12) {
  gridlon <- cbind(gridlon, 
                   grid1 + rnorm(10958, sd = 5) * cos(2 * pi / 365 * (1 : 10958)))
}
for (j in 1 : 8) {
  gridnew <- apply(gridlon, 2, function(x) {x - rnorm(10958, mean = j * 0.5, sd = 3)})
  tmax_historical <- abind(tmax_historical, gridnew, along = 3)
}
tmax_historical <- InsertDim(InsertDim(tmax_historical, posdim = 1, lendim = 1), 
                             posdim = 1, lendim = 1)
names(dim(tmax_historical)) <- c("model", "var", "time", "lon", "lat")
time <- seq(ISOdate(1971, 1, 1), ISOdate(2000, 12, 31), "day")
metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
                             calendar = 'proleptic_gregorian',
                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
                             dim = list(list(name = 'time', unlim = FALSE))))
attr(time, "variables") <- metadata
attr(tmax_historical, 'Variables')$dat1$time <- time


A similar procedure is considered to build the synthetic data for the future projections. However, a little trend is added.

-
tmax_projection <- NULL
-grid1 <- 298 - 10 * cos(2 * pi / 365 * (1 : 34698)) + rnorm(34698) + 
-         (1 : 34698) * rnorm(1, mean = 4) / 34698
-gridlon <- NULL
-for (i in 1 : 12) {
-  gridlon <- cbind(gridlon, grid1 + rnorm(34698, sd = 5) * 
-                                    cos(2 * pi / 365 * (1 : 34698)))
-}
-for (j in 1 : 8) {
-  gridnew <- apply(gridlon, 2, function(x) {x - 
-                               rnorm(34698, mean = j * 0.5, sd = 3)})
-  tmax_projection <- abind(tmax_projection, gridnew, along = 3)
-}
-tmax_projection <- InsertDim(InsertDim(tmax_projection, posdim = 1, lendim = 1), posdim = 1, lendim = 1)
-names(dim(tmax_projection)) <- c("model", "var", "time", "lon", "lat")
-time <- seq(ISOdate(2006, 1, 1), ISOdate(2100, 12, 31), "day")
-metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
-                             calendar = 'proleptic_gregorian',
-                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
-                             dim = list(list(name = 'time', unlim = FALSE))))
-attr(time, "variables") <- metadata
-attr(tmax_projection, 'Variables')$dat1$time <- time
-

To build synthetic precipitation data, a lognormal distribution is considered:

-
ppt_historical <- rlnorm(10958 * 12 * 8)
-dim(ppt_historical) <- c(model = 1, var = 1, time = 10958, lon = 12, lat = 8)
-time <- seq(ISOdate(1971, 1, 1), ISOdate(2000, 12, 31), "day")
-metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
-                             calendar = 'proleptic_gregorian',
-                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
-                             dim = list(list(name = 'time', unlim = FALSE))))
-attr(time, "variables") <- metadata
-attr(ppt_historical, 'Variables')$dat1$time <- time
-ppt_projection <- rlnorm(34698 * 12 * 8)
-dim(ppt_projection) <- c(model = 1, var = 1, time = 34698, lon = 12, lat = 8)
-time <- seq(ISOdate(2006, 1, 1), ISOdate(2100, 12, 31), "day")
-metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
-                             calendar = 'proleptic_gregorian',
-                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
-                             dim = list(list(name = 'time', unlim = FALSE))))
-attr(time, "variables") <- metadata
-attr(ppt_projection, 'Variables')$dat1$time <- time
-
-
-

3- Computing the Extreme Heat Index

-

The Extreme Heat Index (t90p) is defined as the percentage of days when the maximum temperature exceeds the 90th percentile.

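As a minimal illustration of this definition at one grid point (hypothetical values, not the package code), the index for a daily series can be computed as:

# Hypothetical illustration of the t90p definition for a daily series
tmax <- rnorm(365, mean = 293, sd = 5)           # one year of daily maxima
thr <- quantile(tmax, probs = 0.9)               # 90th percentile threshold
t90p <- 100 * mean(tmax > thr)                   # percentage of days above it
t90p                                             # close to 10 by construction
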

In order to evaluate the future projections, it is necessary to compute the index during a reference historical period. The next steps should be followed:

-

To remove seasonality effects, the anomaly is computed for each day and gridpoint by applying the DailyAno function. The first dimension of the result is then named 'time'.

anomaly_data <- apply(tmax_historical, c(1,2,4,5), DailyAno, dates = attributes(tmax_historical)$Variables$dat1$time)
-
names(dim(anomaly_data))[1] <- "time"

This data can be detrended by applying the Trend function from the s2dverification package. In order to remove the trend from tmax_historical, the correction is calculated by subtracting detrended_data from anomaly_data.

detrended_data <- Trend(anomaly_data, 
                        posTR = which(names(dim(anomaly_data)) == "time"))
diff <- anomaly_data - detrended_data$detrended
diff <- aperm(diff, c(2,3,1,4,5))
detrended_data <- tmax_historical - diff


For each gridpoint and day of the year (from the 1st of January to the 31st of December), the maximum temperature at the 90th percentile of the series will be calculated as the threshold.

quantile <- 0.9
thresholds <- Threshold(detrended_data, qtiles = quantile, 
                        ncores = detectCores() -1)

> dim(thresholds)
jdays model   var   lon   lat 
  366     1     1    12     8 


By indicating the metric and introducing the threshold, the Climdex() function will return the extreme heat index during the reference period.

base_index <- Climdex(data = detrended_data, metric = 't90p', 
                      threshold = thresholds, ncores = detectCores() - 1)


The output of the Climdex function will be a list object. Index values are saved in the base_index$result element.

> str(base_index)
List of 2
 $ result: num [1:30, 1, 1, 1:12, 1:8] 11.23 8.74 10.41 11.78 10.14 ...
 $ years : num [1:30] 1971 1972 1973 1974 1975 ...
> dim(base_index$result)
 year model   var   lon   lat 
   30     1     1    12     8 


Now, the standard deviation is computed in order to standardize the index. Notice that, by definition, the mean percentage of days exceeding the 90th percentile is 10, so only the standard deviation needs to be computed.

base_sd <- Apply(list(base_index$result), target_dims = list(c(1)), 
                 fun = "sd")$output1


The index can be computed by considering the threshold obtained for the reference period.

projection_index <- Climdex(data = tmax_projection, metric = 't90p', 
                            threshold = thresholds, ncores = detectCores() - 1)


It is normalized with mean 10 and the standard deviation of the reference period.

base_mean <- 10
base_sd <- InsertDim(base_sd, 1, dim(projection_index$result)[1])
HeatExtremeIndex <- (projection_index$result - base_mean) / base_sd


A spatial representation of the mean index values is obtained and saved in PNG format in the working directory with the name “SpatialExtremeHeatIndex.png”. The matrix masc is built and shown as dots in the plot, indicating which pixels are considered land.

masc <- rep(0, 8 * 12)
masc[c(5 : 12, 18 : 24, 31 : 34, 43, 44, 47, 56 : 60, 67 : 72, 79, 
       82 : 84, 93 : 96)] <- 1
dim(masc) <- c(12, 8)
PlotEquiMap(Mean1Dim(HeatExtremeIndex, 
                     which(names(dim(HeatExtremeIndex)) == "year")), 
                     lon = lon, lat = lat, filled.continents = FALSE, 
                     toptitle = "Extreme Heat Index", dots = masc,
                     fileout = "SpatialExtremeHeatIndex.png")


Spatial distribution of Extreme Heat Index


The inland average of the Extreme Heat Index can be computed to plot its time evolution using the WeightedMean function. Smoothing() returns the smoothed time series for a 3-year moving window, which can be modified using the runmeanlen parameter.

temporal <- WeightedMean(HeatExtremeIndex, lon = lon, lat = lat, mask = drop(masc))
temporal_3ysmooth <- Smoothing(temporal, runmeanlen = 3, numdimt = 1)


The next code should be run to plot and save the original average and the 3-year smoothed data.

png("Temporal_Inland_ExtremeHeatIndex.png", width = 8, height = 5, units = 'in', 
    res = 100, type = "cairo")
  plot(2006 : 2100, temporal, type = "l", lty = 5, lwd = 2, bty = 'n', 
       xlab = "Time (years)", ylab = "Extreme Heat Index", 
       main = "Inland average Extreme Heat Index")
  lines(2006 : 2100, temporal_3ysmooth, col = "darkgreen", lwd = 2)
  legend('bottomright', c('Annual', '3 years smooth'), col = c(1, 'darkgreen'), 
         lty = c(5, 1), lwd = 2, bty = 'n')
dev.off()


Temporal Inland Extreme Heat Index


4- Extreme Drought Index


The Extreme Drought Index (cdd), which measures the maximum length of a dry spell, is defined as the maximum number of consecutive days with the daily precipitation amount lower than 1 mm.

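A minimal sketch of this definition for a single daily precipitation series (hypothetical values, not the Climdex implementation) can be written with rle():

# Hypothetical illustration of the cdd definition (maximum dry spell length)
pr <- c(0.2, 0.0, 0.5, 3.2, 0.1, 0.0, 0.0, 0.9, 5.0)  # daily precipitation (mm)
dry <- rle(pr < 1)                                    # runs of days below 1 mm
max(c(0, dry$lengths[dry$values]))                    # longest dry spell: 4
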

To compute the Extreme Drought Index during the reference period, along with its standard deviation and mean:


Note: Precipitation data is not detrended. Furthermore, this index doesn't require computing a threshold, as the Climdex function integrates the threshold of a precipitation amount lower than 1 mm internally. However, this case requires the calculation of the mean.

base_index <- Climdex(data = ppt_historical, metric = 'cdd',  
                      ncores = detectCores() - 1)
base_mean <-  Apply(list(base_index$result), target_dims = list(c(1)), 
                    fun = "mean")$output1
base_sd <- Apply(list(base_index$result), target_dims = list(c(1)), 
                 fun = "sd")$output1


The object base_index contains the output of the Climdex function as a list of two elements with the next dimensions:

> str(base_index)
List of 2
 $ result: num [1:30, 1, 1, 1:12, 1:8] 6 11 8 8 8 12 9 10 6 8 ...
 $ years : num [1:30] 1971 1972 1973 1974 1975 ...


The Extreme Drought Index is computed and standardized:

projection_index <- Climdex(data = ppt_projection, metric = 'cdd', 
                            ncores = detectCores() - 1)
base_mean <- InsertDim(base_mean, 1, dim(projection_index$result)[1])
base_sd <- InsertDim(base_sd, 1, dim(projection_index$result)[1])
DroughtExtremeIndex <- (projection_index$result - base_mean) / base_sd


Spatial representation of the Extreme Drought Index:

PlotEquiMap(Mean1Dim(DroughtExtremeIndex, 
                     which(names(dim(DroughtExtremeIndex)) == "year")), 
            lon = lon, lat = lat, filled.continents = FALSE, 
            toptitle = "Drought Index", brks = seq(-1, 1, 0.01), 
            fileout = "SpatialDroughtIndex.png")


Spatial distribution of Extreme Drought Index


Evolution of inland average of the Extreme Drought Index:

temporal <- WeightedMean(DroughtExtremeIndex, lon = lon, lat = lat, 
                         mask = drop(masc))
temporal_5ysmooth <- Smoothing(temporal, runmeanlen = 5, numdimt = 1)
png("Temporal_Inland_ExtremeDroughtIndex.png", width = 8, height = 5, units= 'in', 
    res = 100,  type = "cairo")
plot(2006: 2100, temporal, type = "l", lty = 5, lwd = 2, bty = 'n', 
     xlab = "Time (years)", ylab = "Extreme Drought Index", 
     main = "Inland average Extreme Drought Index")
lines(2006 : 2100, temporal_5ysmooth, col = "darkgreen",lwd = 2)
legend('bottomleft', c('Annual', '5 years smooth'), col= c(1, 'darkgreen'),
        lty = c(5, 1), lwd = 2, bty = 'n')
dev.off()


Temporal evolution of Inland Extreme Drought Index


5- Extreme Flooding Index


The Extreme Flooding Index (rx5day) is defined as the maximum precipitation amount in 5 consecutive days.

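A minimal sketch of this definition for a single daily series (hypothetical values, not the Climdex implementation) uses rolling 5-day sums:

# Hypothetical illustration of the rx5day definition
pr <- c(1, 12, 7, 0, 3, 25, 2, 0, 4, 1)        # daily precipitation (mm)
sums5 <- rowSums(embed(pr, 5))                 # all 5-day consecutive sums
max(sums5)                                     # maximum 5-day amount
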

The Extreme Flooding Index during the reference period, along with its standard deviation and mean, can be calculated by executing:

base_index <- Climdex(data = ppt_historical, metric = 'rx5day',  
                      ncores = detectCores() - 1)
base_mean <-  Apply(list(base_index$result), target_dims = list(c(1)), 
                    fun = "mean")$output1
base_sd <- Apply(list(base_index$result), target_dims = list(c(1)), 
                 fun = "sd")$output1


The Extreme Flooding Index is computed and standardized:

-
projection_index <- Climdex(data = ppt_projection, metric = 'rx5day', 
-                            ncores = detectCores() - 1)
-base_mean <- InsertDim(base_mean, 1, dim(projection_index$result)[1])
-base_sd <- InsertDim(base_sd, 1, dim(projection_index$result)[1])
-FloodingExtremeIndex <- (projection_index$result - base_mean) / base_sd
-

Spatial representation of the Extreme Flooding Index:

-
PlotEquiMap(Mean1Dim(FloodingExtremeIndex, 
-            which(names(dim(FloodingExtremeIndex)) == "year")), lon = lon, 
-            lat = lat, filled.continents = FALSE, 
-            toptitle = "Extreme Flooding Index", 
-            brks = seq(-1, 1, 0.1), fileout = "SpatialFloodingIndex.png")
-

Spatial distribution of Extreme Flooding Index

-
temporal <- WeightedMean(FloodingExtremeIndex, lon = lon, lat = lat, 
-                         mask = drop(masc))
-temporal_3ysmooth <- Smoothing(temporal, runmeanlen = 3, numdimt = 1)
-png("Temporal_Inland_ExtremeFloodingIndex.png", width = 8, height = 5, 
-    units= 'in', res = 100, type = "cairo")
-plot(2006:2100, temporal, type = "l", lty = 5, lwd = 2, bty = 'n', 
-     xlab = "Time (years)", ylab = "Extreme Flooding Index", 
-     main = "Inland average Extreme Flooding Index")
-lines(2006:2100, temporal_3ysmooth, col = "darkgreen", lwd = 2)
-legend('bottomleft', c('Annual', '3 years smooth'), col = c(1, 'darkgreen'), 
-       lty = c(5, 1), lwd = 2, bty = 'n')
-dev.off()
-

Temporal evolution of Inland Extreme Flooding Index


6- Combining Indices

-

The individual indices can be combined into a single index, with or without weighting each component. This combined index is roughly analogous to the Actuaries Climate Risk Index. The extreme indices to be combined should be stored in a single list object.

-
indices <- list()
-indices[[1]] <- HeatExtremeIndex
-indices[[2]] <- DroughtExtremeIndex
-indices[[3]] <- FloodingExtremeIndex
-

If the weights parameter is NULL, all indices are weighted equally when the operation parameter is set to mean (the default). To apply other weights, provide a vector whose length equals the number of indices considered (3 in this example) and whose elements sum to 1 (see the sketch below).

-
aci <- CombineIndices(indices = indices, weights = NULL)
-
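To use unequal weighting, pass a weights vector instead. A minimal sketch (the weight values here are illustrative only, not part of the original vignette; they give the heat component half of the total weight):

aci_weighted <- CombineIndices(indices = indices, weights = c(0.5, 0.25, 0.25))

The weights follow the order of the indices in the list: heat, drought, flooding.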

A spatial visualization can be performed by executing:

-
PlotEquiMap(Mean1Dim(aci, which(names(dim(aci)) == "year")), lon = lon, 
-            lat = lat,  filled.continents = FALSE, toptitle = "Indices Combination",
-            fileout = "CombinedIndices.png")
-

Spatial distribution of Combined Indices

-

Note: This vignette shows the computation of three indices; however, five different indices can be computed with the Climdex function, and an additional cold component is sketched below. To explore other combination settings, run ?CombineIndices.
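As an illustration of extending the combination, a cold extremes component could be computed and appended in the same way. The sketch below is hypothetical: it assumes daily minimum temperature arrays tmin_historical and tmin_projection built like the precipitation arrays above, and it assumes 't10p' is among the metric names accepted by Climdex and that the threshold is passed as for the heat index; check ?Climdex before using it.

cold_thres <- Threshold(data = tmin_historical, qtiles = 0.1,
                        ncores = detectCores() - 1)
cold_index <- Climdex(data = tmin_projection, metric = 't10p',
                      threshold = cold_thres, ncores = detectCores() - 1)

The resulting cold_index$result would then be standardized against the reference-period mean and standard deviation, as done for the other indices, before being appended to the indices list.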

diff --git a/inst/doc/heatcoldwaves.html b/inst/doc/heatcoldwaves.html deleted file mode 100644 index c272ce8a24f36682e9f60b1be7b550b9e6e23a9a..0000000000000000000000000000000000000000 --- a/inst/doc/heatcoldwaves.html +++ /dev/null @@ -1,381 +0,0 @@

Heatwave and coldwave duration

- -

Heatwave and coldwave duration is considered to be the total duration of “extreme spells”, i.e. the total number of days in a season when the temperature exceeds a threshold for a given number of consecutive days. Other tools for computing such events are also available, but the novelty of this tool is that the users can select their own thresholds (based on quantiles) and the minimum duration of an event. The duration of heatwaves and coldwaves helps to understand potential changes in energy demand.

- -

1- Load dependencies

- -

This example requires the following system libraries:

- - - -

The ClimProjDiags R package should be loaded by running the following line in R once it is available on CRAN:

- -
library(ClimProjDiags)
-
- -

All the other R packages involved can be installed directly from CRAN and loaded as follows:

- -
library(abind)
-library(s2dverification)
-library(parallel)
-
- -

The illustrative problem is to analyze the daily maximum or minimum air temperature at 2 m. The reference period used to compute the threshold is 1971-2000. The chosen future scenario is RCP2.6 over the period 2006-2100, for which the heatwave/coldwave duration will be determined. Finally, the selected region in the Northern Hemisphere spans 40ºW to 20ºE and 25ºN to 60ºN.

- -

These parameters are defined by running:

- -
var <- 'tasmax'
-
-start_climatology <- '1971'
-end_climatology <- '2000'
-
-start_projection <- '2006'
-end_projection <- '2100'
-
-lat <- seq(25, 60, 5)
-lon <- seq(-35, 20, 5)
-
- -

A synthetic sample of data for the reference period is built by adding random perturbations to a sinusoidal function. The latitudinal behavior of the temperature is considered by randomly subtracting a value proportional to the latitude. Furthermore, attributes of time and dimensions are added.

- -
tmax_historical <- NULL
-grid1 <- 293 - 10 * cos(2 * pi / 365 * (1 : 10958)) + rnorm(10958)
-gridlon <- NULL
-for (i in 1 : 12) {
-  gridlon <- cbind(gridlon, grid1 + rnorm(10958, sd = 5) * 
-                                    cos(2 * pi / 365 * (1 : 10958)))
-}
-for (j in 1 : 8) {
-  gridnew <- apply(gridlon, 2, function(x) {x - rnorm(10958, mean = j * 0.5, sd = 3)})
-  tmax_historical <- abind(tmax_historical, gridnew, along = 3)
-}
-tmax_historical <- InsertDim(InsertDim(tmax_historical, posdim = 1, lendim = 1),
-                             posdim = 1, lendim = 1)
-names(dim(tmax_historical)) <- c("model", "var", "time", "lon", "lat")
-time <- seq(ISOdate(1971, 1, 1), ISOdate(2000, 12, 31), "day")
-metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
-                             calendar = 'proleptic_gregorian',
-                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
-                             dim = list(list(name = 'time', unlim = FALSE))))
-attr(time, "variables") <- metadata
-attr(tmax_historical, 'Variables')$dat1$time <- time
-
- -

A similar procedure is used to build the synthetic data for the future projection; however, a small trend is added.

- -
tmax_projection <- NULL
-grid1 <- 298 - 10 * cos(2 * pi / 365 * (1 : 34698)) + 
-         rnorm(34698) + (1 : 34698) * rnorm(1, mean = 4) / 34698
-gridlon <- NULL
-for (i in 1 : 12) {
-  gridlon <- cbind(gridlon, grid1 + rnorm(34698, sd = 5) * 
-                                    cos(2 * pi / 365 * (1 : 34698)))
-}
-for (j in 1 : 8) {
-  gridnew <- apply(gridlon, 2, function(x) {x - rnorm(34698, mean = j * 0.5, 
-                                                      sd = 3)})
-  tmax_projection <- abind(tmax_projection, gridnew, along = 3)
-}
-tmax_projection <- InsertDim(InsertDim(tmax_projection, posdim = 1, lendim = 1), 
-                             posdim = 1, lendim = 1)
-names(dim(tmax_projection)) <- c("model", "var", "time", "lon", "lat")
-time <- seq(ISOdate(2006, 1, 1), ISOdate(2100, 12, 31), "day")
-metadata <- list(time = list(standard_name = 'time', long_name = 'time', 
-                             calendar = 'proleptic_gregorian',
-                             units = 'days since 1970-01-01 00:00:00', prec = 'double', 
-                             dim = list(list(name = 'time', unlim = FALSE))))
-attr(time, "variables") <- metadata
-attr(tmax_projection, 'Variables')$dat1$time <- time
-
- -

2- Heatwaves

- -

The heatwave duration is the total number of days during summer on which the maximum temperature exceeds a threshold for at least a minimum number of consecutive days.

- -

2.1- Defining heatwaves threshold

- -

In this example, the threshold is defined as the 90th percentile of maximum temperature during the period 1971-2000.

- -

The summer data can be selected with the SeasonSelect function from the ClimProjDiags package.

- -
summer_tmax_historical <- SeasonSelect(tmax_historical, season = 'JJA')
-
- -

The output of SeasonSelect is a list of two elements: data and dates. The number of selected dates should be consistent: 92 summer days * 30 years = 2760 dates, as can be verified below. The data element also keeps the longitude and latitude dimensions.

- -
> str(summer_tmax_historical)
-List of 2
- $ data : num [1:2760, 1:12, 1:8] 302 285 301 302 312 ...
- $ dates: POSIXct[1:2760], format: "1971-06-01 12:00:00" "1971-06-04 12:00:00" ...
-
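That count can be checked directly (a quick sanity check, not part of the original workflow):

length(summer_tmax_historical$dates) == 92 * 30  # expected TRUE: 2760 dates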
- -

To define the heatwave, it is necessary to select the maximum temperature threshold that must be exceeded. In this example, the 90th percentile of the maximum temperature during the reference period is used. This calculation is performed with the Threshold function from the ClimProjDiags package.

- -
quantile <- 0.9
-thresholds <- Threshold(data = summer_tmax_historical$data, 
-                        dates = summer_tmax_historical$dates, 
-                        calendar ="proleptic_gregorian", 
-                        qtiles = quantile, ncores = detectCores() - 1)
-
- -

The output of Threshold is an array with the 92 summer days in the first dimension and, in this case, the spatial dimensions in the second and third positions:

- -
> str(thresholds)
- num [1:92, 1:12, 1:8] 310 308 312 309 310 ...
-
- -

2.2- Heatwaves projection

- -

The same selection should be done for the future projection data by applying SeasonSelect.

- -
summer_tmax_projection <- SeasonSelect(tmax_projection, season = 'JJA')
-
- -

The corresponding output is again a list of two elements: data and dates. The selected dates should be consistent: 92 summer days * (2100 - 2006 + 1) years = 8740 dates.

- -
> str(summer_tmax_projection)
-List of 2
- $ data : num [1:8740, 1:12, 1:8] 303 300 315 297 298 ...
- $ dates: POSIXct[1:8740], format: "2006-06-01 12:00:00" "2006-06-02 12:00:00"...
-
- -

2.3- Heatwaves duration

- -

The duration of the heatwaves is obtained with the WaveDuration function from the ClimProjDiags package. By setting spell.length = 5, the minimum length of a heatwave is defined as 5 days in this example. The function therefore returns, for each summer, the number of days on which the maximum temperature exceeds the 90th percentile of the reference period, counting only days occurring in clusters of at least 5 consecutive days. This means that isolated events are not included.

- -
duration <- WaveDuration(data = summer_tmax_projection$data, 
-                         threshold = thresholds, op = ">", spell.length = 5, 
-                         dates = summer_tmax_projection$dates, 
-                         calendar = "proleptic_gregorian")
-
- -
> str(duration)
-List of 2
- $ result: num [1:95, 1:12, 1:8] 5 5 0 5 9 11 5 11 0 5 ...
- $ years : chr [1:95] "2006-JJA" "2007-JJA" "2008-JJA" "2009-JJA" ...
-
- -

The spatial representation of the maximum, mean and minimum duration of heatwaves can be plotted and saved in the working directory; for the maximum, run:

- -
breaks <- seq(0, 92, 4)
-
-PlotEquiMap(apply(duration$result, c(2, 3), max), lon = lon, lat = lat, 
-            brks = breaks, filled.continents = FALSE, title_scale = 0.8,
-            toptitle = "Heat wave duration", 
-            cols = heat.colors(length(breaks)-1)[(length(breaks)-1):1],
-            fileout = "SpatialHeatwave.png")
-
- -

Spatial distribution of Heatwave duration
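Although only the heatwave computation is shown above, the coldwave duration can be obtained analogously: select winter instead of summer, use the daily minimum temperature, take a low quantile for the threshold and reverse the comparison operator. A minimal sketch, assuming hypothetical tmin_historical and tmin_projection arrays built as in section 1:

winter_tmin_historical <- SeasonSelect(tmin_historical, season = 'DJF')
winter_tmin_projection <- SeasonSelect(tmin_projection, season = 'DJF')
thresholds_cold <- Threshold(data = winter_tmin_historical$data,
                             dates = winter_tmin_historical$dates,
                             calendar = "proleptic_gregorian",
                             qtiles = 0.1, ncores = detectCores() - 1)
duration_cold <- WaveDuration(data = winter_tmin_projection$data,
                              threshold = thresholds_cold, op = "<",
                              spell.length = 5,
                              dates = winter_tmin_projection$dates,
                              calendar = "proleptic_gregorian")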

- - - - diff --git a/man/AnoAgree.Rd b/man/AnoAgree.Rd index c929352d71c27790644570ed71b937a22a7072d9..63c1450039013f94bee9c4e66d29c0923fcdab84 100644 --- a/man/AnoAgree.Rd +++ b/man/AnoAgree.Rd @@ -34,4 +34,3 @@ a <- rnorm(6) agree <- AnoAgree(ano = a, membersdim = 1, na.rm = TRUE, ncores = NULL) print(agree) } - diff --git a/man/ArrayToList.Rd b/man/ArrayToList.Rd index 9a7d181f1926cea32625c95a6861e0deaaaff7f0..9951ae13d42a263ce58fdfd20d954c8d23f84ed2 100644 --- a/man/ArrayToList.Rd +++ b/man/ArrayToList.Rd @@ -38,4 +38,3 @@ str(datalist) \seealso{ \link[s2dv]{PlotLayout} } - diff --git a/man/Climdex.Rd b/man/Climdex.Rd index 669570ab98f8060630d9df2ec90911deec345a1b..372712c240998579f23b70b8fbfd998656ce00f7 100644 --- a/man/Climdex.Rd +++ b/man/Climdex.Rd @@ -4,8 +4,16 @@ \alias{Climdex} \title{Wrapper for applying the climdex routine ETCCDI climate change indices to n-dimensional arrays.} \usage{ -Climdex(data, metric, threshold = NULL, base.range = NULL, dates = NULL, - timedim = NULL, calendar = NULL, ncores = NULL) +Climdex( + data, + metric, + threshold = NULL, + base.range = NULL, + dates = NULL, + timedim = NULL, + calendar = NULL, + ncores = NULL +) } \arguments{ \item{data}{A numeric n-dimensional array containing daily maximum or minimum temperature, wind speed or precipitation amount.} @@ -68,4 +76,3 @@ David Bronaugh for the Pacific Climate Impacts Consortium (2015). climdex.pcic: PCIC Implementation of Climdex Routines. R package version 1.1-6. http://CRAN.R-project.org/package=climdex.pcic } - diff --git a/man/CombineIndices.Rd b/man/CombineIndices.Rd index 95abc5ed4f5564527b93bd9646221b7c4b1a8d3d..6d032cddcba684a8375572aa6c259da2c9d5e982 100644 --- a/man/CombineIndices.Rd +++ b/man/CombineIndices.Rd @@ -33,4 +33,3 @@ dim(b) <- c(lon = 2, lat = 3, mod = 4) comb_ind <- CombineIndices(indices = list(a, b), weights = c(2, 1), operation = "add") print(comb_ind) } - diff --git a/man/DTRIndicator.Rd b/man/DTRIndicator.Rd index 33e7f507882ba4583c5796645b05991a1d93af22..e8be18bb79aa379757334633414c7ea5438dec0f 100644 --- a/man/DTRIndicator.Rd +++ b/man/DTRIndicator.Rd @@ -4,8 +4,16 @@ \alias{DTRIndicator} \title{Diurnal temperature range indicator (DTR) of multidimensional arrays} \usage{ -DTRIndicator(tmax, tmin, ref, by.seasons = TRUE, dates = NULL, - timedim = NULL, calendar = NULL, ncores = NULL) +DTRIndicator( + tmax, + tmin, + ref, + by.seasons = TRUE, + dates = NULL, + timedim = NULL, + calendar = NULL, + ncores = NULL +) } \arguments{ \item{tmax}{A numeric multidimensional array containing daily maximum temperature.} @@ -60,4 +68,3 @@ aa <- DTRIndicator(tmax, tmin, ref = a, by.seasons = FALSE, ncores = NULL) str(aa) dim(aa$indicator) } - diff --git a/man/DTRRef.Rd b/man/DTRRef.Rd index 6e32e31a0085e87305b3e30c601dec7fe28fa6bc..daea8c2390d609adb7055b306d56ad7b870d846a 100644 --- a/man/DTRRef.Rd +++ b/man/DTRRef.Rd @@ -4,8 +4,16 @@ \alias{DTRRef} \title{Diurnal temperature range of multidimensional arrays} \usage{ -DTRRef(tmax, tmin, by.seasons = TRUE, dates = NULL, timedim = NULL, - calendar = NULL, na.rm = TRUE, ncores = NULL) +DTRRef( + tmax, + tmin, + by.seasons = TRUE, + dates = NULL, + timedim = NULL, + calendar = NULL, + na.rm = TRUE, + ncores = NULL +) } \arguments{ \item{tmax}{A numeric multidimensional array containing daily maximum temperature.} @@ -68,4 +76,3 @@ dim(tmin) <- c(2, 3, 365) a <- DTRRef(tmax, tmin, by.seasons = FALSE, dates = time, timedim = 3, ncores = NULL) str(a) } - diff --git a/man/DailyAno.Rd b/man/DailyAno.Rd index 
d4033f79e505fbd3df46a6304be677212c811f1b..3bf59f6cd8941a724a7723c8ae6f16d72394e904 100644 --- a/man/DailyAno.Rd +++ b/man/DailyAno.Rd @@ -4,8 +4,7 @@ \alias{DailyAno} \title{Daily anomalies} \usage{ -DailyAno(data, jdays = NULL, dates = NULL, calendar = NULL, - na.rm = TRUE) +DailyAno(data, jdays = NULL, dates = NULL, calendar = NULL, na.rm = TRUE) } \arguments{ \item{data}{A vector of daily data.} @@ -31,4 +30,3 @@ jdays <- c(rep(1, 5), rep(2, 5)) daily_anomaly <- DailyAno(data = data, jdays = jdays, na.rm = TRUE) print(daily_anomaly) } - diff --git a/man/Extremes.Rd b/man/Extremes.Rd index 0939fa67275433b18b9f62df84150911ea72e352..8042619d0ee0b4aaf0368b227cb3b80d3f73c552 100644 --- a/man/Extremes.Rd +++ b/man/Extremes.Rd @@ -4,9 +4,18 @@ \alias{Extremes} \title{Sum of spell lengths exceeding daily threshold for n-dimensional arrays} \usage{ -Extremes(data, threshold, op = ">", min.length = 6, - spells.can.span.years = TRUE, max.missing.days = 5, dates = NULL, - timedim = NULL, calendar = NULL, ncores = NULL) +Extremes( + data, + threshold, + op = ">", + min.length = 6, + spells.can.span.years = TRUE, + max.missing.days = 5, + dates = NULL, + timedim = NULL, + calendar = NULL, + ncores = NULL +) } \arguments{ \item{data}{A n-dimensional array containing daily data.} @@ -44,18 +53,17 @@ This routine compares data to the thresholds using the given operator, generatin } \examples{ ##Example synthetic data: -data <- 1:(2 * 3 * 372 * 1) -dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) -time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") +data <- 1:(2 * 3 * 310 * 1) +dim(data) <- c(time = 310, lon = 2, lat = 3, model = 1) +time <- as.POSIXct(paste(sort(rep(1902:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") metadata <- list(time = list(standard_name = 'time', long_name = 'time', calendar = 'noleap', units = 'days since 1970-01-01 00:00:00', prec = 'double', dim = list(list(name = 'time', unlim = FALSE)))) -threshold = rep(40, 31) attr(time, "variables") <- metadata attr(data, 'Variables')$dat1$time <- time +threshold <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL) +res <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can.span.years = TRUE, + max.missing.days = 5, ncores = NULL) +str(res) -a <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can.span.years = TRUE, - max.missing.days = 5, ncores = NULL) -str(a) } - diff --git a/man/Lon2Index.Rd b/man/Lon2Index.Rd index b2012a61f97126ee886ba942f3e5020961d29994..dbae6dcbc533716959433999b059eba7276c1799 100644 --- a/man/Lon2Index.Rd +++ b/man/Lon2Index.Rd @@ -33,4 +33,3 @@ pos <- Lon2Index(lon, lonmin = 340, lonmax = 20) lon[pos] } - diff --git a/man/SeasonSelect.Rd b/man/SeasonSelect.Rd index a71fd242c9a71d0647fa10fad5fc1a440823af70..33066eed0e4ce2cd5b1a1abbe92930b43854975c 100644 --- a/man/SeasonSelect.Rd +++ b/man/SeasonSelect.Rd @@ -45,4 +45,3 @@ attr(data, 'Variables')$common[[2]]$dim[[3]]$vals <- time a <- SeasonSelect(data = data, season = 'JJA') str(a) } - diff --git a/man/SelBox.Rd b/man/SelBox.Rd index 07e92b20e28a41e872059f311378f8e1f90b7573..38e3547bf6ae6d6e699e78821aaefb1311896535 100644 --- a/man/SelBox.Rd +++ b/man/SelBox.Rd @@ -43,4 +43,3 @@ a <- SelBox(data = data, lon = lon, lat = lat, region = c(2, 20, 1, 5), londim = 1, latdim = 2, mask = NULL) str(a) } - diff --git a/man/ShiftLon.Rd b/man/ShiftLon.Rd new file mode 100644 index 0000000000000000000000000000000000000000..7d7a9d493b03d4aec16b54e6d19e69f2813a967e --- 
/dev/null +++ b/man/ShiftLon.Rd @@ -0,0 +1,48 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/ShiftLon.R +\name{ShiftLon} +\alias{ShiftLon} +\title{Shift longitudes of a data array} +\usage{ +ShiftLon(data, lon, westB, lon_dim = "lon", ncores = NULL) +} +\arguments{ +\item{data}{A named multidimensional array with at least 'lon_dim' dimension.} + +\item{lon}{A numeric vector of longitudes. The values are expected to be +monotonic increasing.} + +\item{westB}{A number indicating the west boundary of the new longitudes.} + +\item{lon_dim}{A character string indicating the name of the longitude +dimension in 'data'. The default value is 'lon'.} + +\item{ncores}{An integer indicating the number of cores used for computation. +The default value is NULL (use only one core).} +} +\value{ +A list of 2: +\item{data}{ + Array of the shifted data with the same dimensions as parameter 'data'. +} +\item{lon}{ + The monotonic increasing new longitudes with the same length as parameter + 'lon' and start at 'westB'. +} +} +\description{ +Shift the longitudes of a data array. Only reasonable for global + longitude shifting. It is useful for map plotting or aligning datasets. +} +\examples{ +data <- array(data = 1:50, dim = c(lon = 360, lat = 181)) +lon <- array(data = 0:359, dim = c(lon = 360)) +lat <- -90:90 ## lat does not change +shifted <- ShiftLon(data = data, lon = lon, westB = -180, ncores = 1) + + \dontrun{ +s2dv::PlotEquiMap(var = data, lon = lon, lat = lat, filled.continents = FALSE) +s2dv::PlotEquiMap(var = shifted$data, lon = shifted$lon, lat = lat, filled.continents = FALSE) + } + +} diff --git a/man/Subset.Rd b/man/Subset.Rd index 634cfe2a60d07b8d31eeafa4885fca01afd71b42..f50c5da0bb4098fb407a0335904a70147fa47c70 100644 --- a/man/Subset.Rd +++ b/man/Subset.Rd @@ -7,28 +7,49 @@ Subset(x, along, indices, drop = FALSE) } \arguments{ -\item{x}{A multidimensional array to be sliced. It can have dimension names either in \code{names(dim(x))} or either in the attribute 'dimensions'.} +\item{x}{A named multidimensional array to be sliced. It can have dimension +names either in \code{names(dim(x))} or in the attribute 'dimensions'.} -\item{along}{Vector with references to the dimensions to take the subset from: either integers or dimension names.} +\item{along}{A vector with references to the dimensions to take the subset +from: either integers or dimension names.} -\item{indices}{List of indices to take from each dimension specified in 'along'. If a single dimension is specified in 'along' the indices can be directly provided as a single integer or as a vector.} +\item{indices}{A list of indices to take from each dimension specified in +'along'. If a single dimension is specified in 'along', it can be directly +provided as an integer or a vector.} -\item{drop}{Whether to drop all the dimensions of length 1 in the resulting array, none, only those that are specified in 'along', or only those that are not specified in 'along'. The possible values are, respectively: 'all' or TRUE, 'none' or FALSE, 'selected', and 'non-selected'.} +\item{drop}{Whether to drop all the dimensions of length 1 in the resulting +array, none, only those that are specified in 'along', or only those that +are not specified in 'along'. The possible values are: 'all' or TRUE, 'none' +or FALSE, 'selected', and 'non-selected'. The default value is FALSE.} } \value{ -An array with similar dimensions as the \code{x} input, but with trimmed or dropped dimensions. 
+An array with similar dimensions as the \code{x} input, but with + trimmed or dropped dimensions. } \description{ -This function allows to subset (i.e. slice, take a chunk of) an array, in a similar way as done in the function \code{take()} in the package plyr. There are two main inprovements:\cr\cr The input array can have dimension names, either in \code{names(dim(x))} or in the attribute 'dimensions', and the dimensions to subset along can be specified via the parameter \code{along} either with integer indices or either by their name.\cr\cr There are additional ways to adjust which dimensions are dropped in the resulting array: either to drop all, to drop none, to drop only the ones that have been sliced or to drop only the ones that have not been sliced.\cr\cr If an array is provided without dimension names, dimension names taken from the parameter \code{dim_names} will be added to the array. -This function computes the threshold based on a quantile value for each day of the year of the daily data input. +This function allows to subset (i.e. slice, take a chunk of) an array, in a +similar way as done in the function \code{take()} in the package plyr. There +are two main improvements:\cr\cr First, the input array can have dimension +names, either in \code{names(dim(x))} or in the attribute 'dimensions'. If +both exist, the attribute 'dimensions' is prioritized. The dimensions to +subset along can be specified via the parameter \code{along} either with +integer indices or by their name.\cr\cr Second, there are additional +ways to adjust which dimensions are dropped in the resulting array: either to +drop all, to drop none, to drop only the ones that have been sliced or to drop +only the ones that have not been sliced.\cr\cr } \examples{ -##Example synthetic data: +#Example synthetic data: +# Dimension has name already data <- 1:(2 * 3 * 372 * 1) dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) data_subset <- Subset(data, c('time', 'model'), list(1:10, TRUE), drop = 'selected') dim(data_subset) +# Use attributes 'dimensions' +data <- array(1:(2 * 3 * 372 * 1), dim = c(2, 3, 372, 1)) +attributes(data)[['dimensions']] <- c('lat', 'lon', 'time', 'model') +data_subset <- Subset(data, c('lon', 'lat'), list(1, 1), drop = TRUE) +dim(data_subset) } - diff --git a/man/Threshold.Rd index cec12e68bc83ea93767f034c6aa417b5076e81b0..e6ae99f6696ca434d49bd782226c75bb685062d3 100644 --- a/man/Threshold.Rd +++ b/man/Threshold.Rd @@ -4,8 +4,15 @@ \alias{Threshold} \title{Daily thresholds based on quantiles for n-dimensional arrays} \usage{ -Threshold(data, dates = NULL, calendar = NULL, base.range = NULL, - qtiles = 0.9, ncores = NULL, na.rm = FALSE) +Threshold( + data, + dates = NULL, + calendar = NULL, + base.range = NULL, + qtiles = 0.9, + ncores = NULL, + na.rm = FALSE +) } \arguments{ \item{data}{A numeric n-dimensional array containing daily data.} @@ -41,5 +48,5 @@ attr(data, 'Variables')$dat1$time <- time a <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL) str(a) -} +} diff --git a/man/WaveDuration.Rd index b0682523c96b1d604c09c6c397b8040523c7e4e7..a43c6b9252de6976d55a2db1096a5b971c375eb5 100644 --- a/man/WaveDuration.Rd +++ b/man/WaveDuration.Rd @@ -4,8 +4,16 @@ \alias{WaveDuration} \title{Heat and cold waves duration for n-dimensional arrays} \usage{ -WaveDuration(data, threshold, op = ">", spell.length = 6, - by.seasons = TRUE, dates = NULL, calendar = NULL, ncores = NULL) +WaveDuration( + data, + threshold, + 
op = ">", + spell.length = 6, + by.seasons = TRUE, + dates = NULL, + calendar = NULL, + ncores = NULL +) } \arguments{ \item{data}{A numeric n-dimensional array containing daily maximum or minimum temperature} @@ -48,4 +56,3 @@ threshold <- rep(40, 31) a <- WaveDuration(data, threshold, op = ">", spell.length = 6, by.seasons = TRUE, ncores = NULL) str(a) } - diff --git a/man/WeightedCells.Rd b/man/WeightedCells.Rd new file mode 100644 index 0000000000000000000000000000000000000000..9b5c880378cdcc25467bc60800c2833e9b632f6c --- /dev/null +++ b/man/WeightedCells.Rd @@ -0,0 +1,40 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/WeightedCells.R +\name{WeightedCells} +\alias{WeightedCells} +\title{Compute the square-root of the cosine of the latitude weighting on the given +array.} +\usage{ +WeightedCells(data, lat, lat_dim = "lat", method = "cos", ncores = NULL) +} +\arguments{ +\item{data}{A numeric array with named dimensions, representing the data to be +applied the weights. It should have at least the latitude dimension and it +can have more other dimensions.} + +\item{lat}{A numeric vector or array with one dimension containing the +latitudes (in degrees).} + +\item{lat_dim}{A character string indicating the name of the latitudinal +dimension. The default value is 'lat'.} + +\item{method}{A character string indicating the type of weighting applied: +'cos' (cosine of the latitude) or 'sqrtcos' (square-root of the +cosine of the latitude). The default value is 'cos'.} + +\item{ncores}{An integer indicating the number of cores to use for parallel +computation. The default value is NULL.} +} +\value{ +An array containing the latitude weighted data with same dimensions as +parameter 'data'. +} +\description{ +This function performs square-root of the cosine of the latitude weighting on +the given array. +} +\examples{ +exp <- array(rnorm(1:30), dim = c(lat = 3, lon = 5, sdate = 2)) +lat <- c(10, 15, 20) +res <- WeightedCells(data = exp, lat = lat) +} diff --git a/man/WeightedMean.Rd b/man/WeightedMean.Rd index 23f31c6c415b83ecd829fba0cc27c3c82d750504..ac3411cba7fc3fadea3f557220d7a6607aedad12 100644 --- a/man/WeightedMean.Rd +++ b/man/WeightedMean.Rd @@ -4,8 +4,15 @@ \alias{WeightedMean} \title{Calculate spatial area-weighted average of multidimensional arrays} \usage{ -WeightedMean(data, lon, lat, region = NULL, mask = NULL, londim = NULL, - latdim = NULL) +WeightedMean( + data, + lon, + lat, + region = NULL, + mask = NULL, + londim = NULL, + latdim = NULL +) } \arguments{ \item{data}{An array with minimum two dimensions of latitude and longitude.} @@ -60,4 +67,3 @@ a <- WeightedMean(data = data, lon = lon, lat = lat, region = NULL, mask = NULL, londim = 1, latdim = 2) str(a) } - diff --git a/tests/testthat/test-ShiftLon.R b/tests/testthat/test-ShiftLon.R new file mode 100644 index 0000000000000000000000000000000000000000..efa358ebb90cb2ac44903dd28f12492fab7a4cd1 --- /dev/null +++ b/tests/testthat/test-ShiftLon.R @@ -0,0 +1,342 @@ +context("ShiftLon tests") + +############################################## + +# dat1 +dat1 <- array(1:360, dim = c(lon = 360, lat = 181)) +lon1 <- 0:359 +lat1 <- -90:90 + +# dat2 +dat2 <- array(1:360, dim = c(lon = 360, lat = 181)) +lon2 <- -180:179 +lat2 <- -90:90 + +# dat3 +dat3 <- array(1:512, dim = c(lon = 512, lat = 216, time = 2)) +lon3 <- seq(0, 360, length.out = 513)[1:512] +lat3 <- seq(-90, 90, length.out = 216) + +############################################## +test_that("1. 
Input checks", { + # data + expect_error( + ShiftLon(c()), + "Parameter 'data' cannot be NULL." + ) + expect_error( + ShiftLon(1:10), + "Parameter 'data' must be a numeric array." + ) + expect_error( + ShiftLon(array(1:10)), + "Parameter 'data' must have dimension names." + ) + # lon_dim + expect_error( + ShiftLon(dat1, lon1, lon_dim = 1), + "Parameter 'lon_dim' must be a character string." + ) + expect_error( + ShiftLon(dat1, lon1, lon_dim = 'a'), + "Parameter 'lon_dim' is not found in 'data' dimensions." + ) + # lon + expect_error( + ShiftLon(dat1, lon = 'a'), + "Parameter 'lon' must be numeric." + ) + expect_error( + ShiftLon(dat1, lon = 1:100), + "The length of 'lon' must be the same as the length of 'lon_dim' in 'data'." + ) + expect_error( + ShiftLon(dat1, lon = 359:0), + "Parameter 'lon' must be monotonic increasing." + ) + # westB + expect_error( + ShiftLon(dat1, lon = lon1, westB = 'a'), + "Parameter 'westB' must be numeric." + ) + expect_warning( + ShiftLon(dat1, lon = lon1, westB = 1:10), + "Parameter 'westB' should be a number. Use the first element only." + ) + # ncores + expect_error( + ShiftLon(dat1, lon = lon1, westB = 10, ncores = 'a'), + "Parameter 'ncores' must be a positive integer." + ) + +}) + + +############################################## +test_that("2. dat1", { + + westB <- -180 + expect_equal( + dim(ShiftLon(dat1, lon = lon1, westB = westB)$data), + dim(dat1) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$lon, + -180:179 + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$data[,1], + c(181:360, 1:180) + ) + #----------------- + westB <- 90 + expect_equal( + dim(ShiftLon(dat1, lon = lon1, westB = westB)$data), + dim(dat1) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$lon, + 90:(90 + 359) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$data[,1], + c(91:360, 1:90) + ) + #----------------- + westB <- 300 + expect_equal( + dim(ShiftLon(dat1, lon = lon1, westB = westB)$data), + dim(dat1) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$lon, + 300:(300 + 359) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$data[,1], + c(301:360, 1:300) + ) + #----------------- + westB <- -270 + expect_equal( + dim(ShiftLon(dat1, lon = lon1, westB = westB)$data), + dim(dat1) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$lon, + -270:(-270+359) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$data[,1], + c(91:360, 1:90) + ) + #----------------- + westB <- -10 + expect_equal( + dim(ShiftLon(dat1, lon = lon1, westB = westB)$data), + dim(dat1) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$lon, + -10:(-10 + 359) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$data[,1], + c(351:360, 1:350) + ) + #----------------- + westB <- -0.5 + expect_equal( + dim(ShiftLon(dat1, lon = lon1, westB = westB)$data), + dim(dat1) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$lon, + -1:358 + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$data[,1], + c(360, 1:359) + ) + +}) + + +############################################## +test_that("3. 
dat2", { + + westB <- 0 + expect_equal( + dim(ShiftLon(dat2, lon = lon2, westB = westB)$data), + dim(dat2) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$lon, + 0:359 + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$data[,1], + c(181:360, 1:180) + ) + #----------------- + westB <- 90 + expect_equal( + dim(ShiftLon(dat2, lon = lon2, westB = westB)$data), + dim(dat2) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$lon, + 90:(90 + 359) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$data[,1], + c(271:360, 1:270) + ) + #----------------- + westB <- 300 + expect_equal( + dim(ShiftLon(dat2, lon = lon2, westB = westB)$data), + dim(dat2) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$lon, + 300:(300 + 359) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$data[,1], + c(121:360, 1:120) + ) + #----------------- + westB <- -270 + expect_equal( + dim(ShiftLon(dat2, lon = lon2, westB = westB)$data), + dim(dat2) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$lon, + -270:(-270+359) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$data[,1], + c(271:360, 1:270) + ) + #----------------- + westB <- -10 + expect_equal( + dim(ShiftLon(dat2, lon = lon2, westB = westB)$data), + dim(dat2) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$lon, + -10:(-10 + 359) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$data[,1], + c(171:360, 1:170) + ) + #----------------- + westB <- -0.5 + expect_equal( + dim(ShiftLon(dat2, lon = lon2, westB = westB)$data), + dim(dat2) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$lon, + -1:358 + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$data[,1], + c(180:360, 1:179) + ) + +}) + +############################################## +test_that("4. 
dat3", { + + westB <- -180 + expect_equal( + dim(ShiftLon(dat3, lon = lon3, westB = westB)$data), + dim(dat3) + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$lon, + seq(westB, westB + 360, length.out = 513)[-513] + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$data[,1, 2], + c(257:512, 1:256) + ) + #----------------- + westB <- 90 + expect_equal( + dim(ShiftLon(dat3, lon = lon3, westB = westB)$data), + dim(dat3) + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$lon, + seq(westB, westB + 360, length.out = 513)[-513] + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$data[,1, 2], + c(129:512, 1:128) + ) + #----------------- + westB <- 300 + expect_equal( + dim(ShiftLon(dat3, lon = lon3, westB = westB)$data), + dim(dat3) + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$lon, + seq(300.2344, 300.2344 + 360, length.out = 513)[-513], + tolerance = 0.001 + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$data[,1, 2], + c(428:512, 1:427) + ) + #----------------- + westB <- -270 + expect_equal( + dim(ShiftLon(dat3, lon = lon3, westB = westB)$data), + dim(dat3) + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$lon, + seq(westB, westB + 360, length.out = 513)[-513] + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$data[,1, 2], + c(129:512, 1:128) + ) + #----------------- + westB <- -10 + expect_equal( + dim(ShiftLon(dat3, lon = lon3, westB = westB)$data), + dim(dat3) + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$lon, + seq(-9.843750, -9.843750 + 360, length.out = 513)[-513] + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$data[,1, 2], + c(499:512, 1:498) + ) + #----------------- + westB <- -0.5 + expect_equal( + dim(ShiftLon(dat3, lon = lon3, westB = westB)$data), + dim(dat3) + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$lon, + seq(-0.703125, -0.703125 + 360, length.out = 513)[-513] + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$data[,1, 2], + c(512, 1:511) + ) + +}) diff --git a/tests/testthat/test-Subset.R b/tests/testthat/test-Subset.R new file mode 100644 index 0000000000000000000000000000000000000000..e769453cdca7a6188caf80ebbe41e5de2bfef3dd --- /dev/null +++ b/tests/testthat/test-Subset.R @@ -0,0 +1,290 @@ +context("Subset tests") + +dat1 <- array(1:20, dim = c(dat = 1, lat = 2, lon = 10)) + +dat2 <- dat1 +attributes(dat2)[['units']] <- 'K' + +dat3 <- array(1:20, dim = c(1, 2, 10)) +attributes(dat3)[['dimensions']] <- c('dat', 'lat', 'lon') + +dat4 <- dat1 +attributes(dat4)[['dimensions']] <- c('dataset', 'latitude', 'longitude') + +#============================================== + +test_that("1. Sanity checks", { + +expect_error( +Subset(x = 1:4), +"Input array 'x' must be a numeric array." +) +expect_error( +Subset(x = array(1:20, dim = c(1, 2, 10)), along = 'a'), +"The input array 'x' doesn't have labels for the dimensions but the parameter 'along' contains dimension names." +) +expect_error( +Subset(x = dat1, along = T), +"All provided dimension indices in 'along' must be integers or character strings." +) +expect_error( +Subset(x = dat1, along = integer(0)), +"All provided dimension indices in 'along' must be integers or character strings." +) +expect_error( +Subset(x = dat1, along = c('dat', 'dat')), +"The parameter 'along' must not contain repeated dimension names." 
+) +expect_error( +Subset(x = dat1, along = 'dataset'), +"Could not match all dimension names in 'indices' with dimension names in input array 'x'." +) +expect_error( +Subset(x = dat1, 'dat', 1, drop = 'yes'), +"Parameter 'drop' must be one of TRUE, FALSE, 'all', 'selected', 'non-selected', 'none'." +) + +}) + +test_that("2. dat1", { +# drop: dat +expect_equal( +dim(Subset(dat1, 'dat', 1, drop = FALSE)), +dim(dat1) +) +expect_equal( +as.vector(Subset(dat1, 'dat', 1, drop = FALSE)), +as.vector(dat1) +) +expect_equal( +attributes(Subset(dat1, 'dat', 1, drop = FALSE)), +attributes(dat1) +) +expect_equal( +dim(Subset(dat1, 'dat', 1, drop = TRUE)), +dim(dat1)[-1] +) +expect_equal( +as.vector(Subset(dat1, 'dat', 1, drop = TRUE)), +as.vector(dat1) +) +expect_equal( +as.vector(Subset(dat1, 'dat', 1, drop = TRUE)), +as.vector(Subset(dat1, 1, 1, drop = TRUE)) +) +expect_equal( +dim(Subset(dat1, 'dat', 1, drop = 'selected')), +dim(dat1)[-1] +) +expect_equal( +as.vector(Subset(dat1, 'dat', 1, drop = 'selected')), +as.vector(dat1) +) +# drop: lat +expect_equal( +dim(Subset(dat1, 'lat', 1, drop = FALSE)), +c(dat = 1, lat = 1, lon = 10) +) +expect_equal( +as.vector(Subset(dat1, 'lat', 1, drop = FALSE)), +as.vector(dat1[, 1, ]) +) +expect_equal( +dim(Subset(dat1, 'lat', 1, drop = TRUE)), +dim(dat1)[3] +) +expect_equal( +as.vector(Subset(dat1, 'lat', 1, drop = TRUE)), +as.vector(dat1[, 1, ]) +) +expect_equal( +Subset(dat1, 'lat', 1, drop = TRUE), +Subset(dat1, 2, 1, drop = TRUE) +) +expect_equal( +dim(Subset(dat1, 'lat', 1, drop = 'selected')), +dim(dat1)[c(1, 3)] +) +expect_equal( +as.vector(Subset(dat1, 'lat', 1, drop = 'selected')), +as.vector(dat1[, 1, ]) +) +# drop: lat, lon +expect_equal( +dim(Subset(dat1, c('lat', 'lon'), list(1, 2), drop = FALSE)), +c(dat = 1, lat = 1, lon = 1) +) +expect_equal( +as.vector(Subset(dat1, c('lat', 'lon'), list(1, 2), drop = FALSE)), +3 +) +expect_equal( +dim(Subset(dat1, c('lat', 'lon'), list(1, 2), drop = TRUE)), +1 +) +expect_equal( +as.vector(Subset(dat1, c('lat', 'lon'), list(1, 2), drop = TRUE)), +3 +) +expect_equal( +Subset(dat1, c('lat', 'lon'), list(1, 2), drop = TRUE), +Subset(dat1, c(2, 3), list(1, 2), drop = TRUE) +) +expect_equal( +dim(Subset(dat1, c('lat', 'lon'), list(1, 2), drop = 'selected')), +c(dat = 1) +) +expect_equal( +as.vector(Subset(dat1, c('lat', 'lon'), list(1, 2), drop = 'selected')), +3 +) +# drop: dat, lat, lon +expect_equal( +dim(Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = FALSE)), +c(dat = 1, lat = 1, lon = 1) +) +expect_equal( +as.vector(Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = FALSE)), +3 +) +expect_equal( +Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = FALSE), +Subset(dat1, c(1:3), list(1, 1, 2), drop = FALSE) +) +expect_equal( +dim(Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = TRUE)), +1 +) +expect_equal( +as.vector(Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = TRUE)), +3 +) +expect_equal( +Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = TRUE), +Subset(dat1, c(1:3), list(1, 1, 2), drop = TRUE) +) +expect_equal( +class(Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = 'selected')), +'array' +) +expect_equal( +dim(Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = 'selected')), +1 +) +expect_equal( +as.vector(Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = 'selected')), +3 +) + +}) + +test_that("3. 
dat2", { +# Same as dat1 but with attributes + +# drop: lat +expect_equal( +dim(Subset(dat2, 'lat', 1, drop = TRUE)), +dim(dat1)[3] +) +expect_equal( +as.vector(Subset(dat2, 'lat', 1, drop = TRUE)), +as.vector(dat1[, 1, ]) +) +expect_equal( +attributes(Subset(dat2, 'lat', 1, drop = TRUE)), +list(dim = c(lon = 10), units = 'K') +) + +# drop: dat, lat, lon +expect_equal( +dim(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = FALSE)), +c(dat = 1, lat = 1, lon = 1) +) +expect_equal( +as.vector(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = FALSE)), +3 +) +expect_equal( +attributes(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = FALSE)), +list(dim = c(dat = 1, lat = 1, lon = 1), units = 'K') +) +expect_equal( +dim(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = TRUE)), +1 +) +expect_equal( +as.vector(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = TRUE)), +3 +) +expect_equal( +attributes(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = TRUE)), +list(dim = 1, units = 'K') +) +expect_equal( +class(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = 'selected')), +'array' +) +expect_equal( +dim(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = 'selected')), +1 +) +expect_equal( +as.vector(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = 'selected')), +3 +) +expect_equal( +attributes(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = 'selected')), +list(dim = 1, units = 'K') +) + +}) + +test_that("4. dat3", { +# drop: dat +expect_equal( +Subset(dat1, 'dat', 1, drop = FALSE), +Subset(dat3, 'dat', 1, drop = FALSE), +check.attributes = F +) +expect_equal( +attributes(Subset(dat3, 'dat', 1, drop = FALSE)), +list(dim = c(dat = 1, lat = 2, lon = 10), dimensions = c('dat', 'lat', 'lon')) +) + +# drop: lat, lon +expect_equal( +Subset(dat1, c('lat', 'lon'), list(1, 2), drop = TRUE), +Subset(dat3, c('lat', 'lon'), list(1, 2), drop = TRUE), +check.attributes = F +) +expect_equal( +attributes(Subset(dat3, c('lat', 'lon'), list(1, 2), drop = TRUE)), +list(dim = 1, dimensions = c('dat', 'lat', 'lon')) +) + +}) + +test_that("5. dat4", { + +# drop: lat +expect_equal( +Subset(dat4, 'latitude', 1, drop = FALSE), +Subset(dat1, 'lat', 1, drop = FALSE), +check.attributes = F +) +expect_equal( +attributes(Subset(dat4, 'latitude', 1, drop = FALSE)), +list(dim = c(dataset = 1, latitude = 1, longitude = 10), dimensions = c('dataset', 'latitude', 'longitude')) +) +# drop: lat, lon +expect_equal( +Subset(dat4, c('latitude', 'longitude'), list(1, 2), drop = TRUE), +Subset(dat1, c('lat', 'lon'), list(1, 2), drop = TRUE), +check.attributes = F +) +expect_equal( +attributes(Subset(dat4, c('latitude', 'longitude'), list(1, 2), drop = TRUE)), +list(dim = 1, dimensions = c('dataset', 'latitude', 'longitude')) +) + +}) diff --git a/tests/testthat/test-WeightedCells.R b/tests/testthat/test-WeightedCells.R new file mode 100644 index 0000000000000000000000000000000000000000..dcf171d6f29eecfdcb2819aec595d6b279eafc64 --- /dev/null +++ b/tests/testthat/test-WeightedCells.R @@ -0,0 +1,111 @@ +context("s2dv::WeightedCells tests") + +############################################## + +# dat1 +set.seed(1) +data1 <- array(rnorm(5), dim = c(lat = 5)) +set.seed(2) +lat1 <- array(rnorm(5)*5) + +# dat2 +set.seed(1) +data2 <- array(rnorm(60), dim = c(sdate = 10, lat = 2, lon = 3)) +set.seed(2) +lat2 <- array(rnorm(2)*10) + + +############################################## +test_that("1. 
Input checks", { + # data + expect_error( + WeightedCells(c()), + "Parameter 'data' cannot be NULL." + ) + expect_error( + WeightedCells('lat'), + "Parameter 'data' must be a numeric array." + ) + expect_error( + WeightedCells(rnorm(5)), + "Parameter 'data' must be at least latitude dimension." + ) + expect_error( + WeightedCells(array(rnorm(5), dim = c(5))), + "Parameter 'data' must have dimension names." + ) + #lat_dim + expect_error( + WeightedCells(data1, lat1, 3), + "Parameter 'lat_dim' must be a character string." + ) + expect_error( + WeightedCells(data1, lat1, 'latitude'), + "Parameter 'lat_dim' is not found in 'data'." + ) + #lat + expect_error( + WeightedCells(data1, NULL), + "Parameter 'lat' cannot be NULL." + ) + expect_error( + WeightedCells(data1, list(lat1)), + "Parameter 'lat' must be a numeric vector or array." + ) + expect_error( + WeightedCells(data1, rnorm(10)), + "Length of parameter 'lat' doesn't match the length of latitudinal dimension in parameter 'data'." + ) + # method + expect_error( + WeightedCells(data1, lat1, method = 1.5), + "Parameter 'method' must be 'cos' or 'sqrtcos'." + ) + # ncores + expect_error( + WeightedCells(data1, lat1, ncores = 1.5), + "Parameter 'ncores' must be either NULL or a positive integer." + ) + +}) + +############################################## +test_that("2. Output checks: dat1", { + + expect_equal( + dim(WeightedCells(data1, lat1)), + c(lat = 5) + ) + expect_equal( + as.vector(WeightedCells(data1, lat1)), + c(-0.6245359, 0.1836194, -0.8276192, 1.5875256, 0.3294997), + tolerance = 0.0001 + ) + +}) + +############################################## +test_that("3. Output checks: dat2", { + + expect_equal( + dim(WeightedCells(data2, lat2)), + c(sdate = 10, lat = 2, lon = 3) + ) + expect_equal( + as.vector(WeightedCells(data2, lat2))[1:5], + c(-0.6187938, 0.1813978, -0.8254109, 1.5757744, 0.3254787), + tolerance = 0.0001 + ) + expect_equal( + dim(WeightedCells(data2, lat2)), + dim(data2) + ) + expect_equal( + WeightedCells(data2, lat2), + WeightedCells(data2, lat2, method = 'sqrtcos') * as.vector(sqrt(cos(lat2 * pi/180))), + tolerance = 0.01 + ) + +}) + +############################################## diff --git a/tests/testthat/test-WeightedMean.R b/tests/testthat/test-WeightedMean.R new file mode 100644 index 0000000000000000000000000000000000000000..2c5a23176bfe912072b9d12fbbdbafeb55615c5d --- /dev/null +++ b/tests/testthat/test-WeightedMean.R @@ -0,0 +1,71 @@ +context("WeightedMean tests") +set.seed(1) +dat1 <- array(rnorm(10000), dim = c(lat = 50, lon = 100)) +lat1 <- seq(-90, 90, length.out = 50) +lon1 <- seq(0, 360, length.out = 101)[1:100] + +set.seed(1) +dat2 <- array(rnorm(10000), dim = c(lat = 180, lon = 360, sdate = 2)) +lat2 <- seq(-89.5, 89.5, length.out = 180) +lon2 <- seq(-180, 179, length.out = 360) + +set.seed(1) +dat3 <- array(rnorm(10000), dim = c(sdate = 2, lat = 171, lon = 350)) +lat3 <- seq(-90, 90, length.out = 181)[c(1:80, seq(81, 101, by = 2), 102:181)] +lon3 <- seq(-180, 179, length.out = 360)[c(1:170, seq(171, 190, by = 2), 191:360)] + +set.seed(1) +dat4 <- array(rnorm(10000), dim = c(lat = 180, lon = 360, sdate = 2, time = 1)) +lat4 <- seq(-89.5, 89.5, length.out = 180) +lon4 <- seq(-179.5, 179.5, length.out = 360) + + +test_that("dat1", { +expect_equal( +dim(WeightedMean(dat1, lat = lat1, lon = lon1)), +NULL +) +expect_equal( +as.vector(WeightedMean(dat1, lat = lat1, lon = lon1)), +-0.009785971, +tolerance = 0.0001 +) +}) + +test_that("dat2", { +expect_equal( +dim(WeightedMean(dat2, lat = lat2, lon = lon2)), 
+NULL +) +expect_equal( +as.vector(WeightedMean(dat2, lat = lat2, lon = lon2)), +c(-0.005799676, -0.007599831), +tolerance = 0.0001 +) +}) + +test_that("dat3", { +expect_equal( +dim(WeightedMean(dat3, lat = lat3, lon = lon3)), +NULL +) +expect_equal( +as.vector(WeightedMean(dat3, lat = lat3, lon = lon3)), +c(-0.0253997, 0.0132251), +tolerance = 0.0001 +) + +}) + + +test_that("dat4", { +expect_equal( +dim(WeightedMean(dat4, lat = lat4, lon = lon4)), +c(sdate = 2, time = 1) +) +expect_equal( +as.vector(WeightedMean(dat4, lat = lat4, lon = lon4)), +c(-0.005799676, -0.007599831), +tolerance = 0.0001 +) +})