From 79266cff28bc963f1086a2a589bed95919867a30 Mon Sep 17 00:00:00 2001 From: aho Date: Fri, 13 May 2022 12:13:36 +0200 Subject: [PATCH 01/25] Fix the incorrect dlon calculation and create unit test --- R/WeightedMean.R | 5 +-- tests/testthat/test-WeightedMean.R | 71 ++++++++++++++++++++++++++++++ 2 files changed, 73 insertions(+), 3 deletions(-) create mode 100644 tests/testthat/test-WeightedMean.R diff --git a/R/WeightedMean.R b/R/WeightedMean.R index 81077ca..393f56f 100644 --- a/R/WeightedMean.R +++ b/R/WeightedMean.R @@ -124,11 +124,10 @@ WeightedMean <- function(data, lon, lat, region = NULL, mask = NULL, londim = NU cosphi <- t(array(cos(lat * pi / 180), dim = c(length(lat), length(lon)))) nblat <- length(lat) nblon <- length(lon) - lon[lon > 180] = lon[lon > 180] - 360 - dlon <- abs(c(abs(lon[2 : nblon]) - abs(lon[1 : nblon - 1]))) * pi / 180 + dlon <- diff(lon) * pi / 180 dlon <- c(dlon, dlon[1]) dlon <- array(dlon, dim = c(nblon, nblat)) - dlat <- abs(c(lat[2 : nblat] - lat[1 : nblat - 1])) * pi / 180 + dlat <- diff(lat) * pi / 180 dlat <- c(dlat, dlat[1]) dlat <- t(array(dlat, dim = c(nblat, nblon))) weight <- (dlon * dlat * cosphi) diff --git a/tests/testthat/test-WeightedMean.R b/tests/testthat/test-WeightedMean.R new file mode 100644 index 0000000..2c5a231 --- /dev/null +++ b/tests/testthat/test-WeightedMean.R @@ -0,0 +1,71 @@ +context("WeightedMean tests") +set.seed(1) +dat1 <- array(rnorm(10000), dim = c(lat = 50, lon = 100)) +lat1 <- seq(-90, 90, length.out = 50) +lon1 <- seq(0, 360, length.out = 101)[1:100] + +set.seed(1) +dat2 <- array(rnorm(10000), dim = c(lat = 180, lon = 360, sdate = 2)) +lat2 <- seq(-89.5, 89.5, length.out = 180) +lon2 <- seq(-180, 179, length.out = 360) + +set.seed(1) +dat3 <- array(rnorm(10000), dim = c(sdate = 2, lat = 171, lon = 350)) +lat3 <- seq(-90, 90, length.out = 181)[c(1:80, seq(81, 101, by = 2), 102:181)] +lon3 <- seq(-180, 179, length.out = 360)[c(1:170, seq(171, 190, by = 2), 191:360)] + +set.seed(1) +dat4 <- array(rnorm(10000), dim = c(lat = 180, lon = 360, sdate = 2, time = 1)) +lat4 <- seq(-89.5, 89.5, length.out = 180) +lon4 <- seq(-179.5, 179.5, length.out = 360) + + +test_that("dat1", { +expect_equal( +dim(WeightedMean(dat1, lat = lat1, lon = lon1)), +NULL +) +expect_equal( +as.vector(WeightedMean(dat1, lat = lat1, lon = lon1)), +-0.009785971, +tolerance = 0.0001 +) +}) + +test_that("dat2", { +expect_equal( +dim(WeightedMean(dat2, lat = lat2, lon = lon2)), +NULL +) +expect_equal( +as.vector(WeightedMean(dat2, lat = lat2, lon = lon2)), +c(-0.005799676, -0.007599831), +tolerance = 0.0001 +) +}) + +test_that("dat3", { +expect_equal( +dim(WeightedMean(dat3, lat = lat3, lon = lon3)), +NULL +) +expect_equal( +as.vector(WeightedMean(dat3, lat = lat3, lon = lon3)), +c(-0.0253997, 0.0132251), +tolerance = 0.0001 +) + +}) + + +test_that("dat4", { +expect_equal( +dim(WeightedMean(dat4, lat = lat4, lon = lon4)), +c(sdate = 2, time = 1) +) +expect_equal( +as.vector(WeightedMean(dat4, lat = lat4, lon = lon4)), +c(-0.005799676, -0.007599831), +tolerance = 0.0001 +) +}) -- GitLab From 6b0cc7cfa801c9b4def40788df7cbd2d1004e404 Mon Sep 17 00:00:00 2001 From: Carlos Delgado Date: Thu, 7 Jul 2022 15:15:03 +0200 Subject: [PATCH 02/25] first version --- R/ShiftLon.R | 102 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 102 insertions(+) create mode 100644 R/ShiftLon.R diff --git a/R/ShiftLon.R b/R/ShiftLon.R new file mode 100644 index 0000000..aaa1c43 --- /dev/null +++ b/R/ShiftLon.R @@ -0,0 +1,102 @@ +#'Shift longitudes in maps +#' 
+#'@description This function shifts the longitudes in maps. +#' +#'@param data An array with, at least, 'lon_dim' dimension. +#'@param lon A vector with the longitudes. +#'@param westB A number with the west boundary for the new longitudes. +#'@param lon_dim A string with the name of the longitude dimension. +#' The default value is "lon". +#'@param ncores The number of cores used for computation. The default +#' value is NULL (it uses only one core). +#' +#'@return +#'\item{data}{ +#' Array with the shifted data with the same dimensions as parameter 'data'. +#'} +#'\item{lon}{ +#' The new longitudes with the same length as parameter 'lon'. +#'} +#' +#'@import multiApply +#'@examples +#'data <- array(data = 1:50, dim = c(lon = 360, lat = 181)) +#'lon <- array(data = 0:359, dim = c(lon = 360)) +#'lat <- -90:90 ## lat does not change +#'shifted <- ShiftLon(data = data, lon = lon, westB = -180, ncores = 1) +#'s2dv::PlotEquiMap(var = data, lon = lon, lat = lat, filled.continents = FALSE) +#'s2dv::PlotEquiMap(var = shifted$data, lon = shifted$lon, lat = lat, filled.continents = FALSE) +#' +#'@import multiApply +#'@export +ShiftLon <- function(data, lon, westB, lon_dim = 'lon', ncores = NULL) { + + ## Check inputs + ## data + if (!is.array(data)){ + stop("Parameter 'data' must be and array.") + } + ## lon + if (!is.numeric(lon)){ + stop("Parameter 'lon' must be numeric.") + } else if (!(length(lon) == as.numeric(dim(data)[lon_dim]))){ + stop("The length of 'lon' must coindide with the length of 'lon_dim' in 'data'.") + } + ## westB + if (!is.numeric(westB)){ + stop("Parameter 'westB' must be numeric.") + } + ## lon_dim + if (!is.character(lon_dim)){ + stop("Parameter 'lon_dim' must be a character string.") + } + if (!(lon_dim %in% names(dim(data)))){ + stop("Parameter 'data' must have 'lon_dim' dimension.") + } + ## ncores + if (!is.null(ncores)) { + if (!is.numeric(ncores) | any(ncores %% 1 != 0) | any(ncores < 0) | + length(ncores) > 1) { + stop("Parameter 'ncores' must be a positive integer.") + } + } + + #########--- Shifting the longitudes (only needed to do it once) ---############ + ## adjust western boundary to lon vector if necessary + if (min(abs(westB-lon))>90){ + westB <- ifelse(westB>0,westB-360,westB+360) + } + ## find closest index in lon vector + first <- which.min(abs(westB-lon))[1] + if (first==1){ + new.lon <- lon + } else { + new.lon <- c(lon[first:length(lon)],lon[1:(first-1)]) + } + ## order to monotonically increasing + if (!all(diff(new.lon)>0)){ + new.lon[1:(which(diff(new.lon)<0))] <- new.lon[1:(which(diff(new.lon)<0))]-360 + if (all(new.lon<0)){ + new.lon <- new.lon+360 + } + } + + ############--- Shifting the data ---############ + output <- multiApply::Apply(data = data, + target_dims = lon_dim, + fun = .ShiftLon, + lon = lon, + new.lon = new.lon, + ncores = ncores)$output1 + return(list(data = output, + lon = new.lon)) +} + +.ShiftLon <- function(data, lon, new.lon) { + + new.data <- NA * data + new.data[new.lon %in% lon] <- data[lon %in% new.lon, drop = F] + new.data[!new.lon %in% lon] <- data[!lon %in% new.lon, drop = F] + + return(new.data) +} -- GitLab From 140f4ac00845bc9bcc4fc38579ef6a15084ff51c Mon Sep 17 00:00:00 2001 From: aho Date: Fri, 5 Aug 2022 12:33:47 +0200 Subject: [PATCH 03/25] Revise ShiftLon.R and create unit tests --- R/ShiftLon.R | 140 +++++++++----- man/ShiftLon.Rd | 48 +++++ tests/testthat/test-ShiftLon.R | 342 +++++++++++++++++++++++++++++++++ 3 files changed, 477 insertions(+), 53 deletions(-) create mode 100644 man/ShiftLon.Rd create mode 
100644 tests/testthat/test-ShiftLon.R diff --git a/R/ShiftLon.R b/R/ShiftLon.R index aaa1c43..aad4deb 100644 --- a/R/ShiftLon.R +++ b/R/ShiftLon.R @@ -1,99 +1,133 @@ -#'Shift longitudes in maps +#'Shift longitudes of a data array #' -#'@description This function shifts the longitudes in maps. +#'@description Shift the longitudes of a data array. Only reasonable for global +#' longitude shifting. It is useful for map plotting or aligning datasets. #' -#'@param data An array with, at least, 'lon_dim' dimension. -#'@param lon A vector with the longitudes. -#'@param westB A number with the west boundary for the new longitudes. -#'@param lon_dim A string with the name of the longitude dimension. -#' The default value is "lon". -#'@param ncores The number of cores used for computation. The default -#' value is NULL (it uses only one core). +#'@param data A named multidimensional array with at least 'lon_dim' dimension. +#'@param lon A numeric vector of longitudes. The values are expected to be +#' monotonic increasing. +#'@param westB A number indicating the west boundary of the new longitudes. +#'@param lon_dim A character string indicating the name of the longitude +#' dimension in 'data'. The default value is 'lon'. +#'@param ncores An integer indicating the number of cores used for computation. +#' The default value is NULL (use only one core). #' #'@return +#'A list of 2: #'\item{data}{ -#' Array with the shifted data with the same dimensions as parameter 'data'. +#' Array of the shifted data with the same dimensions as parameter 'data'. #'} #'\item{lon}{ -#' The new longitudes with the same length as parameter 'lon'. +#' The monotonic increasing new longitudes with the same length as parameter +#' 'lon' and start at 'westB'. #'} #' -#'@import multiApply #'@examples #'data <- array(data = 1:50, dim = c(lon = 360, lat = 181)) #'lon <- array(data = 0:359, dim = c(lon = 360)) #'lat <- -90:90 ## lat does not change #'shifted <- ShiftLon(data = data, lon = lon, westB = -180, ncores = 1) +#' +#' \donttest{ #'s2dv::PlotEquiMap(var = data, lon = lon, lat = lat, filled.continents = FALSE) #'s2dv::PlotEquiMap(var = shifted$data, lon = shifted$lon, lat = lat, filled.continents = FALSE) -#' +#' } +#' #'@import multiApply #'@export ShiftLon <- function(data, lon, westB, lon_dim = 'lon', ncores = NULL) { - ## Check inputs + # Check inputs ## data - if (!is.array(data)){ - stop("Parameter 'data' must be and array.") - } - ## lon - if (!is.numeric(lon)){ - stop("Parameter 'lon' must be numeric.") - } else if (!(length(lon) == as.numeric(dim(data)[lon_dim]))){ - stop("The length of 'lon' must coindide with the length of 'lon_dim' in 'data'.") + if (is.null(data)) { + stop("Parameter 'data' cannot be NULL.") } - ## westB - if (!is.numeric(westB)){ - stop("Parameter 'westB' must be numeric.") + if (!is.array(data) | !is.numeric(data)) { + stop("Parameter 'data' must be a numeric array.") + } + if(any(is.null(names(dim(data))))| any(nchar(names(dim(data))) == 0)) { + stop("Parameter 'data' must have dimension names.") } ## lon_dim - if (!is.character(lon_dim)){ + if (!is.character(lon_dim) | length(lon_dim) > 1) { stop("Parameter 'lon_dim' must be a character string.") } - if (!(lon_dim %in% names(dim(data)))){ - stop("Parameter 'data' must have 'lon_dim' dimension.") + if (!(lon_dim %in% names(dim(data)))) { + stop("Parameter 'lon_dim' is not found in 'data' dimensions.") + } + ## lon + if (!is.numeric(lon)) { + stop("Parameter 'lon' must be numeric.") + } + if (!(length(lon) == 
as.numeric(dim(data)[lon_dim]))) { + stop("The length of 'lon' must be the same as the length of 'lon_dim' in 'data'.") + } + if (any(diff(lon) < 0)) { + stop("Parameter 'lon' must be monotonic increasing.") + } + ## westB + if (!is.numeric(westB)) { + stop("Parameter 'westB' must be numeric.") + } + if (length(westB) != 1) { + westB <- westB[1] + warning("Parameter 'westB' should be a number. Use the first element only.") } ## ncores if (!is.null(ncores)) { - if (!is.numeric(ncores) | any(ncores %% 1 != 0) | any(ncores < 0) | - length(ncores) > 1) { + if (!is.numeric(ncores)) { + stop("Parameter 'ncores' must be a positive integer.") + } else if (any(ncores %% 1 != 0) | any(ncores <= 0) | length(ncores) > 1) { stop("Parameter 'ncores' must be a positive integer.") } } - #########--- Shifting the longitudes (only needed to do it once) ---############ - ## adjust western boundary to lon vector if necessary - if (min(abs(westB-lon))>90){ - westB <- ifelse(westB>0,westB-360,westB+360) + ############################################# + + # Shifting the longitudes (only needed to do it once) + ## Adjust western boundary of lon vector if necessary + ## If westB is not within lon, adjust it to find 'first' below + shft_westB_back <- 0 + if (westB < min(lon)) { + westB <- westB + 360 + shft_westB_back <- -360 + } else if (westB > max(lon)) { + westB <- westB - 360 + shft_westB_back <- 360 + } + if (westB < min(lon) | westB > (min(lon) + 360)) { + stop("westB is too far away from the 'lon' range.") } - ## find closest index in lon vector - first <- which.min(abs(westB-lon))[1] - if (first==1){ + + ## Find closest index in lon vector + first <- which.min(abs(westB - lon))[1] + if (first == 1) { new.lon <- lon } else { new.lon <- c(lon[first:length(lon)],lon[1:(first-1)]) } - ## order to monotonically increasing - if (!all(diff(new.lon)>0)){ - new.lon[1:(which(diff(new.lon)<0))] <- new.lon[1:(which(diff(new.lon)<0))]-360 - if (all(new.lon<0)){ - new.lon <- new.lon+360 - } + ## Order to monotonically increasing + if (!all(diff(new.lon) > 0)) { + new.lon[(which(diff(new.lon) < 0) + 1):length(new.lon)] <- new.lon[(which(diff(new.lon) < 0) + 1):length(new.lon)] + 360 + } - ############--- Shifting the data ---############ - output <- multiApply::Apply(data = data, - target_dims = lon_dim, - fun = .ShiftLon, - lon = lon, - new.lon = new.lon, - ncores = ncores)$output1 - return(list(data = output, - lon = new.lon)) + # Shifting the data + output <- Apply(data = data, + target_dims = lon_dim, + fun = .ShiftLon, + lon = lon, + new.lon = new.lon, + ncores = ncores)$output1 + + # Shift new.lon back to start at westB + new.lon <- new.lon + shft_westB_back + + return(list(data = output, lon = new.lon)) } .ShiftLon <- function(data, lon, new.lon) { - + # data: [lon] new.data <- NA * data new.data[new.lon %in% lon] <- data[lon %in% new.lon, drop = F] new.data[!new.lon %in% lon] <- data[!lon %in% new.lon, drop = F] diff --git a/man/ShiftLon.Rd b/man/ShiftLon.Rd new file mode 100644 index 0000000..bf8e0de --- /dev/null +++ b/man/ShiftLon.Rd @@ -0,0 +1,48 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/ShiftLon.R +\name{ShiftLon} +\alias{ShiftLon} +\title{Shift longitudes of a data array} +\usage{ +ShiftLon(data, lon, westB, lon_dim = "lon", ncores = NULL) +} +\arguments{ +\item{data}{A named multidimensional array with at least 'lon_dim' dimension.} + +\item{lon}{A numeric vector of longitudes. 
The values are expected to be +monotonic increasing.} + +\item{westB}{A number indicating the west boundary of the new longitudes.} + +\item{lon_dim}{A character string indicating the name of the longitude +dimension in 'data'. The default value is 'lon'.} + +\item{ncores}{An integer indicating the number of cores used for computation. +The default value is NULL (use only one core).} +} +\value{ +A list of 2: +\item{data}{ + Array of the shifted data with the same dimensions as parameter 'data'. +} +\item{lon}{ + The monotonic increasing new longitudes with the same length as parameter + 'lon' and start at 'westB'. +} +} +\description{ +Shift the longitudes of a data array. Only reasonable for global + longitude shifting. It is useful for map plotting or aligning datasets. +} +\examples{ +data <- array(data = 1:50, dim = c(lon = 360, lat = 181)) +lon <- array(data = 0:359, dim = c(lon = 360)) +lat <- -90:90 ## lat does not change +shifted <- ShiftLon(data = data, lon = lon, westB = -180, ncores = 1) + + \donttest{ +s2dv::PlotEquiMap(var = data, lon = lon, lat = lat, filled.continents = FALSE) +s2dv::PlotEquiMap(var = shifted$data, lon = shifted$lon, lat = lat, filled.continents = FALSE) + } + +} diff --git a/tests/testthat/test-ShiftLon.R b/tests/testthat/test-ShiftLon.R new file mode 100644 index 0000000..efa358e --- /dev/null +++ b/tests/testthat/test-ShiftLon.R @@ -0,0 +1,342 @@ +context("ShiftLon tests") + +############################################## + +# dat1 +dat1 <- array(1:360, dim = c(lon = 360, lat = 181)) +lon1 <- 0:359 +lat1 <- -90:90 + +# dat2 +dat2 <- array(1:360, dim = c(lon = 360, lat = 181)) +lon2 <- -180:179 +lat2 <- -90:90 + +# dat3 +dat3 <- array(1:512, dim = c(lon = 512, lat = 216, time = 2)) +lon3 <- seq(0, 360, length.out = 513)[1:512] +lat3 <- seq(-90, 90, length.out = 216) + +############################################## +test_that("1. Input checks", { + # data + expect_error( + ShiftLon(c()), + "Parameter 'data' cannot be NULL." + ) + expect_error( + ShiftLon(1:10), + "Parameter 'data' must be a numeric array." + ) + expect_error( + ShiftLon(array(1:10)), + "Parameter 'data' must have dimension names." + ) + # lon_dim + expect_error( + ShiftLon(dat1, lon1, lon_dim = 1), + "Parameter 'lon_dim' must be a character string." + ) + expect_error( + ShiftLon(dat1, lon1, lon_dim = 'a'), + "Parameter 'lon_dim' is not found in 'data' dimensions." + ) + # lon + expect_error( + ShiftLon(dat1, lon = 'a'), + "Parameter 'lon' must be numeric." + ) + expect_error( + ShiftLon(dat1, lon = 1:100), + "The length of 'lon' must be the same as the length of 'lon_dim' in 'data'." + ) + expect_error( + ShiftLon(dat1, lon = 359:0), + "Parameter 'lon' must be monotonic increasing." + ) + # westB + expect_error( + ShiftLon(dat1, lon = lon1, westB = 'a'), + "Parameter 'westB' must be numeric." + ) + expect_warning( + ShiftLon(dat1, lon = lon1, westB = 1:10), + "Parameter 'westB' should be a number. Use the first element only." + ) + # ncores + expect_error( + ShiftLon(dat1, lon = lon1, westB = 10, ncores = 'a'), + "Parameter 'ncores' must be a positive integer." + ) + +}) + + +############################################## +test_that("2. 
dat1", { + + westB <- -180 + expect_equal( + dim(ShiftLon(dat1, lon = lon1, westB = westB)$data), + dim(dat1) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$lon, + -180:179 + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$data[,1], + c(181:360, 1:180) + ) + #----------------- + westB <- 90 + expect_equal( + dim(ShiftLon(dat1, lon = lon1, westB = westB)$data), + dim(dat1) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$lon, + 90:(90 + 359) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$data[,1], + c(91:360, 1:90) + ) + #----------------- + westB <- 300 + expect_equal( + dim(ShiftLon(dat1, lon = lon1, westB = westB)$data), + dim(dat1) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$lon, + 300:(300 + 359) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$data[,1], + c(301:360, 1:300) + ) + #----------------- + westB <- -270 + expect_equal( + dim(ShiftLon(dat1, lon = lon1, westB = westB)$data), + dim(dat1) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$lon, + -270:(-270+359) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$data[,1], + c(91:360, 1:90) + ) + #----------------- + westB <- -10 + expect_equal( + dim(ShiftLon(dat1, lon = lon1, westB = westB)$data), + dim(dat1) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$lon, + -10:(-10 + 359) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$data[,1], + c(351:360, 1:350) + ) + #----------------- + westB <- -0.5 + expect_equal( + dim(ShiftLon(dat1, lon = lon1, westB = westB)$data), + dim(dat1) + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$lon, + -1:358 + ) + expect_equal( + ShiftLon(dat1, lon = lon1, westB = westB)$data[,1], + c(360, 1:359) + ) + +}) + + +############################################## +test_that("3. 
dat2", { + + westB <- 0 + expect_equal( + dim(ShiftLon(dat2, lon = lon2, westB = westB)$data), + dim(dat2) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$lon, + 0:359 + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$data[,1], + c(181:360, 1:180) + ) + #----------------- + westB <- 90 + expect_equal( + dim(ShiftLon(dat2, lon = lon2, westB = westB)$data), + dim(dat2) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$lon, + 90:(90 + 359) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$data[,1], + c(271:360, 1:270) + ) + #----------------- + westB <- 300 + expect_equal( + dim(ShiftLon(dat2, lon = lon2, westB = westB)$data), + dim(dat2) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$lon, + 300:(300 + 359) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$data[,1], + c(121:360, 1:120) + ) + #----------------- + westB <- -270 + expect_equal( + dim(ShiftLon(dat2, lon = lon2, westB = westB)$data), + dim(dat2) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$lon, + -270:(-270+359) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$data[,1], + c(271:360, 1:270) + ) + #----------------- + westB <- -10 + expect_equal( + dim(ShiftLon(dat2, lon = lon2, westB = westB)$data), + dim(dat2) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$lon, + -10:(-10 + 359) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$data[,1], + c(171:360, 1:170) + ) + #----------------- + westB <- -0.5 + expect_equal( + dim(ShiftLon(dat2, lon = lon2, westB = westB)$data), + dim(dat2) + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$lon, + -1:358 + ) + expect_equal( + ShiftLon(dat2, lon = lon2, westB = westB)$data[,1], + c(180:360, 1:179) + ) + +}) + +############################################## +test_that("4. 
dat3", { + + westB <- -180 + expect_equal( + dim(ShiftLon(dat3, lon = lon3, westB = westB)$data), + dim(dat3) + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$lon, + seq(westB, westB + 360, length.out = 513)[-513] + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$data[,1, 2], + c(257:512, 1:256) + ) + #----------------- + westB <- 90 + expect_equal( + dim(ShiftLon(dat3, lon = lon3, westB = westB)$data), + dim(dat3) + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$lon, + seq(westB, westB + 360, length.out = 513)[-513] + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$data[,1, 2], + c(129:512, 1:128) + ) + #----------------- + westB <- 300 + expect_equal( + dim(ShiftLon(dat3, lon = lon3, westB = westB)$data), + dim(dat3) + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$lon, + seq(300.2344, 300.2344 + 360, length.out = 513)[-513], + tolerance = 0.001 + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$data[,1, 2], + c(428:512, 1:427) + ) + #----------------- + westB <- -270 + expect_equal( + dim(ShiftLon(dat3, lon = lon3, westB = westB)$data), + dim(dat3) + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$lon, + seq(westB, westB + 360, length.out = 513)[-513] + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$data[,1, 2], + c(129:512, 1:128) + ) + #----------------- + westB <- -10 + expect_equal( + dim(ShiftLon(dat3, lon = lon3, westB = westB)$data), + dim(dat3) + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$lon, + seq(-9.843750, -9.843750 + 360, length.out = 513)[-513] + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$data[,1, 2], + c(499:512, 1:498) + ) + #----------------- + westB <- -0.5 + expect_equal( + dim(ShiftLon(dat3, lon = lon3, westB = westB)$data), + dim(dat3) + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$lon, + seq(-0.703125, -0.703125 + 360, length.out = 513)[-513] + ) + expect_equal( + ShiftLon(dat3, lon = lon3, westB = westB)$data[,1, 2], + c(512, 1:511) + ) + +}) -- GitLab From 29ad5aa0e92fdbbebaaaefa4c28e9772bdfbcc79 Mon Sep 17 00:00:00 2001 From: aho Date: Fri, 5 Aug 2022 12:34:01 +0200 Subject: [PATCH 04/25] Format update --- DESCRIPTION | 2 +- NAMESPACE | 1 + man/AnoAgree.Rd | 1 - man/ArrayToList.Rd | 1 - man/Climdex.Rd | 13 ++++++++++--- man/CombineIndices.Rd | 1 - man/DTRIndicator.Rd | 13 ++++++++++--- man/DTRRef.Rd | 13 ++++++++++--- man/DailyAno.Rd | 4 +--- man/Extremes.Rd | 16 ++++++++++++---- man/Lon2Index.Rd | 1 - man/SeasonSelect.Rd | 1 - man/SelBox.Rd | 1 - man/Subset.Rd | 1 - man/Threshold.Rd | 12 +++++++++--- man/WaveDuration.Rd | 13 ++++++++++--- man/WeightedMean.Rd | 12 +++++++++--- 17 files changed, 73 insertions(+), 33 deletions(-) diff --git a/DESCRIPTION b/DESCRIPTION index cbcbdb2..bd980e3 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -27,7 +27,7 @@ License: Apache License 2.0 URL: https://earth.bsc.es/gitlab/es/ClimProjDiags BugReports: https://earth.bsc.es/gitlab/es/ClimProjDiags/-/issues Encoding: UTF-8 -RoxygenNote: 5.0.0 +RoxygenNote: 7.2.0 Suggests: knitr, testthat, diff --git a/NAMESPACE b/NAMESPACE index 2a5d243..40ba733 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -11,6 +11,7 @@ export(Extremes) export(Lon2Index) export(SeasonSelect) export(SelBox) +export(ShiftLon) export(Subset) export(Threshold) export(WaveDuration) diff --git a/man/AnoAgree.Rd b/man/AnoAgree.Rd index c929352..63c1450 100644 --- a/man/AnoAgree.Rd +++ b/man/AnoAgree.Rd @@ -34,4 +34,3 @@ a <- rnorm(6) agree <- AnoAgree(ano 
= a, membersdim = 1, na.rm = TRUE, ncores = NULL) print(agree) } - diff --git a/man/ArrayToList.Rd b/man/ArrayToList.Rd index 9a7d181..9951ae1 100644 --- a/man/ArrayToList.Rd +++ b/man/ArrayToList.Rd @@ -38,4 +38,3 @@ str(datalist) \seealso{ \link[s2dv]{PlotLayout} } - diff --git a/man/Climdex.Rd b/man/Climdex.Rd index 669570a..372712c 100644 --- a/man/Climdex.Rd +++ b/man/Climdex.Rd @@ -4,8 +4,16 @@ \alias{Climdex} \title{Wrapper for applying the climdex routine ETCCDI climate change indices to n-dimensional arrays.} \usage{ -Climdex(data, metric, threshold = NULL, base.range = NULL, dates = NULL, - timedim = NULL, calendar = NULL, ncores = NULL) +Climdex( + data, + metric, + threshold = NULL, + base.range = NULL, + dates = NULL, + timedim = NULL, + calendar = NULL, + ncores = NULL +) } \arguments{ \item{data}{A numeric n-dimensional array containing daily maximum or minimum temperature, wind speed or precipitation amount.} @@ -68,4 +76,3 @@ David Bronaugh for the Pacific Climate Impacts Consortium (2015). climdex.pcic: PCIC Implementation of Climdex Routines. R package version 1.1-6. http://CRAN.R-project.org/package=climdex.pcic } - diff --git a/man/CombineIndices.Rd b/man/CombineIndices.Rd index 95abc5e..6d032cd 100644 --- a/man/CombineIndices.Rd +++ b/man/CombineIndices.Rd @@ -33,4 +33,3 @@ dim(b) <- c(lon = 2, lat = 3, mod = 4) comb_ind <- CombineIndices(indices = list(a, b), weights = c(2, 1), operation = "add") print(comb_ind) } - diff --git a/man/DTRIndicator.Rd b/man/DTRIndicator.Rd index 33e7f50..e8be18b 100644 --- a/man/DTRIndicator.Rd +++ b/man/DTRIndicator.Rd @@ -4,8 +4,16 @@ \alias{DTRIndicator} \title{Diurnal temperature range indicator (DTR) of multidimensional arrays} \usage{ -DTRIndicator(tmax, tmin, ref, by.seasons = TRUE, dates = NULL, - timedim = NULL, calendar = NULL, ncores = NULL) +DTRIndicator( + tmax, + tmin, + ref, + by.seasons = TRUE, + dates = NULL, + timedim = NULL, + calendar = NULL, + ncores = NULL +) } \arguments{ \item{tmax}{A numeric multidimensional array containing daily maximum temperature.} @@ -60,4 +68,3 @@ aa <- DTRIndicator(tmax, tmin, ref = a, by.seasons = FALSE, ncores = NULL) str(aa) dim(aa$indicator) } - diff --git a/man/DTRRef.Rd b/man/DTRRef.Rd index 6e32e31..daea8c2 100644 --- a/man/DTRRef.Rd +++ b/man/DTRRef.Rd @@ -4,8 +4,16 @@ \alias{DTRRef} \title{Diurnal temperature range of multidimensional arrays} \usage{ -DTRRef(tmax, tmin, by.seasons = TRUE, dates = NULL, timedim = NULL, - calendar = NULL, na.rm = TRUE, ncores = NULL) +DTRRef( + tmax, + tmin, + by.seasons = TRUE, + dates = NULL, + timedim = NULL, + calendar = NULL, + na.rm = TRUE, + ncores = NULL +) } \arguments{ \item{tmax}{A numeric multidimensional array containing daily maximum temperature.} @@ -68,4 +76,3 @@ dim(tmin) <- c(2, 3, 365) a <- DTRRef(tmax, tmin, by.seasons = FALSE, dates = time, timedim = 3, ncores = NULL) str(a) } - diff --git a/man/DailyAno.Rd b/man/DailyAno.Rd index d4033f7..3bf59f6 100644 --- a/man/DailyAno.Rd +++ b/man/DailyAno.Rd @@ -4,8 +4,7 @@ \alias{DailyAno} \title{Daily anomalies} \usage{ -DailyAno(data, jdays = NULL, dates = NULL, calendar = NULL, - na.rm = TRUE) +DailyAno(data, jdays = NULL, dates = NULL, calendar = NULL, na.rm = TRUE) } \arguments{ \item{data}{A vector of daily data.} @@ -31,4 +30,3 @@ jdays <- c(rep(1, 5), rep(2, 5)) daily_anomaly <- DailyAno(data = data, jdays = jdays, na.rm = TRUE) print(daily_anomaly) } - diff --git a/man/Extremes.Rd b/man/Extremes.Rd index 0939fa6..ddc57e8 100644 --- a/man/Extremes.Rd +++ b/man/Extremes.Rd @@ -4,9 
+4,18 @@ \alias{Extremes} \title{Sum of spell lengths exceeding daily threshold for n-dimensional arrays} \usage{ -Extremes(data, threshold, op = ">", min.length = 6, - spells.can.span.years = TRUE, max.missing.days = 5, dates = NULL, - timedim = NULL, calendar = NULL, ncores = NULL) +Extremes( + data, + threshold, + op = ">", + min.length = 6, + spells.can.span.years = TRUE, + max.missing.days = 5, + dates = NULL, + timedim = NULL, + calendar = NULL, + ncores = NULL +) } \arguments{ \item{data}{A n-dimensional array containing daily data.} @@ -58,4 +67,3 @@ a <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can. max.missing.days = 5, ncores = NULL) str(a) } - diff --git a/man/Lon2Index.Rd b/man/Lon2Index.Rd index b2012a6..dbae6dc 100644 --- a/man/Lon2Index.Rd +++ b/man/Lon2Index.Rd @@ -33,4 +33,3 @@ pos <- Lon2Index(lon, lonmin = 340, lonmax = 20) lon[pos] } - diff --git a/man/SeasonSelect.Rd b/man/SeasonSelect.Rd index a71fd24..33066ee 100644 --- a/man/SeasonSelect.Rd +++ b/man/SeasonSelect.Rd @@ -45,4 +45,3 @@ attr(data, 'Variables')$common[[2]]$dim[[3]]$vals <- time a <- SeasonSelect(data = data, season = 'JJA') str(a) } - diff --git a/man/SelBox.Rd b/man/SelBox.Rd index 07e92b2..38e3547 100644 --- a/man/SelBox.Rd +++ b/man/SelBox.Rd @@ -43,4 +43,3 @@ a <- SelBox(data = data, lon = lon, lat = lat, region = c(2, 20, 1, 5), londim = 1, latdim = 2, mask = NULL) str(a) } - diff --git a/man/Subset.Rd b/man/Subset.Rd index 634cfe2..5d24310 100644 --- a/man/Subset.Rd +++ b/man/Subset.Rd @@ -31,4 +31,3 @@ data_subset <- Subset(data, c('time', 'model'), dim(data_subset) } - diff --git a/man/Threshold.Rd b/man/Threshold.Rd index cec12e6..a0fa10a 100644 --- a/man/Threshold.Rd +++ b/man/Threshold.Rd @@ -4,8 +4,15 @@ \alias{Threshold} \title{Daily thresholds based on quantiles for n-dimensional arrays} \usage{ -Threshold(data, dates = NULL, calendar = NULL, base.range = NULL, - qtiles = 0.9, ncores = NULL, na.rm = FALSE) +Threshold( + data, + dates = NULL, + calendar = NULL, + base.range = NULL, + qtiles = 0.9, + ncores = NULL, + na.rm = FALSE +) } \arguments{ \item{data}{A numeric n-dimensional array containing daily data.} @@ -42,4 +49,3 @@ attr(data, 'Variables')$dat1$time <- time a <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL) str(a) } - diff --git a/man/WaveDuration.Rd b/man/WaveDuration.Rd index b068252..a43c6b9 100644 --- a/man/WaveDuration.Rd +++ b/man/WaveDuration.Rd @@ -4,8 +4,16 @@ \alias{WaveDuration} \title{Heat and cold waves duration for n-dimensional arrays} \usage{ -WaveDuration(data, threshold, op = ">", spell.length = 6, - by.seasons = TRUE, dates = NULL, calendar = NULL, ncores = NULL) +WaveDuration( + data, + threshold, + op = ">", + spell.length = 6, + by.seasons = TRUE, + dates = NULL, + calendar = NULL, + ncores = NULL +) } \arguments{ \item{data}{A numeric n-dimensional array containing daily maximum or minimum temperature} @@ -48,4 +56,3 @@ threshold <- rep(40, 31) a <- WaveDuration(data, threshold, op = ">", spell.length = 6, by.seasons = TRUE, ncores = NULL) str(a) } - diff --git a/man/WeightedMean.Rd b/man/WeightedMean.Rd index 23f31c6..ac3411c 100644 --- a/man/WeightedMean.Rd +++ b/man/WeightedMean.Rd @@ -4,8 +4,15 @@ \alias{WeightedMean} \title{Calculate spatial area-weighted average of multidimensional arrays} \usage{ -WeightedMean(data, lon, lat, region = NULL, mask = NULL, londim = NULL, - latdim = NULL) +WeightedMean( + data, + lon, + lat, + region = NULL, + mask = NULL, + londim = NULL, + latdim = NULL 
+) } \arguments{ \item{data}{An array with minimum two dimensions of latitude and longitude.} @@ -60,4 +67,3 @@ a <- WeightedMean(data = data, lon = lon, lat = lat, region = NULL, mask = NULL, londim = 1, latdim = 2) str(a) } - -- GitLab From c6ac245a508c69a0e2db4a638ae1f1477f4c4c2a Mon Sep 17 00:00:00 2001 From: aho Date: Thu, 25 Aug 2022 11:36:42 +0200 Subject: [PATCH 05/25] Bugfix when the output dimension length is 1. --- R/Subset.R | 64 ++++++-- tests/testthat/test-Subset.R | 286 +++++++++++++++++++++++++++++++++++ 2 files changed, 339 insertions(+), 11 deletions(-) create mode 100644 tests/testthat/test-Subset.R diff --git a/R/Subset.R b/R/Subset.R index bad89e7..397eaa5 100644 --- a/R/Subset.R +++ b/R/Subset.R @@ -1,22 +1,44 @@ #'Subset a Data Array #' -#'This function allows to subset (i.e. slice, take a chunk of) an array, in a similar way as done in the function \code{take()} in the package plyr. There are two main inprovements:\cr\cr The input array can have dimension names, either in \code{names(dim(x))} or in the attribute 'dimensions', and the dimensions to subset along can be specified via the parameter \code{along} either with integer indices or either by their name.\cr\cr There are additional ways to adjust which dimensions are dropped in the resulting array: either to drop all, to drop none, to drop only the ones that have been sliced or to drop only the ones that have not been sliced.\cr\cr If an array is provided without dimension names, dimension names taken from the parameter \code{dim_names} will be added to the array. -#'This function computes the threshold based on a quantile value for each day of the year of the daily data input. +#'This function allows to subset (i.e. slice, take a chunk of) an array, in a +#'similar way as done in the function \code{take()} in the package plyr. There +#'are two main improvements:\cr\cr First, the input array can have dimension +#'names, either in \code{names(dim(x))} or in the attribute 'dimensions'. If +#'both exist, the attribute 'dimensions' is prioritized. The dimensions to +#'subset along can be specified via the parameter \code{along} either with +#'integer indices or by their name.\cr\cr Second, there are additional +#'ways to adjust which dimensions are dropped in the resulting array: either to +#'drop all, to drop none, to drop only the ones that have been sliced or to drop +#'only the ones that have not been sliced.\cr\cr #' -#'@param x A multidimensional array to be sliced. It can have dimension names either in \code{names(dim(x))} or either in the attribute 'dimensions'. -#'@param along Vector with references to the dimensions to take the subset from: either integers or dimension names. -#'@param indices List of indices to take from each dimension specified in 'along'. If a single dimension is specified in 'along' the indices can be directly provided as a single integer or as a vector. -#'@param drop Whether to drop all the dimensions of length 1 in the resulting array, none, only those that are specified in 'along', or only those that are not specified in 'along'. The possible values are, respectively: 'all' or TRUE, 'none' or FALSE, 'selected', and 'non-selected'. +#'@param x A named multidimensional array to be sliced. It can have dimension +#' names either in \code{names(dim(x))} or in the attribute 'dimensions'. +#'@param along A vector with references to the dimensions to take the subset +#' from: either integers or dimension names. 
+#'@param indices A list of indices to take from each dimension specified in +#' 'along'. If a single dimension is specified in 'along', it can be directly +#' provided as an integer or a vector. +#'@param drop Whether to drop all the dimensions of length 1 in the resulting +#' array, none, only those that are specified in 'along', or only those that +#' are not specified in 'along'. The possible values are: 'all' or TRUE, 'none' +#' or FALSE, 'selected', and 'non-selected'. The default value is FALSE. #' -#'@return An array with similar dimensions as the \code{x} input, but with trimmed or dropped dimensions. +#'@return An array with similar dimensions as the \code{x} input, but with +#' trimmed or dropped dimensions. #' #'@examples -#'##Example synthetic data: +#'#Example synthetic data: +#'# Dimension has name already #'data <- 1:(2 * 3 * 372 * 1) #'dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) #'data_subset <- Subset(data, c('time', 'model'), #' list(1:10, TRUE), drop = 'selected') #'dim(data_subset) +#'# Use attributes 'dimensions' +#'data <- array(1:(2 * 3 * 372 * 1), dim = c(2, 3, 372, 1)) +#'attributes(data)[['dimensions']] <- c('lat', 'lon', 'time', 'model') +#'data_subset <- Subset(data, c('lon', 'lat'), list(1, 1), drop = TRUE) +#'dim(data_subset) #' #'@export Subset <- function(x, along, indices, drop = FALSE) { @@ -29,7 +51,10 @@ Subset <- function(x, along, indices, drop = FALSE) { dim_names <- attr(x, 'dimensions') if (!is.character(dim_names)) { dim_names <- names(dim(x)) + } else { + names(dim(x)) <- dim_names } + if (!is.character(dim_names)) { if (any(sapply(along, is.character))) { stop("The input array 'x' doesn't have labels for the dimensions but the parameter 'along' contains dimension names.") @@ -90,25 +115,42 @@ Subset <- function(x, along, indices, drop = FALSE) { if (length(dim_names_to_remove) > 0) { dim_names <- dim_names[-dim_names_to_remove] } + # If there is one dim left, subset won't have dimension (but it should have one). Add it back + if (is.null(dim(subset))) { + if (!identical(dim_names, character(0))) { + # If there is one dim left, subset won't have dimension (but it should + # have one). Add it back. 
+ dim(subset) <- dim(x)[dim_names] + } else { # a number left + dim(subset) <- 1 + } + } } # Amend the final dimensions and put dimnames and attributes metadata <- attributes(x) metadata[['dim']] <- dim(subset) if (length(dims_to_drop) > 0) { - metadata[['dim']] <- metadata[['dim']][-dims_to_drop] + if (length(dims_to_drop) == length(metadata[['dim']])) { # a number left + metadata[['dim']] <- 1 + } else { + metadata[['dim']] <- metadata[['dim']][-dims_to_drop] + } if (is.character(dim_names)) { - names(metadata[['dim']]) <- dim_names[-dims_to_drop] + if (!identical(dim_names[-dims_to_drop], character(0))) { + names(metadata[['dim']]) <- dim_names[-dims_to_drop] + } if ('dimensions' %in% names(attributes(x))) { metadata[['dimensions']] <- dim_names[-dims_to_drop] } } - } else if (is.character(dim_names)) { + } else if (is.character(dim_names) & !identical(dim_names, character(0))) { names(metadata[['dim']]) <- dim_names if ('dimensions' %in% names(attributes(x))) { metadata[['dimensions']] <- dim_names } } + attributes(subset) <- metadata subset } diff --git a/tests/testthat/test-Subset.R b/tests/testthat/test-Subset.R new file mode 100644 index 0000000..42f609a --- /dev/null +++ b/tests/testthat/test-Subset.R @@ -0,0 +1,286 @@ +context("Subset tests") + +dat1 <- array(1:20, dim = c(dat = 1, lat = 2, lon = 10)) + +dat2 <- dat1 +attributes(dat2)[['units']] <- 'K' + +dat3 <- array(1:20, dim = c(1, 2, 10)) +attributes(dat3)[['dimensions']] <- c('dat', 'lat', 'lon') + +dat4 <- dat1 +attributes(dat4)[['dimensions']] <- c('dataset', 'latitude', 'longitude') + +#============================================== + +test_that("1. Sanity checks", { + +expect_error( +Subset(x = 1:4), +"Input array 'x' must be a numeric array." +) +expect_error( +Subset(x = array(1:20, dim = c(1, 2, 10)), along = 'a'), +"The input array 'x' doesn't have labels for the dimensions but the parameter 'along' contains dimension names." +) +expect_error( +Subset(x = dat1, along = T), +"All provided dimension indices in 'along' must be integers or character strings." +) +expect_error( +Subset(x = dat1, along = c('dat', 'dat')), +"The parameter 'along' must not contain repeated dimension names." +) +expect_error( +Subset(x = dat1, along = 'dataset'), +"Could not match all dimension names in 'indices' with dimension names in input array 'x'." +) +expect_error( +Subset(x = dat1, 'dat', 1, drop = 'yes'), +"Parameter 'drop' must be one of TRUE, FALSE, 'all', 'selected', 'non-selected', 'none'." +) + +}) + +test_that("2. 
dat1", { +# drop: dat +expect_equal( +dim(Subset(dat1, 'dat', 1, drop = FALSE)), +dim(dat1) +) +expect_equal( +as.vector(Subset(dat1, 'dat', 1, drop = FALSE)), +as.vector(dat1) +) +expect_equal( +attributes(Subset(dat1, 'dat', 1, drop = FALSE)), +attributes(dat1) +) +expect_equal( +dim(Subset(dat1, 'dat', 1, drop = TRUE)), +dim(dat1)[-1] +) +expect_equal( +as.vector(Subset(dat1, 'dat', 1, drop = TRUE)), +as.vector(dat1) +) +expect_equal( +as.vector(Subset(dat1, 'dat', 1, drop = TRUE)), +as.vector(Subset(dat1, 1, 1, drop = TRUE)) +) +expect_equal( +dim(Subset(dat1, 'dat', 1, drop = 'selected')), +dim(dat1)[-1] +) +expect_equal( +as.vector(Subset(dat1, 'dat', 1, drop = 'selected')), +as.vector(dat1) +) +# drop: lat +expect_equal( +dim(Subset(dat1, 'lat', 1, drop = FALSE)), +c(dat = 1, lat = 1, lon = 10) +) +expect_equal( +as.vector(Subset(dat1, 'lat', 1, drop = FALSE)), +as.vector(dat1[, 1, ]) +) +expect_equal( +dim(Subset(dat1, 'lat', 1, drop = TRUE)), +dim(dat1)[3] +) +expect_equal( +as.vector(Subset(dat1, 'lat', 1, drop = TRUE)), +as.vector(dat1[, 1, ]) +) +expect_equal( +Subset(dat1, 'lat', 1, drop = TRUE), +Subset(dat1, 2, 1, drop = TRUE) +) +expect_equal( +dim(Subset(dat1, 'lat', 1, drop = 'selected')), +dim(dat1)[c(1, 3)] +) +expect_equal( +as.vector(Subset(dat1, 'lat', 1, drop = 'selected')), +as.vector(dat1[, 1, ]) +) +# drop: lat, lon +expect_equal( +dim(Subset(dat1, c('lat', 'lon'), list(1, 2), drop = FALSE)), +c(dat = 1, lat = 1, lon = 1) +) +expect_equal( +as.vector(Subset(dat1, c('lat', 'lon'), list(1, 2), drop = FALSE)), +3 +) +expect_equal( +dim(Subset(dat1, c('lat', 'lon'), list(1, 2), drop = TRUE)), +1 +) +expect_equal( +as.vector(Subset(dat1, c('lat', 'lon'), list(1, 2), drop = TRUE)), +3 +) +expect_equal( +Subset(dat1, c('lat', 'lon'), list(1, 2), drop = TRUE), +Subset(dat1, c(2, 3), list(1, 2), drop = TRUE) +) +expect_equal( +dim(Subset(dat1, c('lat', 'lon'), list(1, 2), drop = 'selected')), +c(dat = 1) +) +expect_equal( +as.vector(Subset(dat1, c('lat', 'lon'), list(1, 2), drop = 'selected')), +3 +) +# drop: dat, lat, lon +expect_equal( +dim(Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = FALSE)), +c(dat = 1, lat = 1, lon = 1) +) +expect_equal( +as.vector(Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = FALSE)), +3 +) +expect_equal( +Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = FALSE), +Subset(dat1, c(1:3), list(1, 1, 2), drop = FALSE) +) +expect_equal( +dim(Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = TRUE)), +1 +) +expect_equal( +as.vector(Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = TRUE)), +3 +) +expect_equal( +Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = TRUE), +Subset(dat1, c(1:3), list(1, 1, 2), drop = TRUE) +) +expect_equal( +class(Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = 'selected')), +'array' +) +expect_equal( +dim(Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = 'selected')), +1 +) +expect_equal( +as.vector(Subset(dat1, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = 'selected')), +3 +) + +}) + +test_that("3. 
dat2", { +# Same as dat1 but with attributes + +# drop: lat +expect_equal( +dim(Subset(dat2, 'lat', 1, drop = TRUE)), +dim(dat1)[3] +) +expect_equal( +as.vector(Subset(dat2, 'lat', 1, drop = TRUE)), +as.vector(dat1[, 1, ]) +) +expect_equal( +attributes(Subset(dat2, 'lat', 1, drop = TRUE)), +list(dim = c(lon = 10), units = 'K') +) + +# drop: dat, lat, lon +expect_equal( +dim(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = FALSE)), +c(dat = 1, lat = 1, lon = 1) +) +expect_equal( +as.vector(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = FALSE)), +3 +) +expect_equal( +attributes(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = FALSE)), +list(dim = c(dat = 1, lat = 1, lon = 1), units = 'K') +) +expect_equal( +dim(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = TRUE)), +1 +) +expect_equal( +as.vector(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = TRUE)), +3 +) +expect_equal( +attributes(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = TRUE)), +list(dim = 1, units = 'K') +) +expect_equal( +class(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = 'selected')), +'array' +) +expect_equal( +dim(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = 'selected')), +1 +) +expect_equal( +as.vector(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = 'selected')), +3 +) +expect_equal( +attributes(Subset(dat2, c('dat', 'lat', 'lon'), list(1, 1, 2), drop = 'selected')), +list(dim = 1, units = 'K') +) + +}) + +test_that("4. dat3", { +# drop: dat +expect_equal( +Subset(dat1, 'dat', 1, drop = FALSE), +Subset(dat3, 'dat', 1, drop = FALSE), +check.attributes = F +) +expect_equal( +attributes(Subset(dat3, 'dat', 1, drop = FALSE)), +list(dim = c(dat = 1, lat = 2, lon = 10), dimensions = c('dat', 'lat', 'lon')) +) + +# drop: lat, lon +expect_equal( +Subset(dat1, c('lat', 'lon'), list(1, 2), drop = TRUE), +Subset(dat3, c('lat', 'lon'), list(1, 2), drop = TRUE), +check.attributes = F +) +expect_equal( +attributes(Subset(dat3, c('lat', 'lon'), list(1, 2), drop = TRUE)), +list(dim = 1, dimensions = c('dat', 'lat', 'lon')) +) + +}) + +test_that("5. 
dat4", { + +# drop: lat +expect_equal( +Subset(dat4, 'latitude', 1, drop = FALSE), +Subset(dat1, 'lat', 1, drop = FALSE), +check.attributes = F +) +expect_equal( +attributes(Subset(dat4, 'latitude', 1, drop = FALSE)), +list(dim = c(dataset = 1, latitude = 1, longitude = 10), dimensions = c('dataset', 'latitude', 'longitude')) +) +# drop: lat, lon +expect_equal( +Subset(dat4, c('latitude', 'longitude'), list(1, 2), drop = TRUE), +Subset(dat1, c('lat', 'lon'), list(1, 2), drop = TRUE), +check.attributes = F +) +expect_equal( +attributes(Subset(dat4, c('latitude', 'longitude'), list(1, 2), drop = TRUE)), +list(dim = 1, dimensions = c('dataset', 'latitude', 'longitude')) +) + +}) -- GitLab From 702dfb02cc4e3313f142880f6ace341503d8027a Mon Sep 17 00:00:00 2001 From: aho Date: Thu, 25 Aug 2022 11:38:40 +0200 Subject: [PATCH 06/25] update roxygen2 format --- DESCRIPTION | 2 +- man/AnoAgree.Rd | 1 - man/ArrayToList.Rd | 1 - man/Climdex.Rd | 13 ++++++++++--- man/CombineIndices.Rd | 1 - man/DTRIndicator.Rd | 13 ++++++++++--- man/DTRRef.Rd | 13 ++++++++++--- man/DailyAno.Rd | 4 +--- man/Extremes.Rd | 16 ++++++++++++---- man/Lon2Index.Rd | 1 - man/SeasonSelect.Rd | 1 - man/SelBox.Rd | 1 - man/Subset.Rd | 39 ++++++++++++++++++++++++++++++--------- man/Threshold.Rd | 12 +++++++++--- man/WaveDuration.Rd | 13 ++++++++++--- man/WeightedMean.Rd | 12 +++++++++--- 16 files changed, 102 insertions(+), 41 deletions(-) diff --git a/DESCRIPTION b/DESCRIPTION index cbcbdb2..bd980e3 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -27,7 +27,7 @@ License: Apache License 2.0 URL: https://earth.bsc.es/gitlab/es/ClimProjDiags BugReports: https://earth.bsc.es/gitlab/es/ClimProjDiags/-/issues Encoding: UTF-8 -RoxygenNote: 5.0.0 +RoxygenNote: 7.2.0 Suggests: knitr, testthat, diff --git a/man/AnoAgree.Rd b/man/AnoAgree.Rd index c929352..63c1450 100644 --- a/man/AnoAgree.Rd +++ b/man/AnoAgree.Rd @@ -34,4 +34,3 @@ a <- rnorm(6) agree <- AnoAgree(ano = a, membersdim = 1, na.rm = TRUE, ncores = NULL) print(agree) } - diff --git a/man/ArrayToList.Rd b/man/ArrayToList.Rd index 9a7d181..9951ae1 100644 --- a/man/ArrayToList.Rd +++ b/man/ArrayToList.Rd @@ -38,4 +38,3 @@ str(datalist) \seealso{ \link[s2dv]{PlotLayout} } - diff --git a/man/Climdex.Rd b/man/Climdex.Rd index 669570a..372712c 100644 --- a/man/Climdex.Rd +++ b/man/Climdex.Rd @@ -4,8 +4,16 @@ \alias{Climdex} \title{Wrapper for applying the climdex routine ETCCDI climate change indices to n-dimensional arrays.} \usage{ -Climdex(data, metric, threshold = NULL, base.range = NULL, dates = NULL, - timedim = NULL, calendar = NULL, ncores = NULL) +Climdex( + data, + metric, + threshold = NULL, + base.range = NULL, + dates = NULL, + timedim = NULL, + calendar = NULL, + ncores = NULL +) } \arguments{ \item{data}{A numeric n-dimensional array containing daily maximum or minimum temperature, wind speed or precipitation amount.} @@ -68,4 +76,3 @@ David Bronaugh for the Pacific Climate Impacts Consortium (2015). climdex.pcic: PCIC Implementation of Climdex Routines. R package version 1.1-6. 
http://CRAN.R-project.org/package=climdex.pcic } - diff --git a/man/CombineIndices.Rd b/man/CombineIndices.Rd index 95abc5e..6d032cd 100644 --- a/man/CombineIndices.Rd +++ b/man/CombineIndices.Rd @@ -33,4 +33,3 @@ dim(b) <- c(lon = 2, lat = 3, mod = 4) comb_ind <- CombineIndices(indices = list(a, b), weights = c(2, 1), operation = "add") print(comb_ind) } - diff --git a/man/DTRIndicator.Rd b/man/DTRIndicator.Rd index 33e7f50..e8be18b 100644 --- a/man/DTRIndicator.Rd +++ b/man/DTRIndicator.Rd @@ -4,8 +4,16 @@ \alias{DTRIndicator} \title{Diurnal temperature range indicator (DTR) of multidimensional arrays} \usage{ -DTRIndicator(tmax, tmin, ref, by.seasons = TRUE, dates = NULL, - timedim = NULL, calendar = NULL, ncores = NULL) +DTRIndicator( + tmax, + tmin, + ref, + by.seasons = TRUE, + dates = NULL, + timedim = NULL, + calendar = NULL, + ncores = NULL +) } \arguments{ \item{tmax}{A numeric multidimensional array containing daily maximum temperature.} @@ -60,4 +68,3 @@ aa <- DTRIndicator(tmax, tmin, ref = a, by.seasons = FALSE, ncores = NULL) str(aa) dim(aa$indicator) } - diff --git a/man/DTRRef.Rd b/man/DTRRef.Rd index 6e32e31..daea8c2 100644 --- a/man/DTRRef.Rd +++ b/man/DTRRef.Rd @@ -4,8 +4,16 @@ \alias{DTRRef} \title{Diurnal temperature range of multidimensional arrays} \usage{ -DTRRef(tmax, tmin, by.seasons = TRUE, dates = NULL, timedim = NULL, - calendar = NULL, na.rm = TRUE, ncores = NULL) +DTRRef( + tmax, + tmin, + by.seasons = TRUE, + dates = NULL, + timedim = NULL, + calendar = NULL, + na.rm = TRUE, + ncores = NULL +) } \arguments{ \item{tmax}{A numeric multidimensional array containing daily maximum temperature.} @@ -68,4 +76,3 @@ dim(tmin) <- c(2, 3, 365) a <- DTRRef(tmax, tmin, by.seasons = FALSE, dates = time, timedim = 3, ncores = NULL) str(a) } - diff --git a/man/DailyAno.Rd b/man/DailyAno.Rd index d4033f7..3bf59f6 100644 --- a/man/DailyAno.Rd +++ b/man/DailyAno.Rd @@ -4,8 +4,7 @@ \alias{DailyAno} \title{Daily anomalies} \usage{ -DailyAno(data, jdays = NULL, dates = NULL, calendar = NULL, - na.rm = TRUE) +DailyAno(data, jdays = NULL, dates = NULL, calendar = NULL, na.rm = TRUE) } \arguments{ \item{data}{A vector of daily data.} @@ -31,4 +30,3 @@ jdays <- c(rep(1, 5), rep(2, 5)) daily_anomaly <- DailyAno(data = data, jdays = jdays, na.rm = TRUE) print(daily_anomaly) } - diff --git a/man/Extremes.Rd b/man/Extremes.Rd index 0939fa6..ddc57e8 100644 --- a/man/Extremes.Rd +++ b/man/Extremes.Rd @@ -4,9 +4,18 @@ \alias{Extremes} \title{Sum of spell lengths exceeding daily threshold for n-dimensional arrays} \usage{ -Extremes(data, threshold, op = ">", min.length = 6, - spells.can.span.years = TRUE, max.missing.days = 5, dates = NULL, - timedim = NULL, calendar = NULL, ncores = NULL) +Extremes( + data, + threshold, + op = ">", + min.length = 6, + spells.can.span.years = TRUE, + max.missing.days = 5, + dates = NULL, + timedim = NULL, + calendar = NULL, + ncores = NULL +) } \arguments{ \item{data}{A n-dimensional array containing daily data.} @@ -58,4 +67,3 @@ a <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can. 
max.missing.days = 5, ncores = NULL) str(a) } - diff --git a/man/Lon2Index.Rd b/man/Lon2Index.Rd index b2012a6..dbae6dc 100644 --- a/man/Lon2Index.Rd +++ b/man/Lon2Index.Rd @@ -33,4 +33,3 @@ pos <- Lon2Index(lon, lonmin = 340, lonmax = 20) lon[pos] } - diff --git a/man/SeasonSelect.Rd b/man/SeasonSelect.Rd index a71fd24..33066ee 100644 --- a/man/SeasonSelect.Rd +++ b/man/SeasonSelect.Rd @@ -45,4 +45,3 @@ attr(data, 'Variables')$common[[2]]$dim[[3]]$vals <- time a <- SeasonSelect(data = data, season = 'JJA') str(a) } - diff --git a/man/SelBox.Rd b/man/SelBox.Rd index 07e92b2..38e3547 100644 --- a/man/SelBox.Rd +++ b/man/SelBox.Rd @@ -43,4 +43,3 @@ a <- SelBox(data = data, lon = lon, lat = lat, region = c(2, 20, 1, 5), londim = 1, latdim = 2, mask = NULL) str(a) } - diff --git a/man/Subset.Rd b/man/Subset.Rd index 634cfe2..f50c5da 100644 --- a/man/Subset.Rd +++ b/man/Subset.Rd @@ -7,28 +7,49 @@ Subset(x, along, indices, drop = FALSE) } \arguments{ -\item{x}{A multidimensional array to be sliced. It can have dimension names either in \code{names(dim(x))} or either in the attribute 'dimensions'.} +\item{x}{A named multidimensional array to be sliced. It can have dimension +names either in \code{names(dim(x))} or in the attribute 'dimensions'.} -\item{along}{Vector with references to the dimensions to take the subset from: either integers or dimension names.} +\item{along}{A vector with references to the dimensions to take the subset +from: either integers or dimension names.} -\item{indices}{List of indices to take from each dimension specified in 'along'. If a single dimension is specified in 'along' the indices can be directly provided as a single integer or as a vector.} +\item{indices}{A list of indices to take from each dimension specified in +'along'. If a single dimension is specified in 'along', it can be directly +provided as an integer or a vector.} -\item{drop}{Whether to drop all the dimensions of length 1 in the resulting array, none, only those that are specified in 'along', or only those that are not specified in 'along'. The possible values are, respectively: 'all' or TRUE, 'none' or FALSE, 'selected', and 'non-selected'.} +\item{drop}{Whether to drop all the dimensions of length 1 in the resulting +array, none, only those that are specified in 'along', or only those that +are not specified in 'along'. The possible values are: 'all' or TRUE, 'none' +or FALSE, 'selected', and 'non-selected'. The default value is FALSE.} } \value{ -An array with similar dimensions as the \code{x} input, but with trimmed or dropped dimensions. +An array with similar dimensions as the \code{x} input, but with + trimmed or dropped dimensions. } \description{ -This function allows to subset (i.e. slice, take a chunk of) an array, in a similar way as done in the function \code{take()} in the package plyr. There are two main inprovements:\cr\cr The input array can have dimension names, either in \code{names(dim(x))} or in the attribute 'dimensions', and the dimensions to subset along can be specified via the parameter \code{along} either with integer indices or either by their name.\cr\cr There are additional ways to adjust which dimensions are dropped in the resulting array: either to drop all, to drop none, to drop only the ones that have been sliced or to drop only the ones that have not been sliced.\cr\cr If an array is provided without dimension names, dimension names taken from the parameter \code{dim_names} will be added to the array. 
-This function computes the threshold based on a quantile value for each day of the year of the daily data input. +This function allows to subset (i.e. slice, take a chunk of) an array, in a +similar way as done in the function \code{take()} in the package plyr. There +are two main improvements:\cr\cr First, the input array can have dimension +names, either in \code{names(dim(x))} or in the attribute 'dimensions'. If +both exist, the attribute 'dimensions' is prioritized. The dimensions to +subset along can be specified via the parameter \code{along} either with +integer indices or by their name.\cr\cr Second, there are additional +ways to adjust which dimensions are dropped in the resulting array: either to +drop all, to drop none, to drop only the ones that have been sliced or to drop +only the ones that have not been sliced.\cr\cr } \examples{ -##Example synthetic data: +#Example synthetic data: +# Dimension has name already data <- 1:(2 * 3 * 372 * 1) dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) data_subset <- Subset(data, c('time', 'model'), list(1:10, TRUE), drop = 'selected') dim(data_subset) +# Use attributes 'dimensions' +data <- array(1:(2 * 3 * 372 * 1), dim = c(2, 3, 372, 1)) +attributes(data)[['dimensions']] <- c('lat', 'lon', 'time', 'model') +data_subset <- Subset(data, c('lon', 'lat'), list(1, 1), drop = TRUE) +dim(data_subset) } - diff --git a/man/Threshold.Rd b/man/Threshold.Rd index cec12e6..a0fa10a 100644 --- a/man/Threshold.Rd +++ b/man/Threshold.Rd @@ -4,8 +4,15 @@ \alias{Threshold} \title{Daily thresholds based on quantiles for n-dimensional arrays} \usage{ -Threshold(data, dates = NULL, calendar = NULL, base.range = NULL, - qtiles = 0.9, ncores = NULL, na.rm = FALSE) +Threshold( + data, + dates = NULL, + calendar = NULL, + base.range = NULL, + qtiles = 0.9, + ncores = NULL, + na.rm = FALSE +) } \arguments{ \item{data}{A numeric n-dimensional array containing daily data.} @@ -42,4 +49,3 @@ attr(data, 'Variables')$dat1$time <- time a <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL) str(a) } - diff --git a/man/WaveDuration.Rd b/man/WaveDuration.Rd index b068252..a43c6b9 100644 --- a/man/WaveDuration.Rd +++ b/man/WaveDuration.Rd @@ -4,8 +4,16 @@ \alias{WaveDuration} \title{Heat and cold waves duration for n-dimensional arrays} \usage{ -WaveDuration(data, threshold, op = ">", spell.length = 6, - by.seasons = TRUE, dates = NULL, calendar = NULL, ncores = NULL) +WaveDuration( + data, + threshold, + op = ">", + spell.length = 6, + by.seasons = TRUE, + dates = NULL, + calendar = NULL, + ncores = NULL +) } \arguments{ \item{data}{A numeric n-dimensional array containing daily maximum or minimum temperature} @@ -48,4 +56,3 @@ threshold <- rep(40, 31) a <- WaveDuration(data, threshold, op = ">", spell.length = 6, by.seasons = TRUE, ncores = NULL) str(a) } - diff --git a/man/WeightedMean.Rd b/man/WeightedMean.Rd index 23f31c6..ac3411c 100644 --- a/man/WeightedMean.Rd +++ b/man/WeightedMean.Rd @@ -4,8 +4,15 @@ \alias{WeightedMean} \title{Calculate spatial area-weighted average of multidimensional arrays} \usage{ -WeightedMean(data, lon, lat, region = NULL, mask = NULL, londim = NULL, - latdim = NULL) +WeightedMean( + data, + lon, + lat, + region = NULL, + mask = NULL, + londim = NULL, + latdim = NULL +) } \arguments{ \item{data}{An array with minimum two dimensions of latitude and longitude.} @@ -60,4 +67,3 @@ a <- WeightedMean(data = data, lon = lon, lat = lat, region = NULL, mask = NULL, londim = 1, latdim = 2) str(a) } - -- 
GitLab

From 6471370a73f11c1477334c5c11719bdbc9c58ac5 Mon Sep 17 00:00:00 2001
From: aho
Date: Thu, 25 Aug 2022 16:06:40 +0200
Subject: [PATCH 07/25] Add checks to ensure 'indices' is correct

---
 R/Subset.R | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/R/Subset.R b/R/Subset.R
index 397eaa5..c077c2a 100644
--- a/R/Subset.R
+++ b/R/Subset.R
@@ -79,7 +79,14 @@ Subset <- function(x, along, indices, drop = FALSE) {

   # Check indices
   if (!is.list(indices)) {
-    indices <- list(indices)
+    if (length(along) == 1) {
+      indices <- list(indices)
+    } else {
+      stop("Parameter 'indices' should be a list.")
+    }
+  }
+  if (length(indices) != length(along)) {
+    stop("Parameter 'along' and 'indices' should have the same length.")
   }

   # Check parameter drop
--
GitLab

From 60400aee13ac302d9e6ed637cf211cdebcb9e955 Mon Sep 17 00:00:00 2001
From: aho
Date: Tue, 6 Sep 2022 11:12:44 +0200
Subject: [PATCH 08/25] Add .gitlab-ci.yml

---
 .gitlab-ci.yml | 10 ++++++++++
 1 file changed, 10 insertions(+)
 create mode 100644 .gitlab-ci.yml

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000..049daf7
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,10 @@
+stages:
+  - build
+build:
+  stage: build
+  script:
+    - module load R/4.1.2-foss-2015a-bare
+    - module load CDO/1.9.8-foss-2015a
+    - R CMD build --resave-data .
+    - R CMD check --as-cran --no-manual --run-donttest ClimProjDiags_*.tar.gz
+    - R -e 'covr::package_coverage()'
--
GitLab

From 169c23070553cb90365a9942987e7c8cc76a35b8 Mon Sep 17 00:00:00 2001
From: aho
Date: Tue, 6 Sep 2022 11:18:41 +0200
Subject: [PATCH 09/25] Correct syntax

---
 .Rbuildignore | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/.Rbuildignore b/.Rbuildignore
index 0954c2a..0c51051 100644
--- a/.Rbuildignore
+++ b/.Rbuildignore
@@ -1,6 +1,9 @@
-.git
-.gitignore
-.tar.gz
-.pdf
-./.nc
+.*\.git$
+.*\.gitignore$
+.*\.tar.gz$
+.*\.pdf$
+./.nc$
+.*\.gitlab-ci.yml$
+# Ignore tests when submitting to CRAN
+#^tests$
--
GitLab

From 7baf6183524c42d0fd6f43b26c63fbf1381657df Mon Sep 17 00:00:00 2001
From: aho
Date: Tue, 6 Sep 2022 12:40:53 +0200
Subject: [PATCH 10/25] Change example to dontrun due to s2dv function

---
 R/ShiftLon.R    | 2 +-
 man/ShiftLon.Rd | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/R/ShiftLon.R b/R/ShiftLon.R
index aad4deb..88b034d 100644
--- a/R/ShiftLon.R
+++ b/R/ShiftLon.R
@@ -28,7 +28,7 @@
 #'lat <- -90:90 ## lat does not change
 #'shifted <- ShiftLon(data = data, lon = lon, westB = -180, ncores = 1)
 #'
-#' \donttest{
+#' \dontrun{
 #'s2dv::PlotEquiMap(var = data, lon = lon, lat = lat, filled.continents = FALSE)
 #'s2dv::PlotEquiMap(var = shifted$data, lon = shifted$lon, lat = lat, filled.continents = FALSE)
 #' }
diff --git a/man/ShiftLon.Rd b/man/ShiftLon.Rd
index bf8e0de..7d7a9d4 100644
--- a/man/ShiftLon.Rd
+++ b/man/ShiftLon.Rd
@@ -40,7 +40,7 @@ lon <- array(data = 0:359, dim = c(lon = 360))
 lat <- -90:90 ## lat does not change
 shifted <- ShiftLon(data = data, lon = lon, westB = -180, ncores = 1)

- \donttest{
+ \dontrun{
 s2dv::PlotEquiMap(var = data, lon = lon, lat = lat, filled.continents = FALSE)
 s2dv::PlotEquiMap(var = shifted$data, lon = shifted$lon, lat = lat, filled.continents = FALSE)
 }
--
GitLab

From 496405213fdd80e4bc4b6f9235744a6c67649bcb Mon Sep 17 00:00:00 2001
From: aho
Date: Tue, 6 Sep 2022 12:48:22 +0200
Subject: [PATCH 11/25] Remove .html files.
They will be generated automatically when the package is built

---
 inst/doc/anomaly_agreement.html | 474 --------------------------
 inst/doc/diurnaltemp.html       | 417 ----------------------------
 inst/doc/extreme_indices.html   | 348 -----------------------
 inst/doc/heatcoldwaves.html     | 381 -------------------------
 4 files changed, 1620 deletions(-)
 delete mode 100644 inst/doc/anomaly_agreement.html
 delete mode 100644 inst/doc/diurnaltemp.html
 delete mode 100644 inst/doc/extreme_indices.html
 delete mode 100644 inst/doc/heatcoldwaves.html

diff --git a/inst/doc/anomaly_agreement.html b/inst/doc/anomaly_agreement.html
deleted file mode 100644
index a23f4d8..0000000
--- a/inst/doc/anomaly_agreement.html
+++ /dev/null
@@ -1,474 +0,0 @@
[474 deleted lines: built HTML of the "Multi-model agreement" vignette; the rendered markup is omitted here, as the file is regenerated from the vignette source at build time]
diff --git a/inst/doc/diurnaltemp.html b/inst/doc/diurnaltemp.html
deleted file mode 100644
index 8ae3e79..0000000
--- a/inst/doc/diurnaltemp.html
+++ /dev/null
@@ -1,417 +0,0 @@
[417 deleted lines: built HTML of the "Diurnal Temperature Variation (DTR) Indicator" vignette, likewise omitted]
diff --git a/inst/doc/extreme_indices.html b/inst/doc/extreme_indices.html
deleted file mode 100644
index 2d064b9..0000000
--- a/inst/doc/extreme_indices.html
+++ /dev/null
@@ -1,348 +0,0 @@
[348 deleted lines: built HTML of the "Extreme Indices" vignette, likewise omitted]
diff --git a/inst/doc/heatcoldwaves.html b/inst/doc/heatcoldwaves.html
deleted file mode 100644
index c272ce8..0000000
--- a/inst/doc/heatcoldwaves.html
+++ /dev/null
@@ -1,381 +0,0 @@
[381 deleted lines: built HTML of the "Heatwave and coldwave duration" vignette, likewise omitted]
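As the commit body says, these HTML files are build artifacts: they are re-created from the vignette sources whenever the package is built, so deleting them from the repository loses nothing. A rough R-level sketch of that regeneration step (a simplification of what `R CMD build` performs; the working-directory path is an assumption):

    # Re-render the vignette sources of the package in the current directory.
    # `R CMD build` runs this step itself and then copies the built HTML
    # into inst/doc/ of the tarball.
    tools::buildVignettes(dir = ".", tangle = TRUE)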
--
GitLab

From da86a9c742a6a7123261b6fd93c752f6dbf000ee Mon Sep 17 00:00:00 2001
From: aho
Date: Tue, 6 Sep 2022 14:17:02 +0200
Subject: [PATCH 12/25] Revise example to avoid warning

---
 R/Extremes.R    | 9 ++++-----
 man/Extremes.Rd | 9 ++++-----
 2 files changed, 8 insertions(+), 10 deletions(-)

diff --git a/R/Extremes.R b/R/Extremes.R
index f3e7b16..476b456 100644
--- a/R/Extremes.R
+++ b/R/Extremes.R
@@ -31,13 +31,12 @@
 #'metadata <- list(time = list(standard_name = 'time', long_name = 'time', calendar = 'noleap',
 #'                 units = 'days since 1970-01-01 00:00:00', prec = 'double',
 #'                 dim = list(list(name = 'time', unlim = FALSE))))
-#'threshold = rep(40, 31)
 #'attr(time, "variables") <- metadata
 #'attr(data, 'Variables')$dat1$time <- time
-#'
-#'a <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can.span.years = TRUE,
-#'              max.missing.days = 5, ncores = NULL)
-#'str(a)
+#'threshold <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL)
+#'res <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can.span.years = TRUE,
+#'               max.missing.days = 5, ncores = NULL)
+#'str(res)
 #'@export
 Extremes <- function(data, threshold, op = ">", min.length = 6, spells.can.span.years = TRUE,
                      max.missing.days = 5, dates = NULL, timedim = NULL, calendar = NULL, ncores = NULL) {
diff --git a/man/Extremes.Rd b/man/Extremes.Rd
index ddc57e8..52998c7 100644
--- a/man/Extremes.Rd
+++ b/man/Extremes.Rd
@@ -59,11 +59,10 @@ time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CE
 metadata <- list(time = list(standard_name = 'time', long_name = 'time', calendar = 'noleap',
                  units = 'days since 1970-01-01 00:00:00', prec = 'double',
                  dim = list(list(name = 'time', unlim = FALSE))))
-threshold = rep(40, 31)
 attr(time, "variables") <- metadata
 attr(data, 'Variables')$dat1$time <- time
-
-a <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can.span.years = TRUE,
-              max.missing.days = 5, ncores = NULL)
-str(a)
+threshold <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL)
+res <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can.span.years = TRUE,
+               max.missing.days = 5, ncores = NULL)
+str(res)
 }
--
GitLab

From b16aeb6959ebbac8b4a28c20dcffb6c043b58990 Mon Sep 17 00:00:00 2001
From: Eva Rifa
Date: Thu, 8 Sep 2022 18:02:15 +0200
Subject: [PATCH 13/25] Create new function WeightCells

---
 NAMESPACE                         |  1 +
 R/WeightCells.R                   | 87 ++++++++++++++++++++++++++++
 man/WeightCells.Rd                | 36 ++++++++++++
 tests/testthat/test-WeightCells.R | 94 +++++++++++++++++++++++++++++++
 4 files changed, 218 insertions(+)
 create mode 100644 R/WeightCells.R
 create mode 100644 man/WeightCells.Rd
 create mode 100644 tests/testthat/test-WeightCells.R

diff --git a/NAMESPACE b/NAMESPACE
index 40ba733..58def58 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -15,6 +15,7 @@ export(ShiftLon)
 export(Subset)
 export(Threshold)
 export(WaveDuration)
+export(WeightCells)
 export(WeightedMean)
 import(PCICt)
 import(climdex.pcic)
diff --git a/R/WeightCells.R b/R/WeightCells.R
new file mode 100644
index 0000000..7bc3156
--- /dev/null
+++ b/R/WeightCells.R
@@ -0,0 +1,87 @@
+#'Compute the square-root of the cosine of the latitude weighting on the given
+#'array.
+#'
+#'This function performs square-root of the cosine of the latitude weighting on
+#'the given array.
+#'
+#'@param data A numeric array with named dimensions, representing the data to be
+#' applied the weights. It should have at least the latitude dimension and it
+#' can have more other dimensions.
+#'@param lat A numeric array with one dimension containing the latitudes.
+#'@param lat_dim A character string indicating the name of the latitudinal
+#' dimension. The default value is 'lat'.
+#'@param ncores An integer indicating the number of cores to use for parallel
+#' computation. The default value is NULL.
+#'
+#'@return An array with same dimensions as parameter 'data'.
+#'
+#'@examples
+#'exp <- array(rnorm(1:30), dim = c(lat = 3, lon = 5, sdate = 2))
+#'lat <- rnorm(3)*10
+#'\dontrun{
+#'res <- WeightCells(data = exp, lat = lat)
+#'}
+#'@import multiApply
+#'@export
+WeightCells <- function(data, lat, lat_dim = 'lat', ncores = NULL) {
+
+  # Check inputs
+  ## data
+  if (is.null(data)) {
+    stop("Parameter 'data' cannot be NULL.")
+  }
+  if (!is.numeric(data)) {
+    stop("Parameter 'data' must be a numeric array.")
+  }
+  if (is.null(dim(data))) {
+    stop("Parameter 'data' must be at least latitude dimension.")
+  }
+  if (any(is.null(names(dim(data)))) | any(nchar(names(dim(data))) == 0)) {
+    stop("Parameter 'data' must have dimension names.")
+  }
+
+  ## lat_dim
+  if (!is.character(lat_dim) | length(lat_dim) > 1) {
+    stop("Parameter 'lat_dim' must be a character string.")
+  }
+  if (!lat_dim %in% names(dim(data))) {
+    stop("Parameter 'lat_dim' is not found in 'data'.")
+  }
+
+  ## lat
+  if (is.null(lat)) {
+    stop("Parameter 'lat' cannot be NULL.")
+  }
+  if (!is.numeric(lat)) {
+    stop("Parameter 'lat' must be a numeric array.")
+  }
+  if (dim(data)[lat_dim] != length(lat)) {
+    stop("Length of parameter 'lat' doesn't match the length of ",
+         "latitudinal dimension in parameter 'data'.")
+  }
+  ## ncores
+  if (!is.null(ncores)) {
+    if (!is.numeric(ncores) | ncores %% 1 != 0 | ncores <= 0 |
+        length(ncores) > 1) {
+      stop("Parameter 'ncores' must be either NULL or a positive integer.")
+    }
+  }
+
+  namedims <- names(dim(data))
+
+  res <- Apply(data = data,
+               target_dims = c(lat_dim),
+               fun = .WeightCells,
+               lat = as.vector(lat),
+               ncores = ncores)$output1
+  res <- Reorder(res, namedims)
+
+  return(res)
+}
+
+.WeightCells <- function(data, lat) {
+  dim_data <- dim(data)
+  wt <- sqrt(cos(lat * pi/180))
+  data <- wt*data
+  return(data)
+}
diff --git a/man/WeightCells.Rd b/man/WeightCells.Rd
new file mode 100644
index 0000000..dfee371
--- /dev/null
+++ b/man/WeightCells.Rd
@@ -0,0 +1,36 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/WeightCells.R
+\name{WeightCells}
+\alias{WeightCells}
+\title{Compute the square-root of the cosine of the latitude weighting on the given
+array.}
+\usage{
+WeightCells(data, lat, lat_dim = "lat", ncores = NULL)
+}
+\arguments{
+\item{data}{A numeric array with named dimensions, representing the data to be
+applied the weights. It should have at least the latitude dimension and it
+can have more other dimensions.}
+
+\item{lat}{A numeric array with one dimension containing the latitudes.}
+
+\item{lat_dim}{A character string indicating the name of the latitudinal
+dimension. The default value is 'lat'.}
+
+\item{ncores}{An integer indicating the number of cores to use for parallel
+computation. The default value is NULL.}
+}
+\value{
+An array with same dimensions as parameter 'data'.
+}
+\description{
+This function performs square-root of the cosine of the latitude weighting on
+the given array.
+}
+\examples{
+exp <- array(rnorm(1:30), dim = c(lat = 3, lon = 5, sdate = 2))
+lat <- rnorm(3)*10
+\dontrun{
+res <- WeightCells(data = exp, lat = lat)
+}
+}
diff --git a/tests/testthat/test-WeightCells.R b/tests/testthat/test-WeightCells.R
new file mode 100644
index 0000000..fe3def1
--- /dev/null
+++ b/tests/testthat/test-WeightCells.R
@@ -0,0 +1,94 @@
+context("s2dv::WeightCells tests")
+
+##############################################
+
+# dat1
+set.seed(1)
+data1 <- array(rnorm(5), dim = c(lat = 5))
+set.seed(2)
+lat1 <- array(rnorm(5)*5)
+
+# dat2
+set.seed(1)
+data2 <- array(rnorm(60), dim = c(sdate = 10, lat = 2, lon = 3))
+set.seed(2)
+lat2 <- array(rnorm(2)*10)
+
+
+##############################################
+test_that("1. Input checks", {
+  # data
+  expect_error(
+    WeightCells(c()),
+    "Parameter 'data' cannot be NULL."
+  )
+  expect_error(
+    WeightCells('lat'),
+    "Parameter 'data' must be a numeric array."
+  )
+  expect_error(
+    WeightCells(rnorm(5)),
+    "Parameter 'data' must be at least latitude dimension."
+  )
+  expect_error(
+    WeightCells(array(rnorm(5), dim = c(5))),
+    "Parameter 'data' must have dimension names."
+  )
+  #lat_dim
+  expect_error(
+    WeightCells(data1, lat1, 3),
+    "Parameter 'lat_dim' must be a character string."
+  )
+  expect_error(
+    WeightCells(data1, lat1, 'latitude'),
+    "Parameter 'lat_dim' is not found in 'data'."
+  )
+  #lat
+  expect_error(
+    WeightCells(data1, NULL),
+    "Parameter 'lat' cannot be NULL."
+  )
+  expect_error(
+    WeightCells(data1, rnorm(10)),
+    "Length of parameter 'lat' doesn't match the length of latitudinal dimension in parameter 'data'."
+  )
+  # ncores
+  expect_error(
+    WeightCells(data1, lat1, ncores = 1.5),
+    "Parameter 'ncores' must be either NULL or a positive integer."
+  )
+
+})
+
+##############################################
+test_that("2. Output checks: dat1", {
+
+  expect_equal(
+    dim(WeightCells(data1, lat1)),
+    c(lat = 5)
+  )
+  expect_equal(
+    as.vector(WeightCells(data1, lat1)),
+    c(-0.6254941, 0.1836314, -0.8316143, 1.5913985, 0.3295037),
+    tolerance = 0.0001
+  )
+
+})
+
+##############################################
+test_that("3. Output checks: dat2", {
+
+  expect_equal(
+    dim(WeightCells(data2, lat2)),
+    c(sdate = 10, lat = 2, lon = 3)
+  )
+  expect_equal(
+    as.vector(WeightCells(data2, lat2))[1:5],
+    c(-0.6226120, 0.1825171, -0.8305040, 1.5854976, 0.3274870),
+    tolerance = 0.0001
+  )
+  expect_equal(
+    dim(WeightCells(data2, lat2)),
+    dim(data2)
+  )
+})
--
GitLab

From b05a5653fbc55446301905de3561140476e898b9 Mon Sep 17 00:00:00 2001
From: Eva Rifa
Date: Thu, 8 Sep 2022 18:05:09 +0200
Subject: [PATCH 14/25] Add import s2dv

---
 R/WeightCells.R | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/R/WeightCells.R b/R/WeightCells.R
index 7bc3156..2c59cdb 100644
--- a/R/WeightCells.R
+++ b/R/WeightCells.R
@@ -21,7 +21,7 @@
 #'\dontrun{
 #'res <- WeightCells(data = exp, lat = lat)
 #'}
-#'@import multiApply
+#'@import multiApply, s2dv
 #'@export
 WeightCells <- function(data, lat, lat_dim = 'lat', ncores = NULL) {
--
GitLab

From 193ddd389946bfd46be5a24cb5de8e3d4d800e1f Mon Sep 17 00:00:00 2001
From: Eva Rifa
Date: Wed, 21 Sep 2022 16:38:10 +0200
Subject: [PATCH 15/25] Correct documentation and improved code of WeightCells

---
 R/WeightCells.R                   | 25 ++++++++++++-----------
 man/WeightCells.Rd                |  8 ++++----
 tests/testthat/test-WeightCells.R |  6 +++++-
 3 files changed, 23 insertions(+), 16 deletions(-)

diff --git a/R/WeightCells.R b/R/WeightCells.R
index 2c59cdb..8461abf 100644
--- a/R/WeightCells.R
+++ b/R/WeightCells.R
@@ -7,21 +7,21 @@
 #'@param data A numeric array with named dimensions, representing the data to be
 #' applied the weights. It should have at least the latitude dimension and it
 #' can have more other dimensions.
-#'@param lat A numeric array with one dimension containing the latitudes.
+#'@param lat A numeric vector or array with one dimension containing the
+#' latitudes (in degrees).
 #'@param lat_dim A character string indicating the name of the latitudinal
 #' dimension. The default value is 'lat'.
 #'@param ncores An integer indicating the number of cores to use for parallel
 #' computation. The default value is NULL.
 #'
-#'@return An array with same dimensions as parameter 'data'.
+#'@return An array containing the latitude weighted data with same dimensions as
+#'parameter 'data'.
#' #'@examples #'exp <- array(rnorm(1:30), dim = c(lat = 3, lon = 5, sdate = 2)) #'lat <- rnorm(3)*10 -#'\dontrun{ #'res <- WeightCells(data = exp, lat = lat) -#'} -#'@import multiApply, s2dv +#'@import multiApply #'@export WeightCells <- function(data, lat, lat_dim = 'lat', ncores = NULL) { @@ -53,12 +53,13 @@ WeightCells <- function(data, lat, lat_dim = 'lat', ncores = NULL) { stop("Parameter 'lat' cannot be NULL.") } if (!is.numeric(lat)) { - stop("Parameter 'lat' must be a numeric array.") + stop("Parameter 'lat' must be a numeric vector or array.") } if (dim(data)[lat_dim] != length(lat)) { stop("Length of parameter 'lat' doesn't match the length of ", "latitudinal dimension in parameter 'data'.") } + ## ncores if (!is.null(ncores)) { if (!is.numeric(ncores) | ncores %% 1 != 0 | ncores <= 0 | @@ -68,20 +69,22 @@ WeightCells <- function(data, lat, lat_dim = 'lat', ncores = NULL) { } namedims <- names(dim(data)) + lat <- as.vector(lat) + wt <- sqrt(cos(lat * pi/180)) res <- Apply(data = data, target_dims = c(lat_dim), fun = .WeightCells, - lat = as.vector(lat), + wt = wt, ncores = ncores)$output1 - res <- Reorder(res, namedims) + + order <- match(namedims, names(dim(res))) + res <- aperm(res, order) return(res) } -.WeightCells <- function(data, lat) { - dim_data <- dim(data) - wt <- sqrt(cos(lat * pi/180)) +.WeightCells <- function(data, wt) { data <- wt*data return(data) } diff --git a/man/WeightCells.Rd b/man/WeightCells.Rd index dfee371..dfd0570 100644 --- a/man/WeightCells.Rd +++ b/man/WeightCells.Rd @@ -12,7 +12,8 @@ WeightCells(data, lat, lat_dim = "lat", ncores = NULL) applied the weights. It should have at least the latitude dimension and it can have more other dimensions.} -\item{lat}{A numeric array with one dimension containing the latitudes.} +\item{lat}{A numeric vector or array with one dimension containing the +latitudes (in degrees).} \item{lat_dim}{A character string indicating the name of the latitudinal dimension. The default value is 'lat'.} @@ -21,7 +22,8 @@ dimension. The default value is 'lat'.} computation. The default value is NULL.} } \value{ -An array with same dimensions as parameter 'data'. +An array containing the latitude weighted data with same dimensions as +parameter 'data'. } \description{ This function performs square-root of the cosine of the latitude weighting on @@ -30,7 +32,5 @@ the given array. \examples{ exp <- array(rnorm(1:30), dim = c(lat = 3, lon = 5, sdate = 2)) lat <- rnorm(3)*10 -\dontrun{ res <- WeightCells(data = exp, lat = lat) } -} diff --git a/tests/testthat/test-WeightCells.R b/tests/testthat/test-WeightCells.R index fe3def1..2229456 100644 --- a/tests/testthat/test-WeightCells.R +++ b/tests/testthat/test-WeightCells.R @@ -48,6 +48,10 @@ test_that("1. Input checks", { WeightCells(data1, NULL), "Parameter 'lat' cannot be NULL." ) + expect_error( + WeightCells(data1, list(lat)), + "Parameter 'lat' must be a numeric vector or array." + ) expect_error( WeightCells(data1, rnorm(10)), "Length of parameter 'lat' doesn't match the length of latitudinal dimension in parameter 'data'." @@ -84,7 +88,7 @@ test_that("3. 
Output checks: dat2", { ) expect_equal( as.vector(WeightCells(data2, lat2))[1:5], - c( -0.6226120, 0.1825171, -0.8305040, 1.5854976, 0.3274870), + c(-0.6226120, 0.1825171, -0.8305040, 1.5854976, 0.3274870), tolerance = 0.0001 ) expect_equal( -- GitLab From c54c99b1fb00edfd0884d28d1d46074edcde94fd Mon Sep 17 00:00:00 2001 From: aho Date: Thu, 22 Sep 2022 14:40:56 +0200 Subject: [PATCH 16/25] dontrun examples to see if CICD can pass --- R/Extremes.R | 2 ++ R/Threshold.R | 2 ++ man/Extremes.Rd | 2 ++ man/Threshold.Rd | 2 ++ 4 files changed, 8 insertions(+) diff --git a/R/Extremes.R b/R/Extremes.R index 476b456..e7908c3 100644 --- a/R/Extremes.R +++ b/R/Extremes.R @@ -25,6 +25,7 @@ #'@import climdex.pcic #'@examples #'##Example synthetic data: +#'dontrun{ #'data <- 1:(2 * 3 * 372 * 1) #'dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) #'time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") @@ -37,6 +38,7 @@ #'res <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can.span.years = TRUE, #' max.missing.days = 5, ncores = NULL) #'str(res) +#'} #'@export Extremes <- function(data, threshold, op = ">", min.length = 6, spells.can.span.years = TRUE, max.missing.days = 5, dates = NULL, timedim = NULL, calendar = NULL, ncores = NULL) { diff --git a/R/Threshold.R b/R/Threshold.R index 181a5f0..9ded186 100644 --- a/R/Threshold.R +++ b/R/Threshold.R @@ -26,8 +26,10 @@ #'attr(time, "variables") <- metadata #'attr(data, 'Variables')$dat1$time <- time #' +#'\dontrun{ #'a <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL) #'str(a) +#'} #'@export Threshold <- function(data, dates = NULL, calendar = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL, na.rm = FALSE) { if (is.null(data)) { diff --git a/man/Extremes.Rd b/man/Extremes.Rd index 52998c7..8bc22f9 100644 --- a/man/Extremes.Rd +++ b/man/Extremes.Rd @@ -53,6 +53,7 @@ This routine compares data to the thresholds using the given operator, generatin } \examples{ ##Example synthetic data: +dontrun{ data <- 1:(2 * 3 * 372 * 1) dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") @@ -66,3 +67,4 @@ res <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.ca max.missing.days = 5, ncores = NULL) str(res) } +} diff --git a/man/Threshold.Rd b/man/Threshold.Rd index a0fa10a..e4b0fb1 100644 --- a/man/Threshold.Rd +++ b/man/Threshold.Rd @@ -46,6 +46,8 @@ metadata <- list(time = list(standard_name = 'time', long_name = 'time', calend attr(time, "variables") <- metadata attr(data, 'Variables')$dat1$time <- time +\dontrun{ a <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL) str(a) } +} -- GitLab From 92d01409d42376b371ed2cc0d18c11179fdcbd4a Mon Sep 17 00:00:00 2001 From: aho Date: Thu, 22 Sep 2022 14:47:42 +0200 Subject: [PATCH 17/25] Fix typo --- R/Extremes.R | 2 +- man/Extremes.Rd | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/R/Extremes.R b/R/Extremes.R index e7908c3..0a2950f 100644 --- a/R/Extremes.R +++ b/R/Extremes.R @@ -25,7 +25,7 @@ #'@import climdex.pcic #'@examples #'##Example synthetic data: -#'dontrun{ +#'\dontrun{ #'data <- 1:(2 * 3 * 372 * 1) #'dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) #'time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") diff --git a/man/Extremes.Rd b/man/Extremes.Rd index 8bc22f9..e7abbe5 100644 --- a/man/Extremes.Rd +++ b/man/Extremes.Rd @@ 
-53,7 +53,7 @@ This routine compares data to the thresholds using the given operator, generatin } \examples{ ##Example synthetic data: -dontrun{ +\dontrun{ data <- 1:(2 * 3 * 372 * 1) dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") -- GitLab From 653e4fef25eced8c47a38d384bab4cbdffa211f9 Mon Sep 17 00:00:00 2001 From: aho Date: Thu, 22 Sep 2022 15:01:01 +0200 Subject: [PATCH 18/25] Try again for strftime() error --- R/Threshold.R | 5 ++--- man/Threshold.Rd | 3 +-- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/R/Threshold.R b/R/Threshold.R index 9ded186..b477af7 100644 --- a/R/Threshold.R +++ b/R/Threshold.R @@ -26,10 +26,9 @@ #'attr(time, "variables") <- metadata #'attr(data, 'Variables')$dat1$time <- time #' -#'\dontrun{ #'a <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL) #'str(a) -#'} +#' #'@export Threshold <- function(data, dates = NULL, calendar = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL, na.rm = FALSE) { if (is.null(data)) { @@ -104,7 +103,7 @@ Threshold <- function(data, dates = NULL, calendar = NULL, base.range = NULL, qt "element will be used.") } dates <- as.PCICt(dates, cal = calendar) - dates = as.character(dates) + dates <- as.character(dates) jdays <- as.numeric(strftime(dates, format = "%j")) if (calendar == "gregorian" | calendar == "standard" | calendar == "proleptic_gregorian") { year <- as.numeric(strftime(dates, format = "%Y")) diff --git a/man/Threshold.Rd b/man/Threshold.Rd index e4b0fb1..e6ae99f 100644 --- a/man/Threshold.Rd +++ b/man/Threshold.Rd @@ -46,8 +46,7 @@ metadata <- list(time = list(standard_name = 'time', long_name = 'time', calend attr(time, "variables") <- metadata attr(data, 'Variables')$dat1$time <- time -\dontrun{ a <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncores = NULL) str(a) -} + } -- GitLab From 0a996f462ae4fe934d85686d6d4c5369735c71e5 Mon Sep 17 00:00:00 2001 From: aho Date: Thu, 22 Sep 2022 19:56:45 +0200 Subject: [PATCH 19/25] Change example to avoid problem mentioned in https://earth.bsc.es/gitlab/es/requests/-/issues/1927 --- R/Extremes.R | 9 ++++----- R/Threshold.R | 6 +++--- man/Extremes.Rd | 9 ++++----- man/Threshold.Rd | 6 +++--- 4 files changed, 14 insertions(+), 16 deletions(-) diff --git a/R/Extremes.R b/R/Extremes.R index 0a2950f..facb6cf 100644 --- a/R/Extremes.R +++ b/R/Extremes.R @@ -25,10 +25,9 @@ #'@import climdex.pcic #'@examples #'##Example synthetic data: -#'\dontrun{ -#'data <- 1:(2 * 3 * 372 * 1) -#'dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) -#'time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") +#'data <- 1:(2 * 3 * 310 * 1) +#'dim(data) <- c(time = 310, lon = 2, lat = 3, model = 1) +#'time <- as.POSIXct(paste(sort(rep(1902:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") #'metadata <- list(time = list(standard_name = 'time', long_name = 'time', calendar = 'noleap', #' units = 'days since 1970-01-01 00:00:00', prec = 'double', #' dim = list(list(name = 'time', unlim = FALSE)))) @@ -38,7 +37,7 @@ #'res <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can.span.years = TRUE, #' max.missing.days = 5, ncores = NULL) #'str(res) -#'} +#' #'@export Extremes <- function(data, threshold, op = ">", min.length = 6, spells.can.span.years = TRUE, max.missing.days = 5, dates = NULL, timedim = NULL, calendar = NULL, ncores = NULL) { diff --git a/R/Threshold.R b/R/Threshold.R index b477af7..f6df348 
100644 --- a/R/Threshold.R +++ b/R/Threshold.R @@ -17,9 +17,9 @@ #'@importFrom stats quantile #'@examples #'##Example synthetic data: -#'data <- 1:(2 * 3 * 372 * 1) -#'dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) -#'time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") +#'data <- 1:(2 * 3 * 310 * 1) +#'dim(data) <- c(time = 310, lon = 2, lat = 3, model = 1) +#'time <- as.POSIXct(paste(sort(rep(1902:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") #'metadata <- list(time = list(standard_name = 'time', long_name = 'time', calendar = 'noleap', #' units = 'days since 1970-01-01 00:00:00', prec = 'double', #' dim = list(list(name = 'time', unlim = FALSE)))) diff --git a/man/Extremes.Rd b/man/Extremes.Rd index e7abbe5..8042619 100644 --- a/man/Extremes.Rd +++ b/man/Extremes.Rd @@ -53,10 +53,9 @@ This routine compares data to the thresholds using the given operator, generatin } \examples{ ##Example synthetic data: -\dontrun{ -data <- 1:(2 * 3 * 372 * 1) -dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) -time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") +data <- 1:(2 * 3 * 310 * 1) +dim(data) <- c(time = 310, lon = 2, lat = 3, model = 1) +time <- as.POSIXct(paste(sort(rep(1902:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") metadata <- list(time = list(standard_name = 'time', long_name = 'time', calendar = 'noleap', units = 'days since 1970-01-01 00:00:00', prec = 'double', dim = list(list(name = 'time', unlim = FALSE)))) @@ -66,5 +65,5 @@ threshold <- Threshold(data, dates = NULL, base.range = NULL, qtiles = 0.9, ncor res <- Extremes(data, threshold = threshold, op = ">", min.length = 6, spells.can.span.years = TRUE, max.missing.days = 5, ncores = NULL) str(res) -} + } diff --git a/man/Threshold.Rd b/man/Threshold.Rd index e6ae99f..184cb91 100644 --- a/man/Threshold.Rd +++ b/man/Threshold.Rd @@ -37,9 +37,9 @@ This function computes the threshold based on a quantile value for each day of t } \examples{ ##Example synthetic data: -data <- 1:(2 * 3 * 372 * 1) -dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) -time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") +data <- 1:(2 * 3 * 310 * 1) +dim(data) <- c(time = 310, lon = 2, lat = 3, model = 1) +time <- as.POSIXct(paste(sort(rep(1902:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") metadata <- list(time = list(standard_name = 'time', long_name = 'time', calendar = 'noleap', units = 'days since 1970-01-01 00:00:00', prec = 'double', dim = list(list(name = 'time', unlim = FALSE)))) -- GitLab From 9bd62a7181e9c4e3162f4c06c3be28d0f268d990 Mon Sep 17 00:00:00 2001 From: aho Date: Fri, 23 Sep 2022 15:37:56 +0200 Subject: [PATCH 20/25] Use strftime more explicitly --- R/Threshold.R | 8 ++++---- man/Threshold.Rd | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/R/Threshold.R b/R/Threshold.R index f6df348..c6f4298 100644 --- a/R/Threshold.R +++ b/R/Threshold.R @@ -17,9 +17,9 @@ #'@importFrom stats quantile #'@examples #'##Example synthetic data: -#'data <- 1:(2 * 3 * 310 * 1) -#'dim(data) <- c(time = 310, lon = 2, lat = 3, model = 1) -#'time <- as.POSIXct(paste(sort(rep(1902:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") +#'data <- 1:(2 * 3 * 372 * 1) +#'dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) +#'time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") #'metadata <- list(time = list(standard_name = 'time', long_name = 'time', calendar = 'noleap', #' units = 'days since 1970-01-01 
00:00:00', prec = 'double', #' dim = list(list(name = 'time', unlim = FALSE)))) @@ -103,7 +103,7 @@ Threshold <- function(data, dates = NULL, calendar = NULL, base.range = NULL, qt "element will be used.") } dates <- as.PCICt(dates, cal = calendar) - dates <- as.character(dates) + dates <- as.POSIXlt(as.character(dates), format = "%Y-%m-%d") jdays <- as.numeric(strftime(dates, format = "%j")) if (calendar == "gregorian" | calendar == "standard" | calendar == "proleptic_gregorian") { year <- as.numeric(strftime(dates, format = "%Y")) diff --git a/man/Threshold.Rd b/man/Threshold.Rd index 184cb91..e6ae99f 100644 --- a/man/Threshold.Rd +++ b/man/Threshold.Rd @@ -37,9 +37,9 @@ This function computes the threshold based on a quantile value for each day of t } \examples{ ##Example synthetic data: -data <- 1:(2 * 3 * 310 * 1) -dim(data) <- c(time = 310, lon = 2, lat = 3, model = 1) -time <- as.POSIXct(paste(sort(rep(1902:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") +data <- 1:(2 * 3 * 372 * 1) +dim(data) <- c(time = 372, lon = 2, lat = 3, model = 1) +time <- as.POSIXct(paste(sort(rep(1900:1911, 31)), 1, 1:31, sep = "-"), tz = "CET") metadata <- list(time = list(standard_name = 'time', long_name = 'time', calendar = 'noleap', units = 'days since 1970-01-01 00:00:00', prec = 'double', dim = list(list(name = 'time', unlim = FALSE)))) -- GitLab From e548c13f414aa2cc7618e88a50b974f8bd4911ce Mon Sep 17 00:00:00 2001 From: Eva Rifa Date: Fri, 7 Oct 2022 16:52:27 +0200 Subject: [PATCH 21/25] Added method parameter and correct test --- NAMESPACE | 2 +- R/{WeightCells.R => WeightedCells.R} | 23 +++++++-- man/{WeightCells.Rd => WeightedCells.Rd} | 14 ++++-- ...est-WeightCells.R => test-WeightedCells.R} | 49 ++++++++++++------- 4 files changed, 59 insertions(+), 29 deletions(-) rename R/{WeightCells.R => WeightedCells.R} (80%) rename man/{WeightCells.Rd => WeightedCells.Rd} (72%) rename tests/testthat/{test-WeightCells.R => test-WeightedCells.R} (59%) diff --git a/NAMESPACE b/NAMESPACE index 58def58..e3226a5 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -15,7 +15,7 @@ export(ShiftLon) export(Subset) export(Threshold) export(WaveDuration) -export(WeightCells) +export(WeightedCells) export(WeightedMean) import(PCICt) import(climdex.pcic) diff --git a/R/WeightCells.R b/R/WeightedCells.R similarity index 80% rename from R/WeightCells.R rename to R/WeightedCells.R index 8461abf..1ce1dc4 100644 --- a/R/WeightCells.R +++ b/R/WeightedCells.R @@ -11,6 +11,9 @@ #' latitudes (in degrees). #'@param lat_dim A character string indicating the name of the latitudinal #' dimension. The default value is 'lat'. +#'@param method A character string indicating the type of weighting applied: +#' 'cos' (cosine of the latitude) or 'sqrtcos' (square-root of the +#' cosine of the latitude). #'@param ncores An integer indicating the number of cores to use for parallel #' computation. The default value is NULL. 
#' @@ -20,10 +23,10 @@ #'@examples #'exp <- array(rnorm(1:30), dim = c(lat = 3, lon = 5, sdate = 2)) #'lat <- rnorm(3)*10 -#'res <- WeightCells(data = exp, lat = lat) +#'res <- WeightedCells(data = exp, lat = lat) #'@import multiApply #'@export -WeightCells <- function(data, lat, lat_dim = 'lat', ncores = NULL) { +WeightedCells <- function(data, lat, lat_dim = 'lat', method = 'cos', ncores = NULL) { # Check inputs ## data @@ -60,6 +63,11 @@ WeightCells <- function(data, lat, lat_dim = 'lat', ncores = NULL) { "latitudinal dimension in parameter 'data'.") } + ## method + if (!method %in% c('cos','sqrtcos')) { + stop("Parameter 'method' must be one of 'cos' or 'sqrtcos'.") + } + ## ncores if (!is.null(ncores)) { if (!is.numeric(ncores) | ncores %% 1 != 0 | ncores <= 0 | @@ -70,11 +78,16 @@ WeightCells <- function(data, lat, lat_dim = 'lat', ncores = NULL) { namedims <- names(dim(data)) lat <- as.vector(lat) - wt <- sqrt(cos(lat * pi/180)) + if (method == 'cos') { + wt <- cos(lat * pi/180) + } else { + wt <- sqrt(cos(lat * pi/180)) + } + res <- Apply(data = data, target_dims = c(lat_dim), - fun = .WeightCells, + fun = .WeightedCells, wt = wt, ncores = ncores)$output1 @@ -84,7 +97,7 @@ WeightCells <- function(data, lat, lat_dim = 'lat', ncores = NULL) { return(res) } -.WeightCells <- function(data, wt) { +.WeightedCells <- function(data, wt) { data <- wt*data return(data) } diff --git a/man/WeightCells.Rd b/man/WeightedCells.Rd similarity index 72% rename from man/WeightCells.Rd rename to man/WeightedCells.Rd index dfd0570..0a8fe98 100644 --- a/man/WeightCells.Rd +++ b/man/WeightedCells.Rd @@ -1,11 +1,11 @@ % Generated by roxygen2: do not edit by hand -% Please edit documentation in R/WeightCells.R -\name{WeightCells} -\alias{WeightCells} +% Please edit documentation in R/WeightedCells.R +\name{WeightedCells} +\alias{WeightedCells} \title{Compute the square-root of the cosine of the latitude weighting on the given array.} \usage{ -WeightCells(data, lat, lat_dim = "lat", ncores = NULL) +WeightedCells(data, lat, lat_dim = "lat", method = "cos", ncores = NULL) } \arguments{ \item{data}{A numeric array with named dimensions, representing the data to be @@ -18,6 +18,10 @@ latitudes (in degrees).} \item{lat_dim}{A character string indicating the name of the latitudinal dimension. The default value is 'lat'.} +\item{method}{A character string indicating the type of weighting applied: +'cos' (cosine of the latitude) or 'sqrtcos' (square-root of the +cosine of the latitude).} + \item{ncores}{An integer indicating the number of cores to use for parallel computation. The default value is NULL.} } @@ -32,5 +36,5 @@ the given array. \examples{ exp <- array(rnorm(1:30), dim = c(lat = 3, lon = 5, sdate = 2)) lat <- rnorm(3)*10 -res <- WeightCells(data = exp, lat = lat) +res <- WeightedCells(data = exp, lat = lat) } diff --git a/tests/testthat/test-WeightCells.R b/tests/testthat/test-WeightedCells.R similarity index 59% rename from tests/testthat/test-WeightCells.R rename to tests/testthat/test-WeightedCells.R index 2229456..29e5e9b 100644 --- a/tests/testthat/test-WeightCells.R +++ b/tests/testthat/test-WeightedCells.R @@ -1,4 +1,4 @@ -context("s2dv::WeightCells tests") +context("s2dv::WeightedCells tests") ############################################## @@ -19,46 +19,51 @@ lat2 <- array(rnorm(2)*10) test_that("1. Input checks", { # data expect_error( - WeightCells(c()), + WeightedCells(c()), "Parameter 'data' cannot be NULL." 
) expect_error( - WeightCells('lat'), + WeightedCells('lat'), "Parameter 'data' must be a numeric array." ) expect_error( - WeightCells(rnorm(5)), + WeightedCells(rnorm(5)), "Parameter 'data' must be at least latitude dimension." ) expect_error( - WeightCells(array(rnorm(5), dim = c(5))), + WeightedCells(array(rnorm(5), dim = c(5))), "Parameter 'data' must have dimension names." ) #lat_dim expect_error( - WeightCells(data1, lat1, 3), + WeightedCells(data1, lat1, 3), "Parameter 'lat_dim' must be a character string." ) expect_error( - WeightCells(data1, lat1, 'latitude'), + WeightedCells(data1, lat1, 'latitude'), "Parameter 'lat_dim' is not found in 'data'." ) #lat expect_error( - WeightCells(data1, NULL), + WeightedCells(data1, NULL), "Parameter 'lat' cannot be NULL." ) expect_error( - WeightCells(data1, list(lat)), + WeightedCells(data1, list(lat1)), "Parameter 'lat' must be a numeric vector or array." ) expect_error( - WeightCells(data1, rnorm(10)), + WeightedCells(data1, rnorm(10)), "Length of parameter 'lat' doesn't match the length of latitudinal dimension in parameter 'data'." ) + # method + expect_error( + WeightedCells(data1, lat1, method = 1.5), + "Parameter 'method' must be one of 'cos' or 'sqrtcos'." + ) # ncores expect_error( - WeightCells(data1, lat1, ncores = 1.5), + WeightedCells(data1, lat1, ncores = 1.5), "Parameter 'ncores' must be either NULL or a positive integer." ) @@ -68,12 +73,12 @@ test_that("1. Input checks", { test_that("2. Output checks: dat1", { expect_equal( - dim(WeightCells(data1, lat1)), + dim(WeightedCells(data1, lat1)), c(lat = 5) ) expect_equal( - as.vector(WeightCells(data1, lat1)), - c(-0.6254941, 0.1836314, -0.8316143, 1.5913985, 0.3295037), + as.vector(WeightedCells(data1, lat1)), + c(-0.6245359, 0.1836194, -0.8276192, 1.5875256, 0.3294997), tolerance = 0.0001 ) @@ -83,16 +88,24 @@ test_that("2. Output checks: dat1", { test_that("3. Output checks: dat2", { expect_equal( - dim(WeightCells(data2, lat2)), + dim(WeightedCells(data2, lat2)), c(sdate = 10, lat = 2, lon = 3) ) expect_equal( - as.vector(WeightCells(data2, lat2))[1:5], - c(-0.6226120, 0.1825171, -0.8305040, 1.5854976, 0.3274870), + as.vector(WeightedCells(data2, lat2))[1:5], + c(-0.6187938, 0.1813978, -0.8254109, 1.5757744, 0.3254787), tolerance = 0.0001 ) expect_equal( - dim(WeightCells(data2, lat2)), + dim(WeightedCells(data2, lat2)), dim(data2) ) + expect_equal( + WeightedCells(data2, lat2), + WeightedCells(data2, lat2, method = 'sqrtcos')*as.vector(sqrt(cos(lat2*pi/180))), + tolerance = 0.01 + ) + }) + +############################################## -- GitLab From 089d3991e32c307aa56317952ea867c297da774f Mon Sep 17 00:00:00 2001 From: aho Date: Wed, 2 Nov 2022 16:28:18 +0100 Subject: [PATCH 22/25] Refine doc and format --- R/WeightedCells.R | 14 +++++++------- man/WeightedCells.Rd | 4 ++-- tests/testthat/test-WeightedCells.R | 4 ++-- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/R/WeightedCells.R b/R/WeightedCells.R index 1ce1dc4..884870f 100644 --- a/R/WeightedCells.R +++ b/R/WeightedCells.R @@ -13,7 +13,7 @@ #' dimension. The default value is 'lat'. #'@param method A character string indicating the type of weighting applied: #' 'cos' (cosine of the latitude) or 'sqrtcos' (square-root of the -#' cosine of the latitude). +#' cosine of the latitude). The default value is 'cos'. #'@param ncores An integer indicating the number of cores to use for parallel #' computation. The default value is NULL. 
#' @@ -22,7 +22,7 @@ #' #'@examples #'exp <- array(rnorm(1:30), dim = c(lat = 3, lon = 5, sdate = 2)) -#'lat <- rnorm(3)*10 +#'lat <- c(10, 15, 20) #'res <- WeightedCells(data = exp, lat = lat) #'@import multiApply #'@export @@ -64,8 +64,8 @@ WeightedCells <- function(data, lat, lat_dim = 'lat', method = 'cos', ncores = N } ## method - if (!method %in% c('cos','sqrtcos')) { - stop("Parameter 'method' must be one of 'cos' or 'sqrtcos'.") + if (!method %in% c('cos', 'sqrtcos')) { + stop("Parameter 'method' must be 'cos' or 'sqrtcos'.") } ## ncores @@ -80,9 +80,9 @@ WeightedCells <- function(data, lat, lat_dim = 'lat', method = 'cos', ncores = N lat <- as.vector(lat) if (method == 'cos') { - wt <- cos(lat * pi/180) + wt <- cos(lat * pi / 180) } else { - wt <- sqrt(cos(lat * pi/180)) + wt <- sqrt(cos(lat * pi / 180)) } res <- Apply(data = data, @@ -98,6 +98,6 @@ WeightedCells <- function(data, lat, lat_dim = 'lat', method = 'cos', ncores = N } .WeightedCells <- function(data, wt) { - data <- wt*data + data <- wt * data return(data) } diff --git a/man/WeightedCells.Rd b/man/WeightedCells.Rd index 0a8fe98..9b5c880 100644 --- a/man/WeightedCells.Rd +++ b/man/WeightedCells.Rd @@ -20,7 +20,7 @@ dimension. The default value is 'lat'.} \item{method}{A character string indicating the type of weighting applied: 'cos' (cosine of the latitude) or 'sqrtcos' (square-root of the -cosine of the latitude).} +cosine of the latitude). The default value is 'cos'.} \item{ncores}{An integer indicating the number of cores to use for parallel computation. The default value is NULL.} @@ -35,6 +35,6 @@ the given array. } \examples{ exp <- array(rnorm(1:30), dim = c(lat = 3, lon = 5, sdate = 2)) -lat <- rnorm(3)*10 +lat <- c(10, 15, 20) res <- WeightedCells(data = exp, lat = lat) } diff --git a/tests/testthat/test-WeightedCells.R b/tests/testthat/test-WeightedCells.R index 29e5e9b..dcf171d 100644 --- a/tests/testthat/test-WeightedCells.R +++ b/tests/testthat/test-WeightedCells.R @@ -59,7 +59,7 @@ test_that("1. Input checks", { # method expect_error( WeightedCells(data1, lat1, method = 1.5), - "Parameter 'method' must be one of 'cos' or 'sqrtcos'." + "Parameter 'method' must be 'cos' or 'sqrtcos'." ) # ncores expect_error( @@ -102,7 +102,7 @@ test_that("3. 
Output checks: dat2", { ) expect_equal( WeightedCells(data2, lat2), - WeightedCells(data2, lat2, method = 'sqrtcos')*as.vector(sqrt(cos(lat2*pi/180))), + WeightedCells(data2, lat2, method = 'sqrtcos') * as.vector(sqrt(cos(lat2 * pi/180))), tolerance = 0.01 ) -- GitLab From f57fd02b68e50225dfd7824971eebf2fd6eda3d6 Mon Sep 17 00:00:00 2001 From: aho Date: Wed, 2 Nov 2022 17:25:13 +0100 Subject: [PATCH 23/25] Version bump to 0.2.0 --- .Rbuildignore | 2 +- DESCRIPTION | 9 +++++---- NEWS.md | 8 ++++++++ 3 files changed, 14 insertions(+), 5 deletions(-) create mode 100644 NEWS.md diff --git a/.Rbuildignore b/.Rbuildignore index 0c51051..dde50c4 100644 --- a/.Rbuildignore +++ b/.Rbuildignore @@ -5,5 +5,5 @@ ./.nc$ .*\.gitlab-ci.yml$ # Ignore tests when submitting to CRAN -#^tests$ +^tests$ diff --git a/DESCRIPTION b/DESCRIPTION index bd980e3..a75e2d8 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,13 +1,14 @@ Package: ClimProjDiags Title: Set of Tools to Compute Various Climate Indices -Version: 0.1.3 +Version: 0.2.0 Authors@R: c( person("BSC-CNS", role = c("aut", "cph")), - person("Nuria", "Perez-Zanon", , "nuria.perez@bsc.es", role = c("aut", "cre"), comment = c(ORCID = "0000-0001-8568-3071")), - person("An-Chi", "Ho", , "an.ho@bsc.es", role = "ctb"), + person("Nuria", "Perez-Zanon", , "nuria.perez@bsc.es", role = c("aut"), comment = c(ORCID = "0000-0001-8568-3071")), + person("An-Chi", "Ho", , "an.ho@bsc.es", role = c("cre", "ctb")), person("Nicolau", "Manubens", , "nicolau.manubens@bsc.es", role = "ctb"), person("Alasdair", "Hunter", , "alasdair.hunter@bsc.es", role = "aut"), - person("Louis-Philippe", "Caron", , "louis-philippe.caron@bsc.es", role = "ctb")) + person("Louis-Philippe", "Caron", , "louis-philippe.caron@bsc.es", role = "ctb"), + person("Eva", "Rifà", , "eva.rifarovira@bsc.es", role = "ctb")) Description: Set of tools to compute metrics and indices for climate analysis. The package provides functions to compute extreme indices, evaluate the agreement between models and combine theses models into an ensemble. Multi-model diff --git a/NEWS.md b/NEWS.md new file mode 100644 index 0000000..461ff84 --- /dev/null +++ b/NEWS.md @@ -0,0 +1,8 @@ +# 0.2.0 (Release date: 2022-11-04) +- New functions: ShiftLon, WeightedCells +- Bugfix of Subset() when only one dimension left after subsetting and when +parameter "indices" is not a list. +- Bugfix of WeightedMean() at the grid points that are across positive and +negative longitudes. 
+ + -- GitLab From dd3754afdb7c9ca5e0bff64258b44dadccfd4608 Mon Sep 17 00:00:00 2001 From: aho Date: Thu, 3 Nov 2022 14:55:23 +0100 Subject: [PATCH 24/25] Sanity check for along = integer(0) --- R/Subset.R | 3 ++- tests/testthat/test-Subset.R | 4 ++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/R/Subset.R b/R/Subset.R index c077c2a..6c23215 100644 --- a/R/Subset.R +++ b/R/Subset.R @@ -62,7 +62,8 @@ Subset <- function(x, along, indices, drop = FALSE) { } # Check along - if (any(sapply(along, function(x) !is.numeric(x) && !is.character(x)))) { + if (any(sapply(along, function(x) !is.numeric(x) && !is.character(x))) | + length(along) == 0) { stop("All provided dimension indices in 'along' must be integers or character strings.") } if (any(sapply(along, is.character))) { diff --git a/tests/testthat/test-Subset.R b/tests/testthat/test-Subset.R index 42f609a..e769453 100644 --- a/tests/testthat/test-Subset.R +++ b/tests/testthat/test-Subset.R @@ -28,6 +28,10 @@ Subset(x = dat1, along = T), "All provided dimension indices in 'along' must be integers or character strings." ) expect_error( +Subset(x = dat1, along = integer(0)), +"All provided dimension indices in 'along' must be integers or character strings." +) +expect_error( Subset(x = dat1, along = c('dat', 'dat')), "The parameter 'along' must not contain repeated dimension names." ) -- GitLab From 671f975afd377959fd8fdf2a7e26ab58eddfd732 Mon Sep 17 00:00:00 2001 From: aho Date: Mon, 7 Nov 2022 12:50:23 +0100 Subject: [PATCH 25/25] Move Suggests above --- DESCRIPTION | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/DESCRIPTION b/DESCRIPTION index a75e2d8..26f5f0e 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -24,14 +24,14 @@ Imports: plyr, climdex.pcic, stats -License: Apache License 2.0 -URL: https://earth.bsc.es/gitlab/es/ClimProjDiags -BugReports: https://earth.bsc.es/gitlab/es/ClimProjDiags/-/issues -Encoding: UTF-8 -RoxygenNote: 7.2.0 Suggests: knitr, testthat, markdown, rmarkdown +License: Apache License 2.0 +URL: https://earth.bsc.es/gitlab/es/ClimProjDiags +BugReports: https://earth.bsc.es/gitlab/es/ClimProjDiags/-/issues +Encoding: UTF-8 +RoxygenNote: 7.2.0 VignetteBuilder: knitr -- GitLab