diff --git a/R/AbsToProbs.R b/R/AbsToProbs.R index 708fabdafbec4e9cbf094fb53c586741a48b52f6..e086e6e429e92615bf9300d5d22bffd083ae7bd7 100644 --- a/R/AbsToProbs.R +++ b/R/AbsToProbs.R @@ -38,47 +38,44 @@ #'exp_probs <- CST_AbsToProbs(exp) #'exp$data <- array(rnorm(5 * 3 * 214 * 2), #' c(member = 5, sdate = 3, ftime = 214, lon = 2)) -#'exp$Dates[[1]] <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), -#' as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), -#' seq(as.Date("01-05-2001", format = "%d-%m-%Y"), -#' as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), -#' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), -#' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) -#'exp_probs <- CST_AbsToProbs(exp, start = list(21, 4), end = list(21, 6)) -#' +#'exp$attrs$Dates <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), +#' as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), +#' seq(as.Date("01-05-2001", format = "%d-%m-%Y"), +#' as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), +#' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), +#' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) +#'dim(exp$attrs$Dates) <- c(ftime = 214, sdate = 3) +#'exp_probs <- CST_AbsToProbs(data = exp, start = list(21, 4), end = list(21, 6)) #'@import multiApply #'@importFrom stats ecdf #'@export CST_AbsToProbs <- function(data, start = NULL, end = NULL, time_dim = 'ftime', memb_dim = 'member', sdate_dim = 'sdate', ncores = NULL) { + # Check 's2dv_cube' if (!inherits(data, 's2dv_cube')) { - stop("Parameter 'data' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") + stop("Parameter 'data' must be of the class 's2dv_cube'.") } - # when subsetting is needed, dimensions are also needed: + # Dates subset if (!is.null(start) && !is.null(end)) { - if (is.null(dim(data$Dates$start))) { - if (length(data$Dates$start) != dim(data$data)[time_dim]) { - if (length(data$Dates$start) == - prod(dim(data$data)[time_dim] * dim(data$data)[sdate_dim])) { - dim(data$Dates$start) <- c(dim(data$data)[time_dim], - dim(data$data)[sdate_dim]) - } else { - warning("Dimensions in 'data' element 'Dates$start' are missed and ", - "all data would be used.") - } - } + if (is.null(dim(data$attrs$Dates))) { + warning("Dimensions in 'data' element 'attrs$Dates' are missed and ", + "all data would be used.") + start <- NULL + end <- NULL } } - probs <- AbsToProbs(data$data, data$Dates[[1]], start, end, - time_dim = time_dim, memb_dim = memb_dim, - sdate_dim = sdate_dim, ncores = ncores) + + probs <- AbsToProbs(data = data$data, dates = data$attrs$Dates, + start = start, end = end, time_dim = time_dim, + memb_dim = memb_dim, sdate_dim = sdate_dim, + ncores = ncores) data$data <- probs if (!is.null(start) && !is.null(end)) { - data$Dates <- SelectPeriodOnDates(dates = data$Dates[[1]], - start = start, end = end, - time_dim = time_dim, ncores = ncores) + data$attrs$Dates <- SelectPeriodOnDates(dates = data$attrs$Dates, + start = start, end = end, + time_dim = time_dim, + ncores = ncores) } return(data) } @@ -90,9 +87,11 @@ CST_AbsToProbs <- function(data, start = NULL, end = NULL, #'Distribution Function excluding the corresponding initialization. #' #'@param data A multidimensional array with named dimensions. -#'@param dates A vector of dates or a multidimensional array of dates with named -#' dimensions matching the dimensions on parameter 'data'. By default it is -#' NULL, to select a period this parameter must be provided. 
+#'@param dates An optional parameter containing a vector of dates or a +#' multidimensional array of dates with named dimensions matching the +#' dimensions on parameter 'data'. By default it is NULL, to select a period +#' this parameter must be provided. All common dimensions with 'data' need to +#' have the same length. #'@param start An optional parameter to define the initial date of the period #' to select from the data by providing a list of two elements: the initial #' date of the period and the initial month of the period. By default it is set @@ -118,48 +117,76 @@ CST_AbsToProbs <- function(data, start = NULL, end = NULL, #'probabilites in the element \code{data}. #' #'@examples -#'exp <- array(rnorm(216), dim = c(dataset = 1, member = 2, sdate = 3, ftime = 9, lat = 2, lon = 2)) +#'exp <- array(rnorm(216), dim = c(dataset = 1, member = 2, sdate = 3, +#' ftime = 9, lat = 2, lon = 2)) #'exp_probs <- AbsToProbs(exp) -#'data <- array(rnorm(5 * 2 * 61 * 1), -#' c(member = 5, sdate = 2, ftime = 61, lon = 1)) +#'data <- array(rnorm(5 * 3 * 61 * 1), +#' c(member = 5, sdate = 3, ftime = 61, lon = 1)) #'Dates <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), #' as.Date("30-06-2000", format = "%d-%m-%Y"), by = 'day'), #' seq(as.Date("01-05-2001", format = "%d-%m-%Y"), #' as.Date("30-06-2001", format = "%d-%m-%Y"), by = 'day'), #' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), #' as.Date("30-06-2002", format = "%d-%m-%Y"), by = 'day')) -#'exp_probs <- AbsToProbs(exp, start = list(21, 4), end = list(21, 6)) +#'dim(Dates) <- c(ftime = 61, sdate = 3) +#'exp_probs <- AbsToProbs(data, dates = Dates, start = list(21, 4), +#' end = list(21, 6)) #' #'@import multiApply #'@importFrom stats ecdf #'@export -AbsToProbs <- function(data, dates = NULL, start = NULL, end = NULL, time_dim = 'time', - memb_dim = 'member', +AbsToProbs <- function(data, dates = NULL, start = NULL, end = NULL, + time_dim = 'ftime', memb_dim = 'member', sdate_dim = 'sdate', ncores = NULL) { - if (is.null(data)) { - stop("Parameter 'data' cannot be NULL.") - } + # data if (!is.numeric(data)) { stop("Parameter 'data' must be numeric.") } + data_is_array <- TRUE if (!is.array(data)) { + data_is_array <- FALSE dim(data) <- c(length(data), 1) names(dim(data)) <- c(memb_dim, sdate_dim) if (!is.null(start) && !is.null(end)) { - if (!any(c(is.list(start), is.list(end)))) { - stop("Parameter 'start' and 'end' must be lists indicating the ", - "day and the month of the period start and end.") + warning("Parameter 'data' doesn't have dimension names and all ", + "data will be used.") + start <- NULL + end <- NULL + } + } + # dates subset + if (!is.null(start) && !is.null(end)) { + if (!all(c(is.list(start), is.list(end)))) { + stop("Parameter 'start' and 'end' must be lists indicating the ", + "day and the month of the period start and end.") + } + if (is.null(dates)) { + warning("Parameter 'dates' is not provided and all data will be used.") + } else { + if (is.null(dim(dates))) { + warning("Parameter 'dates' doesn't have dimension names and all ", + "data will be used.") + } else { + data <- SelectPeriodOnData(data, dates, start, end, + time_dim = time_dim, ncores = ncores) } - data <- SelectPeriodOnData(data, dates, start, end, - time_dim = time_dim, ncores = ncores) } } - probs <- Apply(list(data), target_dims = c(memb_dim, sdate_dim), fun = .abstoprobs, + probs <- Apply(list(data), target_dims = c(memb_dim, sdate_dim), + fun = .abstoprobs, ncores = ncores)$output1 + if (!data_is_array) { + dim(probs) <- NULL + } else { + pos <- 
match(names(dim(data)), names(dim(probs))) + probs <- aperm(probs, pos) + } + return(probs) } + .abstoprobs <- function(data) { - if (dim(data)[2] > 1 ) { # Several sdates + if (dim(data)[2] > 1) { # Several sdates qres <- unlist( lapply(1:(dim(data)[1]), function(x) { # dim 1: member lapply(1:(dim(data)[2]), function(y) { # dim 2: sdate diff --git a/R/AccumulationExceedingThreshold.R b/R/AccumulationExceedingThreshold.R index 5cb70f354e3df42ab95d56488a42a12b895248c0..7fd78f49f9de6d3fad3a65043ed7a46f2a19e143 100644 --- a/R/AccumulationExceedingThreshold.R +++ b/R/AccumulationExceedingThreshold.R @@ -62,25 +62,20 @@ CST_AccumulationExceedingThreshold <- function(data, threshold, op = '>', diff = FALSE, start = NULL, end = NULL, time_dim = 'ftime', na.rm = FALSE, ncores = NULL) { + # Check 's2dv_cube' if (!inherits(data, 's2dv_cube')) { - stop("Parameter 'data' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") + stop("Parameter 'data' must be of the class 's2dv_cube'.") } - # when subsetting is needed, dimensions are also needed: + # Dates subset if (!is.null(start) && !is.null(end)) { - if (is.null(dim(data$Dates$start))) { - if (length(data$Dates$start) != dim(data$data)[time_dim]) { - if (length(data$Dates$start) == - prod(dim(data$data)[time_dim] * dim(data$data)['sdate'])) { - dim(data$Dates$start) <- c(dim(data$data)[time_dim], - dim(data$data)['sdate']) - } - } else { - warning("Dimensions in 'data' element 'Dates$start' are missed and", - "all data would be used.") - } + if (is.null(dim(data$attrs$Dates))) { + warning("Dimensions in 'data' element 'attrs$Dates' are missed and ", + "all data would be used.") + start <- NULL + end <- NULL } } + if (length(op) == 1) { if (inherits(threshold, 's2dv_cube')) { threshold <- threshold$data @@ -94,15 +89,16 @@ CST_AccumulationExceedingThreshold <- function(data, threshold, op = '>', diff = } } - total <- AccumulationExceedingThreshold(data$data, dates = data$Dates[[1]], + total <- AccumulationExceedingThreshold(data$data, dates = data$attrs$Dates, threshold = threshold, op = op, diff = diff, start = start, end = end, time_dim = time_dim, na.rm = na.rm, ncores = ncores) data$data <- total if (!is.null(start) && !is.null(end)) { - data$Dates <- SelectPeriodOnDates(dates = data$Dates$start, - start = start, end = end, - time_dim = time_dim, ncores = ncores) + data$attrs$Dates <- SelectPeriodOnDates(dates = data$attrs$Dates, + start = start, end = end, + time_dim = time_dim, + ncores = ncores) } return(data) } diff --git a/R/MergeRefToExp.R b/R/MergeRefToExp.R index 216dc8e831a8448909a7f1e2284519e216e660e2..fa3dcaf8415b8a36c659b36f0247bd74bb87b308 100644 --- a/R/MergeRefToExp.R +++ b/R/MergeRefToExp.R @@ -46,14 +46,14 @@ #'dim(data_dates) <- c(ftime = 154, sdate = 2) #'data <- NULL #'data$data <- array(1:(2*154*2), c(ftime = 154, sdate = 2, member= 2)) -#'data$Dates$start <- data_dates +#'data$attrs$Dates<- data_dates #'class(data) <- 's2dv_cube' #'ref_dates <- seq(as.Date("01-01-1993", "%d-%m-%Y", tz = 'UTC'), #' as.Date("01-12-1994","%d-%m-%Y", tz = 'UTC'), "day") #'dim(ref_dates) <- c(ftime = 350, sdate = 2) #'ref <- NULL #'ref$data <- array(1001:1700, c(ftime = 350, sdate = 2)) -#'ref$Dates$start <- ref_dates +#'ref$attrs$Dates <- ref_dates #'class(ref) <- 's2dv_cube' #'new_data <- CST_MergeRefToExp(data1 = ref, data2 = data, #' start1 = list(21, 6), end1 = list(30, 6), @@ -61,63 +61,87 @@ #' #'@import multiApply #'@importFrom ClimProjDiags Subset +#'@importFrom s2dv InsertDim #'@export CST_MergeRefToExp <- 
function(data1, data2, start1, end1, start2, end2, time_dim = 'ftime', sdate_dim = 'sdate', ncores = NULL) { + # Check 's2dv_cube' if (!inherits(data1, 's2dv_cube')) { - stop("Parameter 'ref' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") + stop("Parameter 'ref' must be of the class 's2dv_cube'.") } if (!inherits(data2, 's2dv_cube')) { - stop("Parameter 'data' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") - } - # when subsetting is needed, dimensions are also needed: - if (is.null(dim(data1$Dates$start))) { - if (length(data1$Dates$start) != dim(data1$data)[time_dim]) { - if (length(data1$Dates$start) == - prod(dim(data1$data)[time_dim] * dim(data1$data)['sdate'])) { - dim(data1$Dates$start) <- c(dim(data1$data)[time_dim], - dim(data1$data)['sdate']) - } else { - warning("Dimensions in 'data' element 'data$Dates$start' are missed and", - "all data would be used.") - } + stop("Parameter 'data' must be of the class 's2dv_cube'.") + } + # Dates subset of data1 + dates1 <- NULL + if (!is.null(start1) && !is.null(end1)) { + if (is.null(dim(data1$attrs$Dates))) { + warning("Dimensions in 'data1' element 'attrs$Dates' are missed and ", + "all data would be used.") + start <- NULL + end <- NULL + } else { + dates1 <- data1$attrs$Dates } } - # when subsetting is needed, dimensions are also needed: - if (is.null(dim(data2$Dates$start))) { - if (length(data2$Dates$start) != dim(data2$data)[time_dim]) { - if (length(data2$Dates$start) == - prod(dim(data2$data)[time_dim] * dim(data2$data)['sdate'])) { - dim(data2$Dates$start) <- c(dim(data2$data)[time_dim], - dim(data2$data)['sdate']) - } else { - warning("Dimensions in 'data' element 'data$Dates$start' are missed and ", - "all data would be used.") - } + # Dates subset of data2 + dates2 <- NULL + if (!is.null(start2) && !is.null(end2)) { + if (is.null(dim(data2$attrs$Dates))) { + warning("Dimensions in 'data2' element 'attrs$Dates' are missed and ", + "all data would be used.") + start <- NULL + end <- NULL + } else { + dates2 <- data2$attrs$Dates } } - data1$data <- MergeRefToExp(data1 = data1$data, dates1 = data1$Dates[[1]], + + data1$data <- MergeRefToExp(data1 = data1$data, dates1 = dates1, start1 = start1, end1 = end1, - data2 = data2$data, dates2 = data2$Dates[[1]], + data2 = data2$data, dates2 = dates2, start2, end2, time_dim = time_dim, sdate_dim = sdate_dim, ncores = ncores) - dates1 <- SelectPeriodOnDates(data1$Dates[[1]], start = start1, - end = end1, - time_dim = time_dim) - dates2 <- SelectPeriodOnDates(data2$Dates[[1]], - start = start2, - end = end2, time_dim = time_dim) -# TO DO CONCATENATE DATES - res <- Apply(list(dates1, dates2), target_dims = time_dim, + if (!is.null(dates1)) { + data1$attrs$Dates <- SelectPeriodOnDates(dates1, start = start1, end = end1, + time_dim = time_dim) + } + if (!is.null(dates2)) { + data2$attrs$Dates <- SelectPeriodOnDates(dates2, start = start2, + end = end2, time_dim = time_dim) + } + + # TO DO CONCATENATE DATES + remove_dates1_dim <- FALSE + remove_dates2_dim <- FALSE + if (!is.null(data1$attrs$Dates) & !is.null(data2$attrs$Dates)) { + if (is.null(dim(data1$attrs$Dates))) { + remove_dates1_dim <- TRUE + dim(data1$attrs$Dates) <- length(data1$attrs$Dates) + names(dim(data1$attrs$Dates)) <- time_dim + } + if (is.null(dim(data2$attrs$Dates))) { + remove_dates2_dim <- TRUE + dim(data2$attrs$Dates) <- length(data2$attrs$Dates) + names(dim(data2$attrs$Dates)) <- time_dim + } + } + res <- Apply(list(data1$attrs$Dates, data2$attrs$Dates), target_dims = 
time_dim, c, output_dims = time_dim, ncores = ncores)$output1 - if (inherits(data1$Dates[[1]], 'Date')) { - data1$Dates <- as.Date(res, origin = '1970-01-01') + if (inherits(data1$attrs$Dates, 'Date')) { + data1$attrs$Dates <- as.Date(res, origin = '1970-01-01') } else { - data1$Dates <- as.POSIXct(res*3600*24, origin = '1970-01-01', tz = 'UTC') + data1$attrs$Dates <- as.POSIXct(res*3600*24, origin = '1970-01-01', tz = 'UTC') } + + if (remove_dates1_dim) { + dim(data1$attrs$Dates) <- NULL + } + if (remove_dates2_dim) { + dim(data2$attrs$Dates) <- NULL + } + return(data1) } @@ -131,7 +155,7 @@ CST_MergeRefToExp <- function(data1, data2, start1, end1, start2, end2, #'steps. #' #'@param data1 A multidimensional array with named dimensions. -#'@param dates1 a vector of dates or a multidimensional array of dates with +#'@param dates1 A vector of dates or a multidimensional array of dates with #' named dimensions matching the dimensions on parameter 'data1'. #'@param data2 A multidimensional array with named dimensions. #'@param dates2 A vector of dates or a multidimensional array of dates with @@ -151,7 +175,8 @@ CST_MergeRefToExp <- function(data1, data2, start1, end1, start2, end2, #'@param time_dim A character string indicating the name of the temporal #' dimension. By default, it is set to 'ftime'. More than one dimension name #' matching the dimensions provided in the object \code{data$data} can be -#' specified. This dimension is required to subset the data in a requested period. +#' specified. This dimension is required to subset the data in a requested +#' period. #'@param sdate_dim A character string indicating the name of the dimension in #' which the initialization dates are stored. #'@param ncores An integer indicating the number of cores to use in parallel @@ -172,15 +197,18 @@ CST_MergeRefToExp <- function(data1, data2, start1, end1, start2, end2, #'data <- array(1:(2*154*2), c(time = 154, sdate = 2, member= 2)) #'new_data <- MergeRefToExp(data1 = ref, dates1 = ref_dates, start1 = list(21, 6), #' end1 = list(30, 6), data2 = data, dates2 = data_dates, -#' start2 = list(1, 7), end = list(21, 9)) +#' start2 = list(1, 7), end = list(21, 9), +#' time_dim = 'time') #' #'@import multiApply #'@importFrom ClimProjDiags Subset #'@importFrom s2dv InsertDim #'@export -MergeRefToExp <- function(data1, dates1, start1, end1, data2, dates2, start2, end2, - time_dim = 'time', sdate_dim = 'sdate', - ncores = NULL) { +MergeRefToExp <- function(data1, dates1, start1, end1, data2, dates2, start2, + end2, time_dim = 'ftime', sdate_dim = 'sdate', + ncores = NULL) { + # Input checks + # data if (!is.array(data1)) { dim(data1) <- c(length(data1)) names(dim(data1)) <- time_dim @@ -189,20 +217,24 @@ MergeRefToExp <- function(data1, dates1, start1, end1, data2, dates2, start2, en dim(data2) <- c(length(data2)) names(dim(data2)) <- time_dim } - if (is.null(dim(dates1))) { - warning("Dimensions in 'dates1' element are missed and ", - "all data would be used.") - dim(dates1) <- length(dates1) - names(dim(dates1)) <- time_dim - } - if (is.null(dim(dates2))) { - warning("Dimensions in 'dates2' element are missed and ", - "all data would be used.") - dim(dates2) <- length(dates2) - names(dim(dates2)) <- time_dim + # dates + if (!is.null(dates1) & !is.null(dates2)) { + if (is.null(dim(dates1))) { + warning("Dimensions in 'dates1' element are missed and ", + "all data would be used.") + dim(dates1) <- length(dates1) + names(dim(dates1)) <- time_dim + } + if (is.null(dim(dates2))) { + warning("Dimensions in 'dates2' 
element are missed and ", + "all data would be used.") + dim(dates2) <- length(dates2) + names(dim(dates2)) <- time_dim + } + data1 <- SelectPeriodOnData(data1, dates = dates1, start = start1, + end = end1, time_dim = time_dim, ncores = ncores) } - data1 <- SelectPeriodOnData(data1, dates = dates1, start = start1, - end = end1, time_dim = time_dim, ncores = ncores) + # Check if data2 has dimension sdate_dim and it should be added to data1: if ((sdate_dim %in% names(dim(data2))) && dim(data2)[sdate_dim] > 1 && !sdate_dim %in% names(dim(data1))) { @@ -220,8 +252,8 @@ MergeRefToExp <- function(data1, dates1, start1, end1, data2, dates2, start2, en dif_dims <- which(names(dim(data2)) %in% names(dim(data1)) == FALSE) if (length(dif_dims) > 0) { for (i in dif_dims) { - data1 <- s2dv::InsertDim(data1, posdim = i, lendim = dim(data2)[i], - name = names(dim(data2))[i]) + data1 <- InsertDim(data1, posdim = i, lendim = dim(data2)[i], + name = names(dim(data2))[i]) } } } @@ -230,13 +262,15 @@ MergeRefToExp <- function(data1, dates1, start1, end1, data2, dates2, start2, en dif_dims <- which(names(dim(data1)) %in% names(dim(data2)) == FALSE) if (length(dif_dims) > 0) { for (i in dif_dims) { - data2 <- s2dv::InsertDim(data2, posdim = i, lendim = dim(data1)[i], - name = names(dim(data1))[i]) + data2 <- InsertDim(data2, posdim = i, lendim = dim(data1)[i], + name = names(dim(data1))[i]) } } } - data2 <- SelectPeriodOnData(data2, dates = dates2, start = start2, - end = end2, time_dim = time_dim, ncores = ncores) + if (!is.null(dates2)) { + data2 <- SelectPeriodOnData(data2, dates = dates2, start = start2, + end = end2, time_dim = time_dim, ncores = ncores) + } data1 <- Apply(list(data1, data2), target_dims = time_dim, fun = 'c', output_dims = time_dim, ncores = ncores)$output1 return(data1) diff --git a/R/PeriodAccumulation.R b/R/PeriodAccumulation.R index 0b3fde5a909e212859875e07a70fb79a424410bf..d181d8eeaa71100948ffa1594fce8d6ebadd376f 100644 --- a/R/PeriodAccumulation.R +++ b/R/PeriodAccumulation.R @@ -41,53 +41,50 @@ #'TP <- CST_PeriodAccumulation(exp) #'exp$data <- array(rnorm(5 * 3 * 214 * 2), #' c(memb = 5, sdate = 3, ftime = 214, lon = 2)) -#'exp$Dates[[1]] <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), -#' as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), -#' seq(as.Date("01-05-2001", format = "%d-%m-%Y"), -#' as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), -#' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), -#' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) +#'exp$attrs$Dates <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), +#' as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), +#' seq(as.Date("01-05-2001", format = "%d-%m-%Y"), +#' as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), +#' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), +#' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) #'SprR <- CST_PeriodAccumulation(exp, start = list(21, 4), end = list(21, 6)) #'dim(SprR$data) -#'head(SprR$Dates) +#'head(SprR$attrs$Dates) #'HarR <- CST_PeriodAccumulation(exp, start = list(21, 8), end = list(21, 10)) #'dim(HarR$data) -#'head(HarR$Dates) +#'head(HarR$attrs$Dates) #' #'@import multiApply #'@export CST_PeriodAccumulation <- function(data, start = NULL, end = NULL, time_dim = 'ftime', na.rm = FALSE, ncores = NULL) { + # Check 's2dv_cube' if (!inherits(data, 's2dv_cube')) { - stop("Parameter 'data' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") + stop("Parameter 'data' must be of the class 's2dv_cube'.") } - # when 
subsetting is needed, dimensions are also needed: + # Dates subset if (!is.null(start) && !is.null(end)) { - if (is.null(dim(data$Dates$start))) { - if (length(data$Dates$start) != dim(data$data)[time_dim]) { - if (length(data$Dates$start) == - prod(dim(data$data)[time_dim] * dim(data$data)['sdate'])) { - dim(data$Dates$start) <- c(dim(data$data)[time_dim], - dim(data$data)['sdate']) - } else { - warning("Dimensions in 'data' element 'Dates$start' are missed and ", - "all data would be used.") - } - } + if (is.null(dim(data$attrs$Dates))) { + warning("Dimensions in 'data' element 'attrs$Dates' are missed and ", + "all data would be used.") + start <- NULL + end <- NULL } } - total <- PeriodAccumulation(data$data, data$Dates[[1]], start, end, - time_dim = time_dim, na.rm = na.rm, ncores = ncores) + + total <- PeriodAccumulation(data$data, dates = data$attrs$Dates, start, end, + time_dim = time_dim, na.rm = na.rm, ncores = ncores) data$data <- total if (!is.null(start) && !is.null(end)) { - data$Dates <- SelectPeriodOnDates(dates = data$Dates[[1]], - start = start, end = end, - time_dim = time_dim, ncores = ncores) + data$attrs$Dates <- SelectPeriodOnDates(dates = data$attrs$Dates, + start = start, end = end, + time_dim = time_dim, + ncores = ncores) } return(data) } + #'Period Accumulation on multidimensional array objects #' #'Period Accumulation computes the sum (accumulation) of a given variable in a @@ -144,8 +141,8 @@ CST_PeriodAccumulation <- function(data, start = NULL, end = NULL, #'@import multiApply #'@export PeriodAccumulation <- function(data, dates = NULL, start = NULL, end = NULL, - time_dim = 'time', na.rm = FALSE, - ncores = NULL) { + time_dim = 'time', na.rm = FALSE, + ncores = NULL) { if (is.null(data)) { stop("Parameter 'data' cannot be NULL.") } diff --git a/R/PeriodMean.R b/R/PeriodMean.R index 97f99c3c309dad36392a23d214148c188a3d1290..85a12a74272726c3694425fe1728181409d956b1 100644 --- a/R/PeriodMean.R +++ b/R/PeriodMean.R @@ -37,10 +37,10 @@ #'exp <- NULL #'exp$data <- array(rnorm(45), dim = c(member = 7, ftime = 8)) #'class(exp) <- 's2dv_cube' -#'exp$Dates$start <- c(seq(as.Date("01-07-1993", "%d-%m-%Y", tz = 'UTC'), -#' as.Date("01-08-1993","%d-%m-%Y", tz = 'UTC'), "day"), -#' seq(as.Date("01-07-1994", "%d-%m-%Y", tz = 'UTC'), -#' as.Date("01-08-1994","%d-%m-%Y", tz = 'UTC'), "day")) +#'exp$attrs$Dates <- c(seq(as.Date("01-07-1993", "%d-%m-%Y", tz = 'UTC'), +#' as.Date("01-08-1993","%d-%m-%Y", tz = 'UTC'), "day"), +#' seq(as.Date("01-07-1994", "%d-%m-%Y", tz = 'UTC'), +#' as.Date("01-08-1994","%d-%m-%Y", tz = 'UTC'), "day")) #'SA <- CST_PeriodMean(exp) #' #'@import multiApply @@ -48,32 +48,28 @@ CST_PeriodMean <- function(data, start = NULL, end = NULL, time_dim = 'ftime', na.rm = FALSE, ncores = NULL) { -# Consider to add an option for providing tx and tn in data + # Check 's2dv_cube' if (!inherits(data, 's2dv_cube')) { - stop("Parameter 'data' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") + stop("Parameter 'data' must be of the class 's2dv_cube'.") } - # when subsetting is needed, dimensions are also needed: + # Dates subset if (!is.null(start) && !is.null(end)) { - if (is.null(dim(data$Dates$start))) { - if (length(data$Dates$start) != dim(data$data)[time_dim]) { - if (length(data$Dates$start) == - prod(dim(data$data)[time_dim] * dim(data$data)['sdate'])) { - dim(data$Dates$start) <- c(dim(data$data)[time_dim], - dim(data$data)['sdate']) - } else { - warning("Dimensions in 'data' element 'Dates$start' are missed/unmatched. 
All data would be used.") - } - } + if (is.null(dim(data$attrs$Dates))) { + warning("Dimensions in 'data' element 'attrs$Dates' are missed and ", + "all data would be used.") + start <- NULL + end <- NULL } } - total <- PeriodMean(data = data$data, dates = data$Dates[[1]], start, end, + + total <- PeriodMean(data = data$data, dates = data$attrs$Dates, start, end, time_dim = time_dim, na.rm = na.rm, ncores = ncores) data$data <- total if (!is.null(start) && !is.null(end)) { - data$Dates <- SelectPeriodOnDates(dates = data$Dates[[1]], - start = start, end = end, - time_dim = time_dim, ncores = ncores) + data$attrs$Dates <- SelectPeriodOnDates(dates = data$attrs$Dates, + start = start, end = end, + time_dim = time_dim, + ncores = ncores) } return(data) } diff --git a/R/QThreshold.R b/R/QThreshold.R index 8eb950a76a255138e5c930c3a2d4f0414496c605..e86b95a0dcb634326356a6283db8d837e1b3c185 100644 --- a/R/QThreshold.R +++ b/R/QThreshold.R @@ -59,50 +59,45 @@ #'exp_probs <- CST_QThreshold(exp, threshold) #'exp$data <- array(rnorm(5 * 3 * 214 * 2), #' c(member = 5, sdate = 3, ftime = 214, lon = 2)) -#'exp$Dates[[1]] <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), -#' as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), -#' seq(as.Date("01-05-2001", format = "%d-%m-%Y"), -#' as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), -#' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), -#' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) +#'exp$attrs$Dates <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), +#' as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), +#' seq(as.Date("01-05-2001", format = "%d-%m-%Y"), +#' as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), +#' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), +#' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) #'exp_probs <- CST_QThreshold(exp, threshold) #' #'@import multiApply #'@importFrom ClimProjDiags Subset #'@export CST_QThreshold <- function(data, threshold, start = NULL, end = NULL, - time_dim = 'ftime', memb_dim = 'member', sdate_dim = 'sdate', - ncores = NULL) { - if (!inherits(data, 's2dv_cube')) { - stop("Parameter 'data' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") + time_dim = 'ftime', memb_dim = 'member', + sdate_dim = 'sdate', ncores = NULL) { + # Check 's2dv_cube' + if (!inherits(data, 's2dv_cube')) { + stop("Parameter 'data' must be of the class 's2dv_cube'.") } - # when subsetting is needed, dimensions are also needed: + # Dates subset if (!is.null(start) && !is.null(end)) { - if (is.null(dim(data$Dates$start))) { - if (length(data$Dates$start) != dim(data$data)[time_dim]) { - if (length(data$Dates$start) == - prod(dim(data$data)[time_dim] * dim(data$data)[sdate_dim])) { - dim(data$Dates$start) <- c(dim(data$data)[time_dim], - dim(data$data)[sdate_dim]) - } else { - warning("Dimensions in 'data' element 'Dates$start' are missed and ", - "all data would be used.") - } - } + if (is.null(dim(data$attrs$Dates))) { + warning("Dimensions in 'data' element 'attrs$Dates' are missed and ", + "all data would be used.") + start <- NULL + end <- NULL } } if (inherits(threshold, 's2dv_cube')) { threshold <- threshold$data } - probs <- QThreshold(data$data, threshold, data$Dates[[1]], start, end, - time_dim = time_dim, memb_dim = memb_dim, + probs <- QThreshold(data$data, threshold, dates = data$attrs$Dates, + start, end, time_dim = time_dim, memb_dim = memb_dim, sdate_dim = sdate_dim, ncores = ncores) data$data <- probs if (!is.null(start) && !is.null(end)) { - data$Dates 
<- SelectPeriodOnDates(dates = data$Dates[[1]], - start = start, end = end, - time_dim = time_dim, ncores = ncores) + data$attrs$Dates <- SelectPeriodOnDates(dates = data$attrs$Dates, + start = start, end = end, + time_dim = time_dim, + ncores = ncores) } return(data) } @@ -178,7 +173,6 @@ QThreshold <- function(data, threshold, dates = NULL, start = NULL, end = NULL, if (!is.numeric(data)) { stop("Parameter 'data' must be numeric.") } - if (!is.array(data)) { dim(data) <- c(length(data), 1) names(dim(data)) <- c(memb_dim, sdate_dim) diff --git a/R/SelectPeriodOnData.R b/R/SelectPeriodOnData.R index 3c162dd746340feb909a9a19ea615ee9081a94a1..b9cf8ac3966bca04dee0c806b0c2d3421de259f2 100644 --- a/R/SelectPeriodOnData.R +++ b/R/SelectPeriodOnData.R @@ -24,67 +24,63 @@ #'exp <- NULL #'exp$data <- array(rnorm(5 * 3 * 214 * 2), #' c(memb = 5, sdate = 3, ftime = 214, lon = 2)) -#'exp$Dates$start <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), -#' as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), -#' seq(as.Date("01-05-2001", format = "%d-%m-%Y"), -#' as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), -#' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), -#' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) +#'exp$attrs$Dates <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), -#' as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), wait +#' as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), +#' seq(as.Date("01-05-2001", format = "%d-%m-%Y"), +#' as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), +#' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), +#' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) +#'dim(exp$attrs$Dates) <- c(ftime = 214, sdate = 3) #'class(exp) <- 's2dv_cube' #'Period <- CST_SelectPeriodOnData(exp, start = list(21, 6), end = list(21, 9)) -#' #'@import multiApply #'@export -CST_SelectPeriodOnData <- function(data, start, end, time_dim = 'ftime', ncores = NULL) { +CST_SelectPeriodOnData <- function(data, start, end, time_dim = 'ftime', + ncores = NULL) { + # Check 's2dv_cube' if (!inherits(data, 's2dv_cube')) { - stop("Parameter 'data' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") + stop("Parameter 'data' must be of the class 's2dv_cube'.") } - # when subsetting is needed, dimensions are also needed: + # Dates subset if (!is.null(start) && !is.null(end)) { - if (is.null(dim(data$Dates$start))) { - if (length(data$Dates$start) != dim(data$data)[time_dim]) { - if (length(data$Dates$start) == - prod(dim(data$data)[time_dim] * dim(data$data)['sdate'])) { - dim(data$Dates$start) <- c(dim(data$data)[time_dim], - dim(data$data)['sdate']) - } else { - warning("Dimensions in 'data' element 'Dates$start' are missed and ", - "all data would be used.") - } - } + if (is.null(dim(data$attrs$Dates))) { + warning("Dimensions in 'data' element 'attrs$Dates' are missed and ", + "all data would be used.") } } - res <- SelectPeriodOnData(data$data, data$Dates[[1]], + + res <- SelectPeriodOnData(data$data, data$attrs$Dates, start = start, end = end, time_dim = time_dim, ncores = ncores) data$data <- res if (!is.null(start) && !is.null(end)) { - data$Dates <- SelectPeriodOnDates(dates = data$Dates[[1]], - start = start, end = end, - time_dim = time_dim, ncores = ncores) + data$attrs$Dates <- SelectPeriodOnDates(dates = data$attrs$Dates, + start = start, end = end, + time_dim = time_dim, + ncores = ncores) } return(data) } - #' Select a period on Data on multidimensional array objects #' #' Auxiliary function to subset data for a specific period. 
#' -#'@param data A multidimensional array with named dimensions. -#'@param dates A vector of dates or a multidimensional array of dates with named -#' dimensions. -#'@param start An optional parameter to defined the initial date of the period -#' to select from the data by providing a list of two elements: the initial -#' date of the period and the initial month of the period. -#'@param end An optional parameter to defined the final date of the period to -#' select from the data by providing a list of two elements: the final day of -#' the period and the final month of the period. +#'@param data A multidimensional array with named dimensions with at least the +#' time dimension specified in parameter 'time_dim'. All common dimensions +#' with 'dates' parameter need to have the same length. +#'@param dates An array of dates with named dimensions with at least the time +#' dimension specified in parameter 'time_dim'. All common dimensions with +#' 'data' parameter need to have the same length. +#'@param start A list with two elements to define the initial date of the period +#' to select from the data. The first element is the initial day of the period +#' and the second element is the initial month of the period. +#'@param end A list with two elements to define the final date of the period +#' to select from the data. The first element is the final day of the period +#' and the second element is the final month of the period. #'@param time_dim A character string indicating the name of the dimension to -#' compute select the dates. By default, it is set to 'ftime'. More than one -#' dimension name matching the dimensions provided in the object -#' \code{data$data} can be specified. +#' compute select the dates. By default, it is set to 'ftime'. Parameters +#' 'data' and 'dates' #'@param ncores An integer indicating the number of cores to use in parallel #' computation. 
#' @@ -103,11 +99,11 @@ CST_SelectPeriodOnData <- function(data, start, end, time_dim = 'ftime', ncores #' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) #'dim(Dates) <- c(ftime = 214, sdate = 3) #'Period <- SelectPeriodOnData(data, Dates, start = list(21, 6), end = list(21, 9)) -#' #'@import multiApply +#'@importFrom ClimProjDiags Subset #'@export SelectPeriodOnData <- function(data, dates, start, end, - time_dim = 'ftime', ncores = NULL) { + time_dim = 'ftime', ncores = NULL) { if (is.null(dim(dates))) { dim(dates) <- length(dates) names(dim(dates)) <- time_dim @@ -116,6 +112,7 @@ SelectPeriodOnData <- function(data, dates, start, end, dim(data) <- length(data) names(dim(data)) <- time_dim } + res <- Apply(list(dates), target_dims = time_dim, fun = .position, ini_day = start[[1]], ini_month = start[[2]], @@ -149,6 +146,16 @@ SelectPeriodOnData <- function(data, dates, start, end, return(res) }, output_dims = time_dim, ncores = ncores)$output1 } + names_res <- sort(names(dim(res))) + names_data <- sort(names(dim(data))) + if (!all(names_res %in% names_data)) { + dim_remove <- names_res[-which(names_res %in% names_data)] + indices <- as.list(rep(1, length(dim_remove))) + res <- Subset(res, along = dim_remove, indices, drop = 'selected') + } + + pos <- match(names(dim(data)), names(dim(res))) + res <- aperm(res, pos) return(res) } diff --git a/R/SelectPeriodOnDates.R b/R/SelectPeriodOnDates.R index a9c8d9c834a9b8a4963e71d78455b3121737c926..09633dd605100f8fb964ad30acb1792d3f227cac 100644 --- a/R/SelectPeriodOnDates.R +++ b/R/SelectPeriodOnDates.R @@ -2,8 +2,7 @@ #' #' Auxiliary function to subset dates for a specific period. #' -#'@param dates A vector of dates or a multidimensional array of dates with named -#' dimensions. +#'@param dates An array of dates with named dimensions. #'@param start An optional parameter to defined the initial date of the period #' to select from the data by providing a list of two elements: the initial #' date of the period and the initial month of the period. 
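The hunk above appends a dimension-alignment step to SelectPeriodOnData(): dimensions returned by Apply() that are not present in 'data' are dropped with ClimProjDiags::Subset() (keeping their first index), and the remaining dimensions are permuted back into the order of 'data' with match() and aperm(). A minimal sketch of that match()/aperm() pattern on toy arrays (the array contents and dimension names below are illustrative, not taken from the package):

data <- array(rnorm(24), dim = c(memb = 2, ftime = 4, sdate = 3))
res <- array(rnorm(24), dim = c(ftime = 4, sdate = 3, memb = 2))  # e.g. an Apply() output with the time dimension first
pos <- match(names(dim(data)), names(dim(res)))  # c(3, 1, 2): where each dimension of 'data' sits in 'res'
res <- aperm(res, pos)                           # permuted back to the memb, ftime, sdate order
dim(res)                                         # 2 4 3, the same order as dim(data)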
@@ -30,15 +29,18 @@ #' as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), #' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), #' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) +#'dim(Dates) <- c(ftime = 214, sdate = 3) #'Period <- SelectPeriodOnDates(Dates, start = list(21, 6), end = list(21, 9)) #'@export SelectPeriodOnDates <- function(dates, start, end, time_dim = 'ftime', ncores = NULL) { - # TODO: consider NAs if (is.null(dim(dates))) { dim(dates) <- length(dates) names(dim(dates)) <- time_dim } + + # TODO: consider NAs + res <- Apply(list(dates), target_dims = time_dim, fun = .position, ini_day = start[[1]], ini_month = start[[2]], diff --git a/R/Threshold.R b/R/Threshold.R index 0117952a9ece29ca03caa68b1880c836455016e7..3122c1289394f9e6039c2e462b1776f448a49519 100644 --- a/R/Threshold.R +++ b/R/Threshold.R @@ -41,47 +41,44 @@ #'exp <- NULL #'exp$data <- array(rnorm(5 * 3 * 214 * 2), #' c(member = 5, sdate = 3, ftime = 214, lon = 2)) -#'exp$Dates$start <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), -#' as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), -#' seq(as.Date("01-05-2001", format = "%d-%m-%Y"), -#' as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), -#' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), -#' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) +#'exp$attrs$Dates <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), +#' as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), +#' seq(as.Date("01-05-2001", format = "%d-%m-%Y"), +#' as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), +#' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), +#' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) #'class(exp) <- 's2dv_cube' #'exp_probs <- CST_Threshold(exp, threshold, start = list(21, 4), end = list(21, 6)) #' #'@import multiApply #'@export CST_Threshold <- function(data, threshold, start = NULL, end = NULL, - time_dim = 'ftime', memb_dim = 'member', sdate_dim = 'sdate', - na.rm = FALSE, ncores = NULL) { + time_dim = 'ftime', memb_dim = 'member', + sdate_dim = 'sdate', na.rm = FALSE, + ncores = NULL) { + # Check 's2dv_cube' if (!inherits(data, 's2dv_cube')) { - stop("Parameter 'data' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") + stop("Parameter 'data' must be of the class 's2dv_cube'.") } - # when subsetting is needed, dimensions are also needed: + # Dates subset if (!is.null(start) && !is.null(end)) { - if (is.null(dim(data$Dates$start))) { - if (length(data$Dates$start) != dim(data$data)[time_dim]) { - if (length(data$Dates$start) == - prod(dim(data$data)[time_dim] * dim(data$data)[sdate_dim])) { - dim(data$Dates$start) <- c(dim(data$data)[time_dim], - dim(data$data)[sdate_dim]) - } else { - warning("Dimensions in 'data' element 'Dates$start' are missed and ", - "all data would be used.") - } - } + if (is.null(dim(data$attrs$Dates))) { + warning("Dimensions in 'data' element 'attrs$Dates' are missed and ", + "all data would be used.") + start <- NULL + end <- NULL } } - thres <- Threshold(data$data, threshold, data$Dates[[1]], start, end, + + thres <- Threshold(data$data, threshold, dates = data$attrs$Dates, start, end, time_dim = time_dim, memb_dim = memb_dim, sdate_dim = sdate_dim, na.rm = na.rm, ncores = ncores) data$data <- thres if (!is.null(start) && !is.null(end)) { - data$Dates <- SelectPeriodOnDates(dates = data$Dates[[1]], - start = start, end = end, - time_dim = time_dim, ncores = ncores) + data$attrs$Dates <- SelectPeriodOnDates(dates = data$attrs$Dates, + start = start, end = end, + 
time_dim = time_dim, + ncores = ncores) } return(data) } @@ -145,7 +142,6 @@ Threshold <- function(data, threshold, dates = NULL, start = NULL, end = NULL, if (!is.numeric(data)) { stop("Parameter 'data' must be numeric.") } - if (!is.array(data)) { dim(data) <- c(length(data), 1) names(dim(data)) <- c(memb_dim, sdate_dim) diff --git a/R/TotalSpellTimeExceedingThreshold.R b/R/TotalSpellTimeExceedingThreshold.R index 7531dee293c83db3b963ffcba639611cf7a54f62..3ee22a27b21114a970476974067366adca2ffc7c 100644 --- a/R/TotalSpellTimeExceedingThreshold.R +++ b/R/TotalSpellTimeExceedingThreshold.R @@ -54,12 +54,12 @@ #'exp <- NULL #'exp$data <- array(rnorm(5 * 3 * 214 * 2)*23, #' c(member = 5, sdate = 3, ftime = 214, lon = 2)) -#'exp$Dates$start <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), -#' as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), -#' seq(as.Date("01-05-2001", format = "%d-%m-%Y"), -#' as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), -#' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), -#' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) +#'exp$attrs$Dates <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), +#' as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), +#' seq(as.Date("01-05-2001", format = "%d-%m-%Y"), +#' as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), +#' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), +#' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) #'class(exp) <- 's2dv_cube' #'TTSET <- CST_TotalSpellTimeExceedingThreshold(exp, threshold = 23, spell = 3) #' @@ -69,25 +69,20 @@ CST_TotalSpellTimeExceedingThreshold <- function(data, threshold, spell, op = '> start = NULL, end = NULL, time_dim = 'ftime', ncores = NULL) { + # Check 's2dv_cube' if (!inherits(data, 's2dv_cube')) { - stop("Parameter 'data' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") + stop("Parameter 'data' must be of the class 's2dv_cube'.") } - # when subsetting is needed, dimensions are also needed: + # Dates subset if (!is.null(start) && !is.null(end)) { - if (is.null(dim(data$Dates$start))) { - if (length(data$Dates$start) != dim(data$data)[time_dim]) { - if (length(data$Dates$start) == - prod(dim(data$data)[time_dim] * dim(data$data)['sdate'])) { - dim(data$Dates$start) <- c(dim(data$data)[time_dim], - dim(data$data)['sdate']) - } else { - warning("Dimensions in 'data' element 'Dates$start' are missed and ", - "all data would be used.") - } - } + if (is.null(dim(data$attrs$Dates))) { + warning("Dimensions in 'data' element 'attrs$Dates' are missed and ", + "all data would be used.") + start <- NULL + end <- NULL } } + if (length(op) == 1) { if (inherits(threshold, 's2dv_cube')) { threshold <- threshold$data @@ -100,17 +95,18 @@ CST_TotalSpellTimeExceedingThreshold <- function(data, threshold, spell, op = '> threshold[[2]] <- threshold[[2]]$data } } - - - total <- TotalSpellTimeExceedingThreshold(data$data, data$Dates[[1]], - threshold = threshold, spell = spell, op = op, - start = start, end = end, time_dim = time_dim, + + total <- TotalSpellTimeExceedingThreshold(data$data, data$attrs$Dates, + threshold = threshold, spell = spell, + op = op, start = start, end = end, + time_dim = time_dim, ncores = ncores) data$data <- total if (!is.null(start) && !is.null(end)) { - data$Dates <- SelectPeriodOnDates(dates = data$Dates$start, - start = start, end = end, - time_dim = time_dim, ncores = ncores) + data$attrs$Dates <- SelectPeriodOnDates(dates = data$attrs$Dates, + start = start, end = end, + time_dim = time_dim, + ncores = 
ncores) } return(data) } diff --git a/R/TotalTimeExceedingThreshold.R b/R/TotalTimeExceedingThreshold.R index 01d7823faf0d5848f41cb6f1b8ff30e5e5b58146..ceda1eee6ee47ef0e08ad0a1649d93e986286df2 100644 --- a/R/TotalTimeExceedingThreshold.R +++ b/R/TotalTimeExceedingThreshold.R @@ -60,12 +60,12 @@ #'exp <- NULL #'exp$data <- array(abs(rnorm(5 * 3 * 214 * 2)*280), #' c(member = 5, sdate = 3, ftime = 214, lon = 2)) -#'exp$Dates$start <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), -#' as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), -#' seq(as.Date("01-05-2001", format = "%d-%m-%Y"), -#' as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), -#' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), -#' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) +#'exp$attrs$Dates <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), +#' as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), +#' seq(as.Date("01-05-2001", format = "%d-%m-%Y"), +#' as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), +#' seq(as.Date("01-05-2002", format = "%d-%m-%Y"), +#' as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) #'class(exp) <- 's2dv_cube' #'DOT <- CST_TotalTimeExceedingThreshold(exp, threshold = 280) #' @@ -75,25 +75,20 @@ CST_TotalTimeExceedingThreshold <- function(data, threshold, op = '>', start = NULL, end = NULL, time_dim = 'ftime', na.rm = FALSE, ncores = NULL) { + # Check 's2dv_cube' if (!inherits(data, 's2dv_cube')) { - stop("Parameter 'data' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") + stop("Parameter 'data' must be of the class 's2dv_cube'.") } - # when subsetting is needed, dimensions are also needed: + # Dates subset if (!is.null(start) && !is.null(end)) { - if (is.null(dim(data$Dates$start))) { - if (length(data$Dates$start) != dim(data$data)[time_dim]) { - if (length(data$Dates$start) == - prod(dim(data$data)[time_dim] * dim(data$data)['sdate'])) { - dim(data$Dates$start) <- c(dim(data$data)[time_dim], - dim(data$data)['sdate']) - } else { - warning("Dimensions in 'data' element 'Dates$start' are missed and ", - "all data would be used.") - } - } + if (is.null(dim(data$attrs$Dates))) { + warning("Dimensions in 'data' element 'attrs$Dates' are missed and ", + "all data would be used.") + start <- NULL + end <- NULL } } + if (length(op) == 1) { if (inherits(threshold, 's2dv_cube')) { threshold <- threshold$data @@ -106,15 +101,17 @@ CST_TotalTimeExceedingThreshold <- function(data, threshold, op = '>', threshold[[2]] <- threshold[[2]]$data } } - total <- TotalTimeExceedingThreshold(data$data, data$Dates[[1]], + total <- TotalTimeExceedingThreshold(data$data, dates = data$attrs$Dates, threshold = threshold, op = op, - start = start, end = end, time_dim = time_dim, - na.rm = na.rm, ncores = ncores) + start = start, end = end, + time_dim = time_dim, na.rm = na.rm, + ncores = ncores) data$data <- total if (!is.null(start) && !is.null(end)) { - data$Dates <- SelectPeriodOnDates(dates = data$Dates$start, - start = start, end = end, - time_dim = time_dim, ncores = ncores) + data$attrs$Dates <- SelectPeriodOnDates(dates = data$attrs$Dates, + start = start, end = end, + time_dim = time_dim, + ncores = ncores) } return(data) } diff --git a/R/WindCapacityFactor.R b/R/WindCapacityFactor.R index ee542ebe423fc54c57a114755e7905df9fa63214..8ed20844370f7036d2cee074df64eab79308ce18 100644 --- a/R/WindCapacityFactor.R +++ b/R/WindCapacityFactor.R @@ -10,8 +10,9 @@ #'piecewise approximating function to obtain a smooth power curve. 
Five #'different power curves that span different IEC classes can be selected (see #'below). -#'@references Lledó, Ll., Torralba, V., Soret, A., Ramon, J., & Doblas-Reyes, F. J. (2019). -#'Seasonal forecasts of wind power generation. Renewable Energy, 143, 91–100. https://doi.org/10.1016/j.renene.2019.04.135 +#'@references Lledó, Ll., Torralba, V., Soret, A., Ramon, J., & Doblas-Reyes, +#'F. J. (2019). Seasonal forecasts of wind power generation. +#'Renewable Energy, 143, 91–100. https://doi.org/10.1016/j.renene.2019.04.135 #'@references International Standard IEC 61400-1 (third ed.) (2005) #' #'@param wind An s2dv_cube object with instantaneous wind speeds expressed in m/s. @@ -39,48 +40,49 @@ #'@return An s2dv_cube object containing the Wind Capacity Factor (unitless). #' #'@examples -#'wind <- array(rweibull(n = 100, shape = 2, scale = 6), c(member = 10, lat = 2, lon = 5)) -#'wind <- CSTools::s2dv_cube(data = wind, lat = c(40, 41), lon = 1:5, -#' Variable = list(varName = 'sfcWind', level = 'Surface'), -#' Datasets = 'synthetic', when = Sys.time(), -#' Dates = list(start = '1990-01-01 00:00:00', end = '1990-01-01 00:00:00'), -#' source_file = NA) +#'wind <- NULL +#'wind$data <- array(rweibull(n = 100, shape = 2, scale = 6), +#' c(member = 10, lat = 2, lon = 5)) +#'wind$coords <- list(lat = c(40, 41), lon = 1:5) +#'variable <- list(varName = 'sfcWind', +#' metadata = list(sfcWind = list(level = 'Surface'))) +#'wind$attrs <- list(Variable = variable, Datasets = 'synthetic', +#' when = Sys.time(), Dates = '1990-01-01 00:00:00') +#'class(wind) <- 's2dv_cube' #'WCF <- CST_WindCapacityFactor(wind, IEC_class = "III") #' #'@export CST_WindCapacityFactor <- function(wind, IEC_class = c("I", "I/II", "II", "II/III", "III"), start = NULL, end = NULL, time_dim = 'ftime', ncores = NULL) { + # Check 's2dv_cube' if (!inherits(wind, 's2dv_cube')) { - stop("Parameter 'wind' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") + stop("Parameter 'wind' must be of the class 's2dv_cube'.") } - # when subsetting is needed, dimensions are also needed: + # Dates subset if (!is.null(start) && !is.null(end)) { - if (is.null(dim(wind$Dates$start))) { - if (length(wind$Dates$start) != dim(wind$data)[time_dim]) { - if (length(wind$Dates$start) == - prod(dim(wind$data)[time_dim] * dim(wind$data)['sdate'])) { - dim(wind$Dates$start) <- c(dim(wind$data)[time_dim], - dim(wind$data)['sdate']) - } else { - warning("Dimensions in 'data' element 'Dates$start' are missed and ", - "all data would be used.") - } - } + if (is.null(dim(wind$attrs$Dates))) { + warning("Dimensions in 'wind' element 'attrs$Dates' are missed and ", + "all data would be used.") + start <- NULL + end <- NULL } } - wind$data <- WindCapacityFactor(wind$data, IEC_class = IEC_class, dates = wind$Dates[[1]], - start = start, end = end, ncores = ncores) - if ('Variable' %in% names(wind)) { - if ('varName' %in% names(wind$Variable)) { - wind$Variable$varName <- 'WindCapacityFactor' + + WindCapacity <- WindCapacityFactor(wind = wind$data, IEC_class = IEC_class, + dates = wind$attrs$Dates, start = start, + end = end, ncores = ncores) + wind$data <- WindCapacity + if ('Variable' %in% names(wind$attrs)) { + if ('varName' %in% names(wind$attrs$Variable)) { + wind$attrs$Variable$varName <- 'WindCapacityFactor' } } if (!is.null(start) && !is.null(end)) { - wind$Dates <- SelectPeriodOnDates(dates = wind$Dates[[1]], - start = start, end = end, - time_dim = time_dim, ncores = ncores) + wind$attrs$Dates <- SelectPeriodOnDates(dates = 
wind$attrs$Dates, + start = start, end = end, + time_dim = time_dim, + ncores = ncores) } return(wind) } @@ -96,8 +98,9 @@ CST_WindCapacityFactor <- function(wind, IEC_class = c("I", "I/II", "II", "II/II #'piecewise approximating function to obtain a smooth power curve. Five #'different power curves that span different IEC classes can be selected (see #'below). -#'@references Lledó, Ll., Torralba, V., Soret, A., Ramon, J., & Doblas-Reyes, F. J. (2019). -#'Seasonal forecasts of wind power generation. Renewable Energy, 143, 91–100. https://doi.org/10.1016/j.renene.2019.04.135 +#'@references Lledó, Ll., Torralba, V., Soret, A., Ramon, J., & Doblas-Reyes, +#'F. J. (2019). Seasonal forecasts of wind power generation. +#'Renewable Energy, 143, 91–100. https://doi.org/10.1016/j.renene.2019.04.135 #'@references International Standard IEC 61400-1 (third ed.) (2005) #' #'@param wind A multidimensional array, vector or scalar with instantaneous wind @@ -150,17 +153,18 @@ WindCapacityFactor <- function(wind, IEC_class = c("I", "I/II", "II", "II/III", ) pc_file <- system.file("power_curves", pc_files[IEC_class], package = "CSIndicators", mustWork = T) pc <- read_pc(pc_file) - if (!is.null(dates)) { - if (!is.null(start) && !is.null(end)) { - if (!any(c(is.list(start), is.list(end)))) { - stop("Parameter 'start' and 'end' must be lists indicating the ", - "day and the month of the period start and end.") - } - wind <- SelectPeriodOnData(wind, dates, start, end, - time_dim = time_dim, ncores = ncores) - } - } + if (!is.null(dates)) { + if (!is.null(start) && !is.null(end)) { + if (!any(c(is.list(start), is.list(end)))) { + stop("Parameter 'start' and 'end' must be lists indicating the ", + "day and the month of the period start and end.") + } + wind <- SelectPeriodOnData(wind, dates, start, end, + time_dim = time_dim, ncores = ncores) + } + } cf <- wind2CF(wind, pc) + dim(cf) <- dim(wind) return(cf) } diff --git a/R/WindPowerDensity.R b/R/WindPowerDensity.R index bbdd07d87c6b82ce67544366ad042fe273051d99..5691bb55c052750f6bc2d9e7f2c887ad2962d963 100644 --- a/R/WindPowerDensity.R +++ b/R/WindPowerDensity.R @@ -30,47 +30,47 @@ #'@return An s2dv_cube object containing Wind Power Density expressed in W/m^2. 
#' #'@examples -#'wind <- array(rweibull(n = 100, shape = 2, scale = 6), c(member = 10, lat = 2, lon = 5)) -#'wind <- CSTools::s2dv_cube(data = wind, lat = c(40, 41), lon = 1:5, -#' Variable = list(varName = 'sfcWind', level = 'Surface'), -#' Datasets = 'synthetic', when = Sys.time(), -#' Dates = list(start = '1990-01-01 00:00:00', end = '1990-01-01 00:00:00'), -#' source_file = NA) -#'WPD <- CST_WindPowerDensity(wind) +#'wind <- NULL +#'wind$data <- array(rweibull(n = 100, shape = 2, scale = 6), +#' c(member = 10, lat = 2, lon = 5)) +#'wind$coords <- list(lat = c(40, 41), lon = 1:5) +#'variable <- list(varName = 'sfcWind', +#' metadata = list(sfcWind = list(level = 'Surface'))) +#'wind$attrs <- list(Variable = variable, Datasets = 'synthetic', +#' when = Sys.time(), Dates = '1990-01-01 00:00:00') +#'class(wind) <- 's2dv_cube' +#'WPD <- CST_WindPowerDensity(wind) #' #'@export CST_WindPowerDensity <- function(wind, ro = 1.225, start = NULL, end = NULL, time_dim = 'ftime', ncores = NULL) { + # Check 's2dv_cube' if (!inherits(wind, 's2dv_cube')) { - stop("Parameter 'wind' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") + stop("Parameter 'wind' must be of the class 's2dv_cube'.") } - # when subsetting is needed, dimensions are also needed: + # Dates subset if (!is.null(start) && !is.null(end)) { - if (is.null(dim(wind$Dates$start))) { - if (length(wind$Dates$start) != dim(wind$data)[time_dim]) { - if (length(wind$Dates$start) == - prod(dim(wind$data)[time_dim] * dim(wind$data)['sdate'])) { - dim(wind$Dates$start) <- c(dim(wind$data)[time_dim], - dim(wind$data)['sdate']) - } else { - warning("Dimensions in 'data' element 'Dates$start' are missed and ", - "all data would be used.") - } - } + if (is.null(dim(wind$attrs$Dates))) { + warning("Dimensions in 'wind' element 'attrs$Dates' are missed and ", + "all data would be used.") + start <- NULL + end <- NULL } } - wind$data <- WindPowerDensity(wind$data, ro = ro, dates = wind$Dates[[1]], - start = start, end = end, ncores = ncores) - if ('Variable' %in% names(wind)) { - if ('varName' %in% names(wind$Variable)) { - wind$Variable$varName <- 'WindPowerDensity' + WindPower <- WindPowerDensity(wind = wind$data, ro = ro, + dates = wind$attrs$Dates, start = start, + end = end, ncores = ncores) + wind$data <- WindPower + if ('Variable' %in% names(wind$attrs)) { + if ('varName' %in% names(wind$attrs$Variable)) { + wind$attrs$Variable$varName <- 'WindPowerDensity' } } if (!is.null(start) && !is.null(end)) { - wind$Dates <- SelectPeriodOnDates(dates = wind$Dates[[1]], - start = start, end = end, - time_dim = time_dim, ncores = ncores) + wind$attrs$Dates <- SelectPeriodOnDates(dates = wind$attrs$Dates, + start = start, end = end, + time_dim = time_dim, + ncores = ncores) } return(wind) } @@ -115,8 +115,8 @@ CST_WindPowerDensity <- function(wind, ro = 1.225, start = NULL, end = NULL, #'WPD <- WindPowerDensity(wind) #' #'@export -WindPowerDensity <- function(wind, ro = 1.225, dates = NULL, start = NULL, end = NULL, - time_dim = 'time', ncores = NULL) { +WindPowerDensity <- function(wind, ro = 1.225, dates = NULL, start = NULL, + end = NULL, time_dim = 'time', ncores = NULL) { if (!is.null(dates)) { if (!is.null(start) && !is.null(end)) { if (!any(c(is.list(start), is.list(end)))) { @@ -127,6 +127,5 @@ WindPowerDensity <- function(wind, ro = 1.225, dates = NULL, start = NULL, end = time_dim = time_dim, ncores = ncores) } } - return(0.5 * ro * wind^3) } diff --git a/R/zzz.R b/R/zzz.R index 
52fa2cd557d0d25a7af8269cf46405db97a64359..9b0c6488fb9a91a5c8b5e86bffaa454a7be48379 100644 --- a/R/zzz.R +++ b/R/zzz.R @@ -36,7 +36,8 @@ read_pc <- function(file) { pc$points <- rbind(c(0, 0), read.delim(file, comment.char = "#")) # Create an approximating function - pc$fun <- approxfun(pc$points$WindSpeed, pc$points$Power, method = "linear", yleft = NA, yright = 0) + pc$fun <- approxfun(pc$points$WindSpeed, pc$points$Power, method = "linear", + yleft = NA, yright = 0) # Get the rated power from the power values pc$attr$RatedPower <- max(pc$points$Power) @@ -47,16 +48,16 @@ read_pc <- function(file) { #======================= # Evaluate the linear piecewise approximation function with the wind speed inputs to get wind power #======================= -wind2power <- function(wind, pc) -{ power <- pc$fun(wind) +wind2power <- function(wind, pc) { + power <- pc$fun(wind) return(power) } #======================= # Convert wind to power, and divide by rated power to obtain Capacity Factor values #======================= -wind2CF <- function(wind, pc) -{ power <- wind2power(wind, pc) +wind2CF <- function(wind, pc) { + power <- wind2power(wind, pc) CF <- power / pc$attr$RatedPower return(CF) } diff --git a/inst/doc/paper-figure-PlotForecastPDF.R b/inst/doc/paper-figure-PlotForecastPDF.R index e8320b6c9e2c1ea383fc8b04fd20b40d2480f24f..b8867a61c882f4cb6453a64226c0539aae74aa8a 100644 --- a/inst/doc/paper-figure-PlotForecastPDF.R +++ b/inst/doc/paper-figure-PlotForecastPDF.R @@ -34,9 +34,9 @@ c(hcst, hcst_ref) %<-% CST_Load(var = 'prlr', leadtimemin = 1, leadtimemax = 214, nmember = 25, output = "lonlat") hcst$data <- hcst$data * 3600 * 24 * 1000 -attributes(hcst$Variable)$units <- 'mm' +attributes(hcst$attrs$Variable)$units <- 'mm' hcst_ref$data <- hcst_ref$data * 3600 * 24 * 1000 -attributes(hcst_ref$Variable)$units <- 'mm' +attributes(hcst_ref$attrs$Variable)$units <- 'mm' c(fcst, obs) %<-% CST_Load(var = 'prlr', @@ -49,9 +49,9 @@ c(fcst, obs) %<-% CST_Load(var = 'prlr', leadtimemin = 1, leadtimemax = 214, nmember = 50, output = "lonlat") fcst$data <- fcst$data * 1000 * 3600 * 24 -attributes(fcst$Variable)$units <- 'mm' +attributes(fcst$attrs$Variable)$units <- 'mm' obs$data <- obs$data * 1000 * 3600 * 24 -attributes(obs$Variable)$units <- 'mm' +attributes(obs$attrs$Variable)$units <- 'mm' fcst_QM <- CST_QuantileMapping(exp = hcst, diff --git a/man/AbsToProbs.Rd b/man/AbsToProbs.Rd index 9b79296bfa5b152c6c75bbac08b05bf33828a1ac..7717c91e629da1345eb25df96f08054620776380 100644 --- a/man/AbsToProbs.Rd +++ b/man/AbsToProbs.Rd @@ -9,7 +9,7 @@ AbsToProbs( dates = NULL, start = NULL, end = NULL, - time_dim = "time", + time_dim = "ftime", memb_dim = "member", sdate_dim = "sdate", ncores = NULL @@ -18,9 +18,11 @@ AbsToProbs( \arguments{ \item{data}{A multidimensional array with named dimensions.} -\item{dates}{A vector of dates or a multidimensional array of dates with named -dimensions matching the dimensions on parameter 'data'. By default it is -NULL, to select a period this parameter must be provided.} +\item{dates}{An optional parameter containing a vector of dates or a +multidimensional array of dates with named dimensions matching the +dimensions on parameter 'data'. By default it is NULL, to select a period +this parameter must be provided. 
All common dimensions with 'data' need to +have the same length.} \item{start}{An optional parameter to define the initial date of the period to select from the data by providing a list of two elements: the initial @@ -59,16 +61,19 @@ probabilities of each value in the ensemble. If multiple initializations Distribution Function excluding the corresponding initialization. } \examples{ -exp <- array(rnorm(216), dim = c(dataset = 1, member = 2, sdate = 3, ftime = 9, lat = 2, lon = 2)) +exp <- array(rnorm(216), dim = c(dataset = 1, member = 2, sdate = 3, + ftime = 9, lat = 2, lon = 2)) exp_probs <- AbsToProbs(exp) -data <- array(rnorm(5 * 2 * 61 * 1), - c(member = 5, sdate = 2, ftime = 61, lon = 1)) +data <- array(rnorm(5 * 3 * 61 * 1), + c(member = 5, sdate = 3, ftime = 61, lon = 1)) Dates <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), as.Date("30-06-2000", format = "\%d-\%m-\%Y"), by = 'day'), seq(as.Date("01-05-2001", format = "\%d-\%m-\%Y"), as.Date("30-06-2001", format = "\%d-\%m-\%Y"), by = 'day'), seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), as.Date("30-06-2002", format = "\%d-\%m-\%Y"), by = 'day')) -exp_probs <- AbsToProbs(exp, start = list(21, 4), end = list(21, 6)) +dim(Dates) <- c(ftime = 61, sdate = 3) +exp_probs <- AbsToProbs(data, dates = Dates, start = list(21, 4), + end = list(21, 6)) } diff --git a/man/CST_AbsToProbs.Rd b/man/CST_AbsToProbs.Rd index 57426efb6f4e7e88f6c3618c5e89a3db31f0e8fd..055bf6b571ad1fc82e1787e4805b6fe1743c89b8 100644 --- a/man/CST_AbsToProbs.Rd +++ b/man/CST_AbsToProbs.Rd @@ -61,12 +61,12 @@ class(exp) <- 's2dv_cube' exp_probs <- CST_AbsToProbs(exp) exp$data <- array(rnorm(5 * 3 * 214 * 2), c(member = 5, sdate = 3, ftime = 214, lon = 2)) -exp$Dates[[1]] <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), - as.Date("30-11-2000", format = "\%d-\%m-\%Y"), by = 'day'), - seq(as.Date("01-05-2001", format = "\%d-\%m-\%Y"), - as.Date("30-11-2001", format = "\%d-\%m-\%Y"), by = 'day'), - seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), - as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) -exp_probs <- CST_AbsToProbs(exp, start = list(21, 4), end = list(21, 6)) - +exp$attrs$Dates <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), + as.Date("30-11-2000", format = "\%d-\%m-\%Y"), by = 'day'), + seq(as.Date("01-05-2001", format = "\%d-\%m-\%Y"), + as.Date("30-11-2001", format = "\%d-\%m-\%Y"), by = 'day'), + seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), + as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) +dim(exp$attrs$Dates) <- c(ftime = 214, sdate = 3) +exp_probs <- CST_AbsToProbs(data = exp, start = list(21, 4), end = list(21, 6)) } diff --git a/man/CST_MergeRefToExp.Rd b/man/CST_MergeRefToExp.Rd index a5b9cc76fa4e73595a865eac4998bc2c417b3772..9f9a3b977ce032bb44b6eb7724574738992e5d65 100644 --- a/man/CST_MergeRefToExp.Rd +++ b/man/CST_MergeRefToExp.Rd @@ -74,14 +74,14 @@ data_dates <- c(seq(as.Date("01-07-1993", "\%d-\%m-\%Y", tz = 'UTC'), dim(data_dates) <- c(ftime = 154, sdate = 2) data <- NULL data$data <- array(1:(2*154*2), c(ftime = 154, sdate = 2, member= 2)) -data$Dates$start <- data_dates +data$attrs$Dates<- data_dates class(data) <- 's2dv_cube' ref_dates <- seq(as.Date("01-01-1993", "\%d-\%m-\%Y", tz = 'UTC'), as.Date("01-12-1994","\%d-\%m-\%Y", tz = 'UTC'), "day") dim(ref_dates) <- c(ftime = 350, sdate = 2) ref <- NULL ref$data <- array(1001:1700, c(ftime = 350, sdate = 2)) -ref$Dates$start <- ref_dates +ref$attrs$Dates <- ref_dates class(ref) <- 's2dv_cube' new_data <- CST_MergeRefToExp(data1 = ref, 
data2 = data, start1 = list(21, 6), end1 = list(30, 6), diff --git a/man/CST_PeriodAccumulation.Rd b/man/CST_PeriodAccumulation.Rd index abc79b69cd51f8cce811885b0f17c134ce681187..39287052dc1e55d5bef9e24046e875bee38cdb73 100644 --- a/man/CST_PeriodAccumulation.Rd +++ b/man/CST_PeriodAccumulation.Rd @@ -62,17 +62,17 @@ class(exp) <- 's2dv_cube' TP <- CST_PeriodAccumulation(exp) exp$data <- array(rnorm(5 * 3 * 214 * 2), c(memb = 5, sdate = 3, ftime = 214, lon = 2)) -exp$Dates[[1]] <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), - as.Date("30-11-2000", format = "\%d-\%m-\%Y"), by = 'day'), - seq(as.Date("01-05-2001", format = "\%d-\%m-\%Y"), - as.Date("30-11-2001", format = "\%d-\%m-\%Y"), by = 'day'), - seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), - as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) +exp$attrs$Dates <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), + as.Date("30-11-2000", format = "\%d-\%m-\%Y"), by = 'day'), + seq(as.Date("01-05-2001", format = "\%d-\%m-\%Y"), + as.Date("30-11-2001", format = "\%d-\%m-\%Y"), by = 'day'), + seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), + as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) SprR <- CST_PeriodAccumulation(exp, start = list(21, 4), end = list(21, 6)) dim(SprR$data) -head(SprR$Dates) +head(SprR$attrs$Dates) HarR <- CST_PeriodAccumulation(exp, start = list(21, 8), end = list(21, 10)) dim(HarR$data) -head(HarR$Dates) +head(HarR$attrs$Dates) } diff --git a/man/CST_PeriodMean.Rd b/man/CST_PeriodMean.Rd index b9ae538ee38a6bdd51e03fa09e7be58925211964..b1004ad56896fe2aa943677debd1aeec1590888f 100644 --- a/man/CST_PeriodMean.Rd +++ b/man/CST_PeriodMean.Rd @@ -58,10 +58,10 @@ this function: exp <- NULL exp$data <- array(rnorm(45), dim = c(member = 7, ftime = 8)) class(exp) <- 's2dv_cube' -exp$Dates$start <- c(seq(as.Date("01-07-1993", "\%d-\%m-\%Y", tz = 'UTC'), - as.Date("01-08-1993","\%d-\%m-\%Y", tz = 'UTC'), "day"), - seq(as.Date("01-07-1994", "\%d-\%m-\%Y", tz = 'UTC'), - as.Date("01-08-1994","\%d-\%m-\%Y", tz = 'UTC'), "day")) +exp$attrs$Dates <- c(seq(as.Date("01-07-1993", "\%d-\%m-\%Y", tz = 'UTC'), + as.Date("01-08-1993","\%d-\%m-\%Y", tz = 'UTC'), "day"), + seq(as.Date("01-07-1994", "\%d-\%m-\%Y", tz = 'UTC'), + as.Date("01-08-1994","\%d-\%m-\%Y", tz = 'UTC'), "day")) SA <- CST_PeriodMean(exp) } diff --git a/man/CST_QThreshold.Rd b/man/CST_QThreshold.Rd index 0edbcba89a4e3a3ff57c7c5b4d090f6859c38185..eda0fd1ced67dd2d87d4b6924f343e4e27304ea1 100644 --- a/man/CST_QThreshold.Rd +++ b/man/CST_QThreshold.Rd @@ -85,12 +85,12 @@ class(exp) <- 's2dv_cube' exp_probs <- CST_QThreshold(exp, threshold) exp$data <- array(rnorm(5 * 3 * 214 * 2), c(member = 5, sdate = 3, ftime = 214, lon = 2)) -exp$Dates[[1]] <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), - as.Date("30-11-2000", format = "\%d-\%m-\%Y"), by = 'day'), - seq(as.Date("01-05-2001", format = "\%d-\%m-\%Y"), - as.Date("30-11-2001", format = "\%d-\%m-\%Y"), by = 'day'), - seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), - as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) +exp$attrs$Dates <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), + as.Date("30-11-2000", format = "\%d-\%m-\%Y"), by = 'day'), + seq(as.Date("01-05-2001", format = "\%d-\%m-\%Y"), + as.Date("30-11-2001", format = "\%d-\%m-\%Y"), by = 'day'), + seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), + as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) exp_probs <- CST_QThreshold(exp, threshold) } diff --git a/man/CST_SelectPeriodOnData.Rd 
b/man/CST_SelectPeriodOnData.Rd index 6e041624af16a773bbfa7a32a6a06ca55aca3f60..22b2a9c2dee5baa668962d8ade5a6d0519810581 100644 --- a/man/CST_SelectPeriodOnData.Rd +++ b/man/CST_SelectPeriodOnData.Rd @@ -37,13 +37,13 @@ Auxiliary function to subset data for a specific period. exp <- NULL exp$data <- array(rnorm(5 * 3 * 214 * 2), c(memb = 5, sdate = 3, ftime = 214, lon = 2)) -exp$Dates$start <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), - as.Date("30-11-2000", format = "\%d-\%m-\%Y"), by = 'day'), - seq(as.Date("01-05-2001", format = "\%d-\%m-\%Y"), - as.Date("30-11-2001", format = "\%d-\%m-\%Y"), by = 'day'), - seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), - as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) +exp$attrs$Dates <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), + as.Date("30-11-2000", format = "\%d-\%m-\%Y"), by = 'day'), + seq(as.Date("01-05-2001", format = "\%d-\%m-\%Y"), + as.Date("30-11-2001", format = "\%d-\%m-\%Y"), by = 'day'), + seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), + as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) +dim(exp$attrs$Dates) <- c(ftime = 214, sdate = 3) class(exp) <- 's2dv_cube' Period <- CST_SelectPeriodOnData(exp, start = list(21, 6), end = list(21, 9)) - } diff --git a/man/CST_Threshold.Rd b/man/CST_Threshold.Rd index 5d260e9cb656f205a5d67339b573fdfd1627285b..ffe06000d67a6c1371749a872a3d48d1d0dc4be8 100644 --- a/man/CST_Threshold.Rd +++ b/man/CST_Threshold.Rd @@ -68,12 +68,12 @@ threshold <- 0.9 exp <- NULL exp$data <- array(rnorm(5 * 3 * 214 * 2), c(member = 5, sdate = 3, ftime = 214, lon = 2)) -exp$Dates$start <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), - as.Date("30-11-2000", format = "\%d-\%m-\%Y"), by = 'day'), - seq(as.Date("01-05-2001", format = "\%d-\%m-\%Y"), - as.Date("30-11-2001", format = "\%d-\%m-\%Y"), by = 'day'), - seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), - as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) +exp$attrs$Dates <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), + as.Date("30-11-2000", format = "\%d-\%m-\%Y"), by = 'day'), + seq(as.Date("01-05-2001", format = "\%d-\%m-\%Y"), + as.Date("30-11-2001", format = "\%d-\%m-\%Y"), by = 'day'), + seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), + as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) class(exp) <- 's2dv_cube' exp_probs <- CST_Threshold(exp, threshold, start = list(21, 4), end = list(21, 6)) diff --git a/man/CST_TotalSpellTimeExceedingThreshold.Rd b/man/CST_TotalSpellTimeExceedingThreshold.Rd index 75a2d1e2c55dceffc2e3b708e562b35d7e219435..e2f7d26c15bd92c2ef82bcf21d250ce8aac514df 100644 --- a/man/CST_TotalSpellTimeExceedingThreshold.Rd +++ b/man/CST_TotalSpellTimeExceedingThreshold.Rd @@ -78,12 +78,12 @@ by using function \code{AbsToProbs}. See section @examples. 
exp <- NULL exp$data <- array(rnorm(5 * 3 * 214 * 2)*23, c(member = 5, sdate = 3, ftime = 214, lon = 2)) -exp$Dates$start <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), - as.Date("30-11-2000", format = "\%d-\%m-\%Y"), by = 'day'), - seq(as.Date("01-05-2001", format = "\%d-\%m-\%Y"), - as.Date("30-11-2001", format = "\%d-\%m-\%Y"), by = 'day'), - seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), - as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) +exp$attrs$Dates <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), + as.Date("30-11-2000", format = "\%d-\%m-\%Y"), by = 'day'), + seq(as.Date("01-05-2001", format = "\%d-\%m-\%Y"), + as.Date("30-11-2001", format = "\%d-\%m-\%Y"), by = 'day'), + seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), + as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) class(exp) <- 's2dv_cube' TTSET <- CST_TotalSpellTimeExceedingThreshold(exp, threshold = 23, spell = 3) diff --git a/man/CST_TotalTimeExceedingThreshold.Rd b/man/CST_TotalTimeExceedingThreshold.Rd index 5dea9647c56d673e353f7357bfb1018a3d7656ef..b09ae53bb10e5b46fdd792a7d523bd70300c4462 100644 --- a/man/CST_TotalTimeExceedingThreshold.Rd +++ b/man/CST_TotalTimeExceedingThreshold.Rd @@ -85,12 +85,12 @@ indices for heat stress can be obtained by using this function: exp <- NULL exp$data <- array(abs(rnorm(5 * 3 * 214 * 2)*280), c(member = 5, sdate = 3, ftime = 214, lon = 2)) -exp$Dates$start <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), - as.Date("30-11-2000", format = "\%d-\%m-\%Y"), by = 'day'), - seq(as.Date("01-05-2001", format = "\%d-\%m-\%Y"), - as.Date("30-11-2001", format = "\%d-\%m-\%Y"), by = 'day'), - seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), - as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) +exp$attrs$Dates <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), + as.Date("30-11-2000", format = "\%d-\%m-\%Y"), by = 'day'), + seq(as.Date("01-05-2001", format = "\%d-\%m-\%Y"), + as.Date("30-11-2001", format = "\%d-\%m-\%Y"), by = 'day'), + seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), + as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) class(exp) <- 's2dv_cube' DOT <- CST_TotalTimeExceedingThreshold(exp, threshold = 280) diff --git a/man/CST_WindCapacityFactor.Rd b/man/CST_WindCapacityFactor.Rd index 1dd879bebefc4f8675f0549a7f38ba8efc6d116c..9d9cfa4769401369d2aa3cfac3d9df2bcef5dd85 100644 --- a/man/CST_WindCapacityFactor.Rd +++ b/man/CST_WindCapacityFactor.Rd @@ -58,18 +58,22 @@ different power curves that span different IEC classes can be selected (see below). } \examples{ -wind <- array(rweibull(n = 100, shape = 2, scale = 6), c(member = 10, lat = 2, lon = 5)) -wind <- CSTools::s2dv_cube(data = wind, lat = c(40, 41), lon = 1:5, - Variable = list(varName = 'sfcWind', level = 'Surface'), - Datasets = 'synthetic', when = Sys.time(), - Dates = list(start = '1990-01-01 00:00:00', end = '1990-01-01 00:00:00'), - source_file = NA) +wind <- NULL +wind$data <- array(rweibull(n = 100, shape = 2, scale = 6), + c(member = 10, lat = 2, lon = 5)) +wind$coords <- list(lat = c(40, 41), lon = 1:5) +variable <- list(Variable = list(varName = 'sfcWind', + metadata = list(sfcWind = list(level = 'Surface')))) +wind$attrs <- list(Variable = variable, Datasets = 'synthetic', + when = Sys.time(), Dates = '1990-01-01 00:00:00') +class(wind) <- 's2dv_cube' WCF <- CST_WindCapacityFactor(wind, IEC_class = "III") } \references{ -Lledó, Ll., Torralba, V., Soret, A., Ramon, J., & Doblas-Reyes, F. J. (2019). 
-Seasonal forecasts of wind power generation. Renewable Energy, 143, 91–100. https://doi.org/10.1016/j.renene.2019.04.135 +Lledó, Ll., Torralba, V., Soret, A., Ramon, J., & Doblas-Reyes, +F. J. (2019). Seasonal forecasts of wind power generation. +Renewable Energy, 143, 91–100. https://doi.org/10.1016/j.renene.2019.04.135 International Standard IEC 61400-1 (third ed.) (2005) } diff --git a/man/CST_WindPowerDensity.Rd b/man/CST_WindPowerDensity.Rd index 9c3040cbd6adfc0ed2ab4c8b05087976c0775d0e..54390a00830ccadccac247807a4605f09eceef7f 100644 --- a/man/CST_WindPowerDensity.Rd +++ b/man/CST_WindPowerDensity.Rd @@ -51,13 +51,16 @@ It is computed as 0.5*ro*wspd^3. As this function is non-linear, it will give inaccurate results if used with period means. } \examples{ -wind <- array(rweibull(n = 100, shape = 2, scale = 6), c(member = 10, lat = 2, lon = 5)) -wind <- CSTools::s2dv_cube(data = wind, lat = c(40, 41), lon = 1:5, - Variable = list(varName = 'sfcWind', level = 'Surface'), - Datasets = 'synthetic', when = Sys.time(), - Dates = list(start = '1990-01-01 00:00:00', end = '1990-01-01 00:00:00'), - source_file = NA) -WPD <- CST_WindPowerDensity(wind) +wind <- NULL +wind$data <- array(rweibull(n = 100, shape = 2, scale = 6), + c(member = 10, lat = 2, lon = 5)) +wind$coords <- list(lat = c(40, 41), lon = 1:5) +variable <- list(varName = 'sfcWind', + metadata = list(sfcWind = list(level = 'Surface'))) +wind$attrs <- list(Variable = variable, Datasets = 'synthetic', + when = Sys.time(), Dates = '1990-01-01 00:00:00') +class(wind) <- 's2dv_cube' +WPD <- CST_WindPowerDensity(wind) } \author{ diff --git a/man/MergeRefToExp.Rd b/man/MergeRefToExp.Rd index f5b4958adcef5cf0d37cec9f8d03fc621dd79113..e6b40c8c57fdff75e2e16ed51aa3c1d6c7d38d76 100644 --- a/man/MergeRefToExp.Rd +++ b/man/MergeRefToExp.Rd @@ -13,7 +13,7 @@ MergeRefToExp( dates2, start2, end2, - time_dim = "time", + time_dim = "ftime", sdate_dim = "sdate", ncores = NULL ) @@ -21,7 +21,7 @@ MergeRefToExp( \arguments{ \item{data1}{A multidimensional array with named dimensions.} -\item{dates1}{a vector of dates or a multidimensional array of dates with +\item{dates1}{A vector of dates or a multidimensional array of dates with named dimensions matching the dimensions on parameter 'data1'.} \item{start1}{A list to define the initial date of the period to select from @@ -48,7 +48,8 @@ the final month of the period.} \item{time_dim}{A character string indicating the name of the temporal dimension. By default, it is set to 'ftime'. More than one dimension name matching the dimensions provided in the object \code{data$data} can be -specified.
This dimension is required to subset the data in a requested +period.} \item{sdate_dim}{A character string indicating the name of the dimension in which the initialization dates are stored.} @@ -80,6 +81,7 @@ ref <- array(1001:1700, c(time = 350, sdate = 2)) data <- array(1:(2*154*2), c(time = 154, sdate = 2, member= 2)) new_data <- MergeRefToExp(data1 = ref, dates1 = ref_dates, start1 = list(21, 6), end1 = list(30, 6), data2 = data, dates2 = data_dates, - start2 = list(1, 7), end = list(21, 9)) + start2 = list(1, 7), end = list(21, 9), + time_dim = 'time') } diff --git a/man/SelectPeriodOnData.Rd b/man/SelectPeriodOnData.Rd index 118cb98e2a1347e31bdb1603588c7ae5821767ea..caaa0fb290b86c2c91a7a5793c47865aa1b1982c 100644 --- a/man/SelectPeriodOnData.Rd +++ b/man/SelectPeriodOnData.Rd @@ -7,23 +7,25 @@ SelectPeriodOnData(data, dates, start, end, time_dim = "ftime", ncores = NULL) } \arguments{ -\item{data}{A multidimensional array with named dimensions.} +\item{data}{A multidimensional array with named dimensions with at least the +time dimension specified in parameter 'time_dim'. All common dimensions +with 'dates' parameter need to have the same length.} -\item{dates}{A vector of dates or a multidimensional array of dates with named -dimensions.} +\item{dates}{An array of dates with named dimensions with at least the time +dimension specified in parameter 'time_dim'. All common dimensions with +'data' parameter need to have the same length.} -\item{start}{An optional parameter to defined the initial date of the period -to select from the data by providing a list of two elements: the initial -date of the period and the initial month of the period.} +\item{start}{A list with two elements to define the initial date of the period +to select from the data. The first element is the initial day of the period +and the second element is the initial month of the period.} -\item{end}{An optional parameter to defined the final date of the period to -select from the data by providing a list of two elements: the final day of -the period and the final month of the period.} +\item{end}{A list with two elements to define the final date of the period +to select from the data. The first element is the final day of the period +and the second element is the final month of the period.} \item{time_dim}{A character string indicating the name of the dimension to -compute select the dates. By default, it is set to 'ftime'. More than one -dimension name matching the dimensions provided in the object -\code{data$data} can be specified.} +compute select the dates. By default, it is set to 'ftime'. 
Parameters +'data' and 'dates'} \item{ncores}{An integer indicating the number of cores to use in parallel computation.} @@ -47,5 +49,4 @@ Dates <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) dim(Dates) <- c(ftime = 214, sdate = 3) Period <- SelectPeriodOnData(data, Dates, start = list(21, 6), end = list(21, 9)) - } diff --git a/man/SelectPeriodOnDates.Rd b/man/SelectPeriodOnDates.Rd index cce8e55d82776129dc3eda6f97d928534780869e..386fb92880f0e6119ed5b9d2c2d2f3ca701f35a3 100644 --- a/man/SelectPeriodOnDates.Rd +++ b/man/SelectPeriodOnDates.Rd @@ -7,8 +7,7 @@ SelectPeriodOnDates(dates, start, end, time_dim = "ftime", ncores = NULL) } \arguments{ -\item{dates}{A vector of dates or a multidimensional array of dates with named -dimensions.} +\item{dates}{An array of dates with named dimensions.} \item{start}{An optional parameter to defined the initial date of the period to select from the data by providing a list of two elements: the initial @@ -40,5 +39,6 @@ Dates <- c(seq(as.Date("01-05-2000", format = "\%d-\%m-\%Y"), as.Date("30-11-2001", format = "\%d-\%m-\%Y"), by = 'day'), seq(as.Date("01-05-2002", format = "\%d-\%m-\%Y"), as.Date("30-11-2002", format = "\%d-\%m-\%Y"), by = 'day')) +dim(Dates) <- c(ftime = 214, sdate = 3) Period <- SelectPeriodOnDates(Dates, start = list(21, 6), end = list(21, 9)) } diff --git a/man/WindCapacityFactor.Rd b/man/WindCapacityFactor.Rd index 557771e864a523db0a335f2e8e5bdfc8eb282e5d..69549a817e3ebd951e3e47e553205699276f95ce 100644 --- a/man/WindCapacityFactor.Rd +++ b/man/WindCapacityFactor.Rd @@ -70,8 +70,9 @@ WCF <- WindCapacityFactor(wind, IEC_class = "III") } \references{ -Lledó, Ll., Torralba, V., Soret, A., Ramon, J., & Doblas-Reyes, F. J. (2019). -Seasonal forecasts of wind power generation. Renewable Energy, 143, 91–100. https://doi.org/10.1016/j.renene.2019.04.135 +Lledó, Ll., Torralba, V., Soret, A., Ramon, J., & Doblas-Reyes, +F. J. (2019). Seasonal forecasts of wind power generation. +Renewable Energy, 143, 91–100. https://doi.org/10.1016/j.renene.2019.04.135 International Standard IEC 61400-1 (third ed.) 
(2005) } diff --git a/tests/testthat/test-AbsToProbs.R b/tests/testthat/test-AbsToProbs.R index 2905cf30c9af3dfe14a8fd1a19f125a734e72f19..c448670b91c16da335e69001b690b7ddaaafe734 100644 --- a/tests/testthat/test-AbsToProbs.R +++ b/tests/testthat/test-AbsToProbs.R @@ -1,28 +1,97 @@ -context("Generic tests") -test_that("Sanity checks", { - #source("csindicators/R/AbsToProbs.R") - expect_error(AbsToProbs('x'), "Parameter 'data' must be numeric.") - expect_equal(AbsToProbs(1), array(1, c(sdate = 1, member = 1))) - expect_equal(AbsToProbs(1, memb_dim = 'x'), array(1, c(sdate = 1, x = 1))) - expect_error(AbsToProbs(data = NULL), "Parameter 'data' cannot be NULL.") - expect_error(AbsToProbs(1, dates = '2000-01-01', end = 3, start = 4), - "Parameter 'start' and 'end' must be lists indicating the day and the month of the period start and end.") - expect_equal(AbsToProbs(1:10), array(seq(0.1, 1, 0.1), c(sdate = 1, member = 10))) +context("CSIndicators::AbsToProbs tests") + +############################################## +# dat1 +dat1 <- NULL +dat1$data <- array(rnorm(5 * 2 * 61 * 1), + c(member = 5, sdate = 2, ftime = 61, lon = 1)) +Dates1 <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), + as.Date("30-06-2000", format = "%d-%m-%Y"), by = 'day'), + seq(as.Date("01-05-2001", format = "%d-%m-%Y"), + as.Date("30-06-2001", format = "%d-%m-%Y"), by = 'day')) +dat1$attrs$Dates <- Dates1 +class(dat1) <- 's2dv_cube' +# dat2 +Dates2 <- Dates1 +dim(Dates2) <- c(ftime = 61, sdate = 2) +############################################## + +test_that("1. Sanity checks", { + # CST_AbsToProbs + expect_error( + CST_AbsToProbs('x'), + paste0("Parameter 'data' must be of the class 's2dv_cube'.") + ) + expect_warning( + CST_AbsToProbs(dat1, start = list(21, 4), end = list(21, 6)), + paste0("Dimensions in 'data' element 'attrs$Dates' are missed and ", + "all data would be used."), + fixed = TRUE + ) + # AbsToProbs + expect_error( + AbsToProbs('x'), + "Parameter 'data' must be numeric." + ) + expect_warning( + AbsToProbs(1:10, start = list(21, 4), end = list(21, 6)), + paste0("Parameter 'data' doesn't have dimension names and all ", + "data will be used.") + ) + expect_error( + AbsToProbs(dat1$data, start = c(21, 4), end = c(21, 6)), + paste0("Parameter 'start' and 'end' must be lists indicating the ", + "day and the month of the period start and end.") + ) + expect_warning( + AbsToProbs(dat1$data, start = list(21, 4), end = list(21, 6)), + paste0("Parameter 'dates' is not provided and all data will be used.") + ) + expect_warning( + AbsToProbs(data = dat1$data, dates = Dates1, + start = list(21, 4), end = list(21, 6)), + paste0("Parameter 'dates' doesn't have dimension names and all data will be used.") + ) + expect_equal( + dim(AbsToProbs(data = dat1$data, dates = Dates2, + start = list(21, 4), end = list(21, 6))), + c(member = 5, sdate = 2, ftime = 52, lon = 1) + ) + expect_equal( + AbsToProbs(1), + 1 + ) + expect_equal( + AbsToProbs(1:10), + seq(0.1, 1.0, 0.1) + ) + expect_equal( + AbsToProbs(1, memb_dim = 'x'), + 1 + ) + expect_error( + AbsToProbs(data = NULL), + "Parameter 'data' must be numeric." 
+ ) data <- array(1:24, c(member = 3, sdate = 2, lon = 4)) - expect_equal(AbsToProbs(data), array(rep(0:1,12), c(sdate = 2, member = 3, lon = 4))) + expect_equal( + AbsToProbs(data), + array(c(rep(0, 3), rep(1, 3)), c(member = 3, sdate = 2, lon = 4)) + ) }) -test_that("Seasonal forecasts", { +############################################## - exp <- CSTools::lonlat_temp$exp$data[1,1:3,1:3,,1:5,1:5] - exp_probs <- AbsToProbs(exp) - expect_equal(dim(exp)[3:5], dim(exp_probs)[3:5]) - expect_equal(round(exp_probs[,1,1,1,1]), c(1, 0, 1)) - exp <- exp[,1,,,] # one sdate - expect_error(exp1_probs <- AbsToProbs(exp), - "Could not find dimension 'sdate' in 1th object provided in 'data'.") - library(s2dv) - exp1 <- InsertDim(exp, 2, 1, name = 'sdate') - exp1_probs <- AbsToProbs(exp1) - expect_equal(round(exp1_probs[1,,2,2,2]), c(1, 0, 1)) -}) +# test_that("2. Seasonal forecasts", { +# exp <- CSTools::lonlat_temp$exp$data[1,1:3,1:3,,1:5,1:5] +# exp_probs <- AbsToProbs(exp) +# expect_equal(dim(exp)[3:5], dim(exp_probs)[3:5]) +# expect_equal(round(exp_probs[1,,1,1,1]), c(1, 0, 1)) +# exp <- exp[,1,,,] # one sdate +# expect_error(exp1_probs <- AbsToProbs(exp), +# "Could not find dimension 'sdate' in 1th object provided in 'data'.") +# library(s2dv) +# exp1 <- InsertDim(exp, 2, 1, name = 'sdate') +# exp1_probs <- AbsToProbs(exp1) +# expect_equal(round(exp1_probs[,1,2,2,2]), c(1, 0, 1)) +# }) diff --git a/tests/testthat/test-AccumulationExceedingThreshold.R b/tests/testthat/test-AccumulationExceedingThreshold.R index 926fe1977e618beaaf5d1c240faec7396072284c..dda5c423e61ce726303145d86f2612105defed13 100644 --- a/tests/testthat/test-AccumulationExceedingThreshold.R +++ b/tests/testthat/test-AccumulationExceedingThreshold.R @@ -236,49 +236,48 @@ test_that("4. Output checks", { ############################################## test_that("5. 
Seasonal forecasts", { - library(CSTools) - exp <- CSTools::lonlat_temp$exp - exp$data <- exp$data[ , 1:4, 1:2, , , ] - res <- CST_AccumulationExceedingThreshold(exp, threshold = 280, time_dim = 'ftime') + # exp <- CSTools::lonlat_temp$exp + # exp$data <- exp$data[ , 1:4, 1:2, , , ] + # res <- CST_AccumulationExceedingThreshold(exp, threshold = 280, time_dim = 'ftime') - expect_equal( - round(res$data[, 2, 2, 2]), - c(0, 280, 281, 281) - ) + # expect_equal( + # round(res$data[, 2, 2, 2]), + # c(0, 280, 281, 281) + # ) - # GDD - exp <- array(NA, dim = c(member = 6, sdate = 3, ftime = 214, lat = 4, lon = 4)) - exp1 <- drop(CSTools::lonlat_prec$data) * 86400000 - exp[, , 1:31, , ] <- exp1 + 10; exp[, , 32:62, , ] <- exp1 + 11 - exp[, , 63:93, , ] <- exp1 + 12; exp[, , 94:124, , ] <- exp1 + 13 - exp[, , 125:155, , ] <- exp1 + 14; exp[, , 156:186, , ] <- exp1 + 15 - exp[, , 187:214, , ] <- exp1[, , 1:28, , ] + 16 + # # GDD + # exp <- array(NA, dim = c(member = 6, sdate = 3, ftime = 214, lat = 4, lon = 4)) + # exp1 <- drop(CSTools::lonlat_prec$data) * 86400000 + # exp[, , 1:31, , ] <- exp1 + 10; exp[, , 32:62, , ] <- exp1 + 11 + # exp[, , 63:93, , ] <- exp1 + 12; exp[, , 94:124, , ] <- exp1 + 13 + # exp[, , 125:155, , ] <- exp1 + 14; exp[, , 156:186, , ] <- exp1 + 15 + # exp[, , 187:214, , ] <- exp1[, , 1:28, , ] + 16 - Dates <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), - as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), - seq(as.Date("01-05-2001", format = "%d-%m-%Y"), - as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), - seq(as.Date("01-05-2002", format = "%d-%m-%Y"), - as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) - GDD <- AccumulationExceedingThreshold(exp - 17, threshold = 0, dates = Dates, time_dim = 'ftime', - start = list(1, 4), end = list(31, 10), na.rm = TRUE) + # Dates <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), + # as.Date("30-11-2000", format = "%d-%m-%Y"), by = 'day'), + # seq(as.Date("01-05-2001", format = "%d-%m-%Y"), + # as.Date("30-11-2001", format = "%d-%m-%Y"), by = 'day'), + # seq(as.Date("01-05-2002", format = "%d-%m-%Y"), + # as.Date("30-11-2002", format = "%d-%m-%Y"), by = 'day')) + # GDD <- AccumulationExceedingThreshold(exp - 17, threshold = 0, dates = Dates, time_dim = 'ftime', + # start = list(1, 4), end = list(31, 10), na.rm = TRUE) - expect_equal( - round(GDD[,1,1,1]), - c(538, 367, 116, 519, 219, 282) - ) - expect_equal( - dim(GDD), - c(member = 6, sdate = 3, lat =4, lon = 4) - ) - expect_error( - AccumulationExceedingThreshold(exp - 17, threshold = 0, dates = Dates, start = list(1, 4), end = list(31, 10), time_dim = 'time'), - "Parameter 'time_dim' is not found in 'data' dimension." - ) - expect_equal( - all(is.na(AccumulationExceedingThreshold(exp - 17, threshold = 0, dates = Dates, time_dim = 'ftime',start = list(1, 4), end = list(31, 10)))), - all(is.na(c(NA, NA))) - ) + # expect_equal( + # round(GDD[,1,1,1]), + # c(538, 367, 116, 519, 219, 282) + # ) + # expect_equal( + # dim(GDD), + # c(member = 6, sdate = 3, lat =4, lon = 4) + # ) + # expect_error( + # AccumulationExceedingThreshold(exp - 17, threshold = 0, dates = Dates, start = list(1, 4), end = list(31, 10), time_dim = 'time'), + # "Parameter 'time_dim' is not found in 'data' dimension." 
+ # ) + # expect_equal( + # all(is.na(AccumulationExceedingThreshold(exp - 17, threshold = 0, dates = Dates, time_dim = 'ftime',start = list(1, 4), end = list(31, 10)))), + # all(is.na(c(NA, NA))) + # ) # test the 'diff' input_1 <- c(1:20) diff --git a/tests/testthat/test-MergeRefToExp.R b/tests/testthat/test-MergeRefToExp.R index b4503d86f34abf8c1348f4dafc7cd82dedc32d46..2c3e8f65637be858ef835dd10ba07e9e4a871938 100644 --- a/tests/testthat/test-MergeRefToExp.R +++ b/tests/testthat/test-MergeRefToExp.R @@ -1,6 +1,8 @@ -context("Generic tests") +context("CSIndicators::MergeRefToExp tests") + +########################################################################### + test_that("Sanity checks", { - #source("csindicators/R/MergeRefToExp.R") data_dates <- c(seq(as.Date("01-07-1993", "%d-%m-%Y", tz = 'UTC'), as.Date("01-12-1993", "%d-%m-%Y", tz = 'UTC'), "day"), seq(as.Date("01-07-1994", "%d-%m-%Y", tz = 'UTC'), @@ -10,33 +12,31 @@ test_that("Sanity checks", { ref_dates <- seq(as.Date("01-01-1993", "%d-%m-%Y", tz = 'UTC'), as.Date("01-12-1994", "%d-%m-%Y", tz = 'UTC'), "day") dim(ref_dates) <- c(ftime = 350, sdate = 2) - ref <- array(1001:1700, c(ftime = 350, sdate = 2)) - data <- array(1:(2 * 154 * 2), c(ftime = 154, sdate = 2, member= 2)) + ref <- NULL + ref$data <- array(1001:1700, c(ftime = 350, sdate = 2)) + ref$attrs$Dates <- ref_dates + class(ref) <- 's2dv_cube' + data <- NULL + data$data <- array(1:(2 * 154 * 2), c(ftime = 154, sdate = 2, member= 2)) + data$attrs$Dates <- data_dates + class(data) <- 's2dv_cube' -suppressWarnings( - ref <- CSTools::s2dv_cube(data = ref, Dates = list(start = ref_dates, - end = ref_dates)) -) -suppressWarnings( - data <- CSTools::s2dv_cube(data = data, Dates = list(start = data_dates, - end = data_dates)) -) suppressWarnings( expect_equal(CST_MergeRefToExp(data1 = ref, data2 = data, start1 = list(21, 6), end1 = list(30, 6), start2 = list(1, 7), - end2 = list(21, 9))$Dates, + end2 = list(21, 9))$attrs$Dates, SelectPeriodOnDates(ref_dates, start = list(21, 6), end = list(21,9))) ) output <- array(c(1172:1181, 1:83, 1537:1546, 155:237, 1172:1181, 309:391, 1537:1546, 463:545), c(ftime = 93, sdate = 2, member = 2)) -suppressWarnings( - expect_equal(CST_MergeRefToExp(data1 = ref, data2 = data, start1 = list(21, 6), + expect_equal( + CST_MergeRefToExp(data1 = ref, data2 = data, start1 = list(21, 6), end1 = list(30, 6), start2 = list(1, 7), end2 = list(21, 9))$data, output) -) + # issue 13: One lead time data_dates <- c(as.Date("01-06-1993", "%d-%m-%Y", tz = 'UTC'), @@ -48,17 +48,15 @@ suppressWarnings( ref_dates <- c(as.Date("01-05-1993", "%d-%m-%Y", tz = 'UTC'), as.Date("01-05-1994", "%d-%m-%Y", tz = 'UTC')) dim(ref_dates) <- c(ftime = 1, sdate = 2) - ref <- array(1:2, c(ftime = 1, sdate = 2)) - data <- array(1:(2 * 3 * 2), c(ftime = 2, sdate = 2, member = 3)) + ref <- NULL + ref$data <- array(1:2, c(ftime = 1, sdate = 2)) + ref$attrs$Dates <- ref_dates + class(ref) <- 's2dv_cube' + data <- NULL + data$data <- array(1:(2 * 3 * 2), c(ftime = 2, sdate = 2, member = 3)) + data$attrs$Dates <- data_dates + class(data) <- 's2dv_cube' -suppressWarnings( - ref <- CSTools::s2dv_cube(data = ref, Dates = list(start = ref_dates, - end = ref_dates)) -) -suppressWarnings( - data <- CSTools::s2dv_cube(data = data, Dates = list(start = data_dates, - end = data_dates)) -) res_dates <- c(as.Date("01-05-1993", "%d-%m-%Y", tz = 'UTC'), as.Date("01-06-1993", "%d-%m-%Y", tz = 'UTC'), as.Date("01-07-1993", "%d-%m-%Y", tz = 'UTC'), @@ -67,23 +65,24 @@ suppressWarnings( 
as.Date("01-07-1994","%d-%m-%Y", tz = 'UTC')) dim(res_dates) <- c(ftime = 3, sdate = 2) -suppressWarnings( - expect_equal(CST_MergeRefToExp(data1 = ref, data2 = data, start1 = list(1, 5), + + expect_equal( + CST_MergeRefToExp(data1 = ref, data2 = data, start1 = list(1, 5), end1 = list(31, 5), start2 = list(1, 6), - end2 = list(31, 7))$Dates, - res_dates) -) + end2 = list(31, 7))$attrs$Dates, + res_dates + ) output <- abind::abind(t(matrix(rep(1:2, 3), ncol = 2, nrow = 3, byrow = T)), data$data, along = 1) names(dim(output)) <- c('ftime', 'sdate', 'member') -suppressWarnings( - expect_equal(CST_MergeRefToExp(data1 = ref, data2 = data, start1 = list(1, 5), + expect_equal( + CST_MergeRefToExp(data1 = ref, data2 = data, start1 = list(1, 5), end1 = list(31, 5), start2 = list(1, 6), end2 = list(31, 7))$data, - output) -) + output + ) }) @@ -103,34 +102,26 @@ test_that("Seasonal", { dim.dates <- c(ftime=215, sweek = 1, sday = 1, sdate=(hcst.endyear-hcst.inityear)+1) dim(dates) <- dim.dates - - ref <- array(1:(215*25), c(ftime = 215, sdate = 25)) - -suppressWarnings( - ref <- CSTools::s2dv_cube(data = ref, - Dates = list(start = dates, - end = dates)) -) - - data <- array(1:(215*25*3), c(ftime = 215, sdate = 25, member=3)) - -suppressWarnings( - data <- CSTools::s2dv_cube(data = data, - Dates = list(start = dates, - end = dates)) -) - -suppressWarnings( - expect_equal(CST_MergeRefToExp(data1 = data, data2 = ref, start1 = list(21, 6), - end1 = list(30, 6), start2 = list(1, 7), - end2 = list(21, 9))$Dates, - SelectPeriodOnDates(dates, start = list(21, 6), end = list(21,9))) -) + ref <- NULL + ref$data <- array(1:(215*25), c(ftime = 215, sdate = 25)) + ref$attrs$Dates <- dates + class(ref) <- 's2dv_cube' + data <- NULL + data$data <- array(1:(215*25*3), c(ftime = 215, sdate = 25, member=3)) + data$attrs$Dates <- dates + class(data) <- 's2dv_cube' + + expect_equal( + CST_MergeRefToExp(data1 = data, data2 = ref, start1 = list(21, 6), + end1 = list(30, 6), start2 = list(1, 7), + end2 = list(21, 9))$attrs$Dates, + SelectPeriodOnDates(dates, start = list(21, 6), end = list(21,9)) + ) -suppressWarnings( - expect_equal(CST_MergeRefToExp(data1 = ref, data2 = data, start1 = list(21, 6), + expect_equal( + CST_MergeRefToExp(data1 = ref, data2 = data, start1 = list(21, 6), end1 = list(30, 6), start2 = list(1, 7), - end2 = list(21, 9))$Dates, - SelectPeriodOnDates(dates, start = list(21, 6), end = list(21,9))) -) + end2 = list(21, 9))$attrs$Dates, + SelectPeriodOnDates(dates, start = list(21, 6), end = list(21,9)) + ) }) diff --git a/tests/testthat/test-PeriodAccumulation.R b/tests/testthat/test-PeriodAccumulation.R index 0cd69a9393ee65c10c058891b6e1943288eeb9c0..f79d00c66539168c6e71b3b714ab4b46ef18d775 100644 --- a/tests/testthat/test-PeriodAccumulation.R +++ b/tests/testthat/test-PeriodAccumulation.R @@ -1,40 +1,59 @@ -context("Generic tests") +context("CSIndicators::PeriodAccumulation tests") + test_that("Sanity Checks", { - #source("csindicators/R/PeriodAccumulation.R") - expect_error(PeriodAccumulation('x'), "Parameter 'data' must be numeric.") - expect_equal(PeriodAccumulation(1), 1) - expect_equal(PeriodAccumulation(1, time_dim = 'x'), 1) - expect_error(PeriodAccumulation(data = NULL), - "Parameter 'data' cannot be NULL.") - expect_error(PeriodAccumulation(1, dates = '2000-01-01', end = 3, start = 4), + expect_error( + PeriodAccumulation('x'), + "Parameter 'data' must be numeric." 
+ ) + expect_equal( + PeriodAccumulation(1), + 1 + ) + expect_equal( + PeriodAccumulation(1, time_dim = 'x'), + 1 + ) + expect_error( + PeriodAccumulation(data = NULL), + "Parameter 'data' cannot be NULL." + ) + expect_error( + PeriodAccumulation(1, dates = '2000-01-01', end = 3, start = 4), paste("Parameter 'start' and 'end' must be lists indicating", - "the day and the month of the period start and end.")) - expect_equal(PeriodAccumulation(1:10), 55) + "the day and the month of the period start and end.") + ) + expect_equal( + PeriodAccumulation(1:10), + 55 + ) data <- array(1:24, c(sdate = 2, time = 3, lon = 4)) - expect_equal(PeriodAccumulation(data), - array(c(9, 12, 27, 30, 45, 48, 63, 66), c(sdate = 2, lon = 4))) + expect_equal( + PeriodAccumulation(data), + array(c(9, 12, 27, 30, 45, 48, 63, 66), c(sdate = 2, lon = 4)) + ) }) -test_that("seasonal", { - - exp <- CSTools::lonlat_prec - exp$data <- array(1:(1 * 3 * 214 * 2), - c(memb = 1, sdate = 3, ftime = 214, lon = 2)) - exp$Dates[[1]] <- c(seq(as.Date("01-04-2000", format = "%d-%m-%Y"), - as.Date("31-10-2000", format = "%d-%m-%Y"), by = 'day'), - seq(as.Date("01-04-2001", format = "%d-%m-%Y"), - as.Date("31-10-2001", format = "%d-%m-%Y"), by = 'day'), - seq(as.Date("01-04-2002", format = "%d-%m-%Y"), - as.Date("31-10-2002", format = "%d-%m-%Y"), by = 'day')) - - output <- exp - output$data <- array(c(sum(exp$data[1,1,21:82,1]), sum(exp$data[1,2,21:82,1]), - sum(exp$data[1,3,21:82,1]), sum(exp$data[1,1,21:82,2]), - sum(exp$data[1,2,21:82,2]), sum(exp$data[1,3,21:82,2])), - c(memb = 1, sdate = 3, lon = 2)) +# test_that("seasonal", { +# exp <- CSTools::lonlat_prec +# exp$data <- array(1:(1 * 3 * 214 * 2), +# c(memb = 1, sdate = 3, ftime = 214, lon = 2)) +# exp$dims <- dim(exp$data) +# exp$attrs$Dates <- c(seq(as.Date("01-04-2000", format = "%d-%m-%Y"), +# as.Date("31-10-2000", format = "%d-%m-%Y"), by = 'day'), +# seq(as.Date("01-04-2001", format = "%d-%m-%Y"), +# as.Date("31-10-2001", format = "%d-%m-%Y"), by = 'day'), +# seq(as.Date("01-04-2002", format = "%d-%m-%Y"), +# as.Date("31-10-2002", format = "%d-%m-%Y"), by = 'day')) +# dim(exp$attrs$Dates) <- c(ftime = 214, sdate = 3) +# output <- exp +# output$data <- array(c(sum(exp$data[1,1,21:82,1]), sum(exp$data[1,2,21:82,1]), +# sum(exp$data[1,3,21:82,1]), sum(exp$data[1,1,21:82,2]), +# sum(exp$data[1,2,21:82,2]), sum(exp$data[1,3,21:82,2])), +# c(memb = 1, sdate = 3, lon = 2)) - expect_equal(CST_PeriodAccumulation(exp, start = list(21, 4), - end = list(21, 6))$data, output$data) - -}) +# expect_equal( +# CST_PeriodAccumulation(exp, start = list(21, 4), end = list(21, 6))$data, +# output$data +# ) +# }) diff --git a/tests/testthat/test-PeriodMean.R b/tests/testthat/test-PeriodMean.R index 75b6d576550eef3e1aed99a9e025490efbf28b84..3cd6365378a69c5490e668d910b0bc116beb6752 100644 --- a/tests/testthat/test-PeriodMean.R +++ b/tests/testthat/test-PeriodMean.R @@ -1,44 +1,58 @@ -context("Generic tests") +context("CSIndicators::PeriodMean tests") + test_that("Sanity Checks", { - #source("csindicators/R/PeriodMean.R") - expect_error(PeriodMean('x'), "Parameter 'data' must be numeric.") + expect_error( + PeriodMean('x'), + "Parameter 'data' must be numeric." 
+ ) suppressWarnings( - expect_equal(PeriodMean(array(1, c(x = 1)), time_dim = 'x'), 1) + expect_equal( + PeriodMean(array(1, c(x = 1)), time_dim = 'x'), + 1 + ) ) - - expect_error(PeriodMean(data = NULL), "Parameter 'data' cannot be NULL.") - expect_error(PeriodMean(1, dates = '2000-01-01', end = 3, start = 4), + expect_error( + PeriodMean(data = NULL), + "Parameter 'data' cannot be NULL." + ) + expect_error( + PeriodMean(1, dates = '2000-01-01', end = 3, start = 4), "Parameter 'start' and 'end' must be lists indicating the day and the month of the period start and end.") suppressWarnings( - expect_equal(PeriodMean(array(1:10, c(time = 10))), 5.5) + expect_equal( + PeriodMean(array(1:10, c(time = 10))), + 5.5 + ) ) data <- array(1:24, c(sdate = 2, time = 3, lon = 4)) suppressWarnings( - expect_equal(PeriodMean(data), - array(c(3,4,9,10,15,16,21,22), c(sdate = 2, lon = 4))) + expect_equal( + PeriodMean(data), + array(c(3, 4, 9, 10, 15, 16, 21, 22), + c(sdate = 2, lon = 4)) + ) ) }) -test_that("seasonal", { - - exp <- CSTools::lonlat_prec - exp$data <- array(1:(1 * 3 * 214 * 2), - c(memb = 1, sdate = 3, ftime = 214, lon = 2)) - exp$Dates[[1]] <- c(seq(as.Date("01-04-2000", format = "%d-%m-%Y"), - as.Date("31-10-2000", format = "%d-%m-%Y"), by = 'day'), - seq(as.Date("01-04-2001", format = "%d-%m-%Y"), - as.Date("31-10-2001", format = "%d-%m-%Y"), by = 'day'), - seq(as.Date("01-04-2002", format = "%d-%m-%Y"), - as.Date("31-10-2002", format = "%d-%m-%Y"), by = 'day')) - output <- exp - output$data <- array(c(mean(exp$data[1,1,21:82,1]), mean(exp$data[1,2,21:82,1]), - mean(exp$data[1,3,21:82,1]), mean(exp$data[1,1,21:82,2]), - mean(exp$data[1,2,21:82,2]), mean(exp$data[1,3,21:82,2])), - c(memb = 1, sdate = 3, lon = 2)) - expect_equal( - CST_PeriodMean(exp, start = list(21, 4), end = list(21, 6))$data, - output$data) - - - -}) +# test_that("seasonal", { +# exp <- CSTools::lonlat_prec +# exp$data <- array(1:(1 * 3 * 214 * 2), +# c(memb = 1, sdate = 3, ftime = 214, lon = 2)) +# exp$dims <- dim(exp$data) +# exp$attrs$Dates <- c(seq(as.Date("01-04-2000", format = "%d-%m-%Y"), +# as.Date("31-10-2000", format = "%d-%m-%Y"), by = 'day'), +# seq(as.Date("01-04-2001", format = "%d-%m-%Y"), +# as.Date("31-10-2001", format = "%d-%m-%Y"), by = 'day'), +# seq(as.Date("01-04-2002", format = "%d-%m-%Y"), +# as.Date("31-10-2002", format = "%d-%m-%Y"), by = 'day')) +# dim(exp$attrs$Dates) <- c(ftime = 214, sdate = 3) +# output <- exp +# output$data <- array(c(mean(exp$data[1,1,21:82,1]), mean(exp$data[1,2,21:82,1]), +# mean(exp$data[1,3,21:82,1]), mean(exp$data[1,1,21:82,2]), +# mean(exp$data[1,2,21:82,2]), mean(exp$data[1,3,21:82,2])), +# c(memb = 1, sdate = 3, lon = 2)) +# expect_equal( +# CST_PeriodMean(exp, start = list(21, 4), end = list(21, 6))$data, +# output$data +# ) +# }) diff --git a/tests/testthat/test-QThreshold.R b/tests/testthat/test-QThreshold.R index 708c00a41d43db813d6b00bbb3f63cc88aae2bd3..1dc6c65e3d1f8d4c66b65d307fa55befbd9cbb68 100644 --- a/tests/testthat/test-QThreshold.R +++ b/tests/testthat/test-QThreshold.R @@ -1,77 +1,123 @@ -context("Generic tests") +context("CSIndicators::QThreshold tests") + test_that("Sanity checks", { - #source("csindicators/R/QThreshold.R") - expect_error(QThreshold(NULL), - "Parameter 'data' cannot be NULL.") - expect_error(QThreshold('x'), - "Parameter 'data' must be numeric.") + expect_error( + QThreshold(NULL), + "Parameter 'data' cannot be NULL." + ) + expect_error( + QThreshold('x'), + "Parameter 'data' must be numeric." 
+ ) data <- 1:20 - expect_error(QThreshold(data, NULL), - "Parameter 'threshold' cannot be NULL.") - expect_error(QThreshold(data, 'x'), - "Parameter 'threshold' must be numeric.") + expect_error( + QThreshold(data, NULL), + "Parameter 'threshold' cannot be NULL." + ) + expect_error( + QThreshold(data, 'x'), + "Parameter 'threshold' must be numeric." + ) threshold <- 10 - expect_error(QThreshold(data, threshold), - "'x' must have 1 or more non-missing values") + expect_error( + QThreshold(data, threshold), + "'x' must have 1 or more non-missing values" + ) dim(data) <- c(2, 10) - expect_error(QThreshold(data, threshold), - "Parameter 'data' must have named dimensions.") + expect_error( + QThreshold(data, threshold), + "Parameter 'data' must have named dimensions." + ) names(dim(data)) <- c('lat', 'sdate') threshold <- array(1:2, 2) - expect_error(QThreshold(data, threshold), - "Parameter 'threshold' must have named dimensions.") + expect_error( + QThreshold(data, threshold), + "Parameter 'threshold' must have named dimensions." + ) dim(threshold) <- c(time = 2) - data <- array(1:40, c(x = 2, sdate = 20)) threshold <- 10 - expect_equal(dim(QThreshold(data, threshold)), c(sdate = 20, x = 2)) + expect_equal( + dim(QThreshold(data, threshold)), + c(sdate = 20, x = 2) + ) data <- array(1:40, c(x = 2, ftime = 20)) - expect_error(QThreshold(data, threshold), - "Could not find dimension 'sdate' in 1th object provided in 'data'.") - expect_equal(dim(QThreshold(data, threshold, sdate_dim = 'ftime')), - c(ftime = 20, x = 2)) + expect_error( + QThreshold(data, threshold), + "Could not find dimension 'sdate' in 1th object provided in 'data'." + ) + expect_equal( + dim(QThreshold(data, threshold, sdate_dim = 'ftime')), + c(ftime = 20, x = 2) + ) dim(threshold) <- c(member = 1, ftime = 1) - expect_equal(dim(QThreshold(data, threshold, sdate_dim = 'ftime')), - c(ftime = 20, x = 2)) - expect_equal(dim(QThreshold(data, threshold, memb_dim = 'x', sdate_dim = 'ftime')), - c(ftime = 20, x = 2)) - expect_error(QThreshold(data, threshold, - sdate_dim = 'x', ncores = 'Z'), - "Parameter 'ncores' must be numeric") + expect_equal( + dim(QThreshold(data, threshold, sdate_dim = 'ftime')), + c(ftime = 20, x = 2) + ) + expect_equal( + dim(QThreshold(data, threshold, memb_dim = 'x', sdate_dim = 'ftime')), + c(ftime = 20, x = 2) + ) + expect_error( + QThreshold(data, threshold, sdate_dim = 'x', ncores = 'Z'), + "Parameter 'ncores' must be numeric" + ) # dimensions: data <- array(1:20, c(time = 5, sdate = 2, lat = 2)) # does this case made sense? 
threshold <- array(1:5, c(time = 5)) - expect_equal(dim(QThreshold(data, threshold)), - c(sdate = 2, time = 5, lat = 2)) + expect_equal( + dim(QThreshold(data, threshold)), + c(sdate = 2, time = 5, lat = 2) + ) threshold <- array(1:2, c(lat = 2)) - expect_equal(dim(QThreshold(data, threshold)), - c(sdate = 2, time = 5, lat = 2)) + expect_equal( + dim(QThreshold(data, threshold)), + c(sdate = 2, time = 5, lat = 2) + ) data <- array(1:60, c(time = 5, member = 3, sdate = 2, lat = 2)) - expect_equal(dim(QThreshold(data, threshold)), - c(sdate = 2, member = 3, time = 5, lat = 2)) - expect_equal(dim(QThreshold(data, threshold, memb_dim = NULL)), - c(sdate = 2, time = 5, member = 3, lat = 2)) + expect_equal( + dim(QThreshold(data, threshold)), + c(sdate = 2, member = 3, time = 5, lat = 2) + ) + expect_equal( + dim(QThreshold(data, threshold, memb_dim = NULL)), + c(sdate = 2, time = 5, member = 3, lat = 2) + ) }) -test_that("Seasonal forecasts", { - - obs <- CSTools::lonlat_temp$obs$data - 248 - obs_percentile <- QThreshold(obs, threshold = 35) - expect_equal(dim(obs)[4:6], dim(obs_percentile)[4:6]) - expect_equal(obs_percentile[, 1, 1, 3, 20, 53], c(rep(0.4, 4), rep(0.2, 2))) - obs1 <- obs[,,2,,,] # no sdate - expect_error(obs1_percentile <- QThreshold(obs1, threshold = 35), - "Could not find dimension 'sdate' in 1th object provided in 'data'.") - library(s2dv) - obs1 <- InsertDim(obs1, 1, 1, name = 'sdate') # one sdate - expect_error(obs1_percentile <- QThreshold(obs1, threshold = 35), - "'x' must have 1 or more non-missing values") - obs2 <- obs[,,,2,,] # one ftime - obs2_percentile <- QThreshold(obs2, threshold = 35) - expect_equal(dim(obs2), dim(obs2_percentile)) - expect_equal(obs2_percentile[,14,53], c(0.6, 0.4, 0.6, 0.6, 0.4, 0.4)) - -}) +# test_that("Seasonal forecasts", { +# obs <- CSTools::lonlat_temp$obs$data - 248 +# obs_percentile <- QThreshold(obs, threshold = 35) +# expect_equal( +# dim(obs)[4:6], +# dim(obs_percentile)[4:6] +# ) +# expect_equal( +# obs_percentile[, 1, 1, 3, 20, 53], +# c(rep(0.4, 4), rep(0.2, 2)) +# ) +# obs1 <- obs[,,2,,,] # no sdate +# expect_error( +# obs1_percentile <- QThreshold(obs1, threshold = 35), +# "Could not find dimension 'sdate' in 1th object provided in 'data'." +# ) +# obs1 <- s2dv::InsertDim(obs1, 1, 1, name = 'sdate') # one sdate +# expect_error( +# obs1_percentile <- QThreshold(obs1, threshold = 35), +# "'x' must have 1 or more non-missing values" +# ) +# obs2 <- obs[,,,2,,] # one ftime +# obs2_percentile <- QThreshold(obs2, threshold = 35) +# expect_equal( +# dim(obs2), +# dim(obs2_percentile) +# ) +# expect_equal( +# obs2_percentile[,14,53], +# c(0.6, 0.4, 0.6, 0.6, 0.4, 0.4) +# ) +# }) diff --git a/tests/testthat/test-SelectPeriod.R b/tests/testthat/test-SelectPeriod.R index 3db72d862f8900141d25209015a37b6da29477f6..1c264c2b2ef5f88a85fb2ddac70f77908efd9ff9 100644 --- a/tests/testthat/test-SelectPeriod.R +++ b/tests/testthat/test-SelectPeriod.R @@ -1,37 +1,71 @@ -context("Generic tests") - #source("R/zzz.R") - #source("R/SelectPeriodOnDates.R") - #source("R/SelectPeriodOnData.R") - library(s2dv) -test_that("Sanity checks", { - #source("csindicators/R/AbsToProbs.R") +context("CSIndicators::SelectPeriodOnData and SelectPeriodOnDates tests") + +library(s2dv) + +############################################## + +test_that("1. 
Sanity checks", { + expect_error( + CST_SelectPeriodOnData(1:10), + paste0("Parameter 'data' must be of the class 's2dv_cube'.") + ) expect_error(SelectPeriodOnDates('x', start = list(1,1), end = list(1,1)), "invalid 'trim' argument") +}) + +############################################## + +test_that("2. Output checks", { # Lluis issue #8: dates <- c(seq(as.Date("02-05-1993", "%d-%m-%Y", tz = 'UTC'), - as.Date("01-12-1993","%d-%m-%Y", tz = 'UTC'), "day"), - seq(as.Date("02-05-1994", "%d-%m-%Y", tz = 'UTC'), - as.Date("01-12-1994","%d-%m-%Y", tz = 'UTC'), "day"), - seq(as.Date("02-05-1995", "%d-%m-%Y", tz = 'UTC'), - as.Date("01-12-1995","%d-%m-%Y", tz = 'UTC'), "day")) + as.Date("01-12-1993","%d-%m-%Y", tz = 'UTC'), "day"), + seq(as.Date("02-05-1994", "%d-%m-%Y", tz = 'UTC'), + as.Date("01-12-1994","%d-%m-%Y", tz = 'UTC'), "day"), + seq(as.Date("02-05-1995", "%d-%m-%Y", tz = 'UTC'), + as.Date("01-12-1995","%d-%m-%Y", tz = 'UTC'), "day")) dim(dates) <- c(time = 214, file_date = 3) output <- c(seq(as.Date("21-06-1993", "%d-%m-%Y", tz = 'UTC'), - as.Date("21-09-1993","%d-%m-%Y", tz = 'UTC'), "day"), - seq(as.Date("21-06-1994", "%d-%m-%Y", tz = 'UTC'), - as.Date("21-09-1994","%d-%m-%Y", tz = 'UTC'), "day"), - seq(as.Date("21-06-1995", "%d-%m-%Y", tz = 'UTC'), - as.Date("21-09-1995","%d-%m-%Y", tz = 'UTC'), "day")) + as.Date("21-09-1993","%d-%m-%Y", tz = 'UTC'), "day"), + seq(as.Date("21-06-1994", "%d-%m-%Y", tz = 'UTC'), + as.Date("21-09-1994","%d-%m-%Y", tz = 'UTC'), "day"), + seq(as.Date("21-06-1995", "%d-%m-%Y", tz = 'UTC'), + as.Date("21-09-1995","%d-%m-%Y", tz = 'UTC'), "day")) dim(output) <- c(time = 93, file_date = 3) - expect_equal(SelectPeriodOnDates(dates, list(21,6),list(21,9), time_dim = 'time'), output) + expect_equal( + SelectPeriodOnDates(dates, list(21,6),list(21,9), time_dim = 'time'), + output + ) dates <- s2dv::Reorder(dates, c('file_date', 'time')) output <- s2dv::Reorder(output, c('file_date', 'time')) - expect_equal(SelectPeriodOnDates(dates, list(21,6),list(21,9), time_dim = 'time'), output) + expect_equal( + SelectPeriodOnDates(dates, list(21,6),list(21,9), time_dim = 'time'), + output + ) + # test different common dimensions + + exp <- array(1:61, dim = c(ftime = 61)) + Dates <- c(seq(as.Date("01-05-2000", format = "%d-%m-%Y"), + as.Date("30-06-2000", format = "%d-%m-%Y"), by = 'day'), + seq(as.Date("01-05-2001", format = "%d-%m-%Y"), + as.Date("30-06-2001", format = "%d-%m-%Y"), by = 'day'), + seq(as.Date("01-05-2002", format = "%d-%m-%Y"), + as.Date("30-06-2002", format = "%d-%m-%Y"), by = 'day')) + dim(Dates) <- c(ftime = 61, sdate = 3) + res <- SelectPeriodOnData(data = exp, dates = Dates, + start = list(21, 4), end = list(21, 6)) + expect_equal( + dim(res), + c(ftime = 52) + ) + }) -test_that("Decadal", { +############################################## +test_that("3. 
Decadal", { # -------- DECADAL ----------# # decadal: 1 sdate several consequtive years: dates <- seq(as.Date("01-01-2000", "%d-%m-%Y", tz = 'UTC'), as.Date("31-12-2005","%d-%m-%Y", tz = 'UTC'), "day") + dim(dates) <- c(ftime = length(dates)) # No dims -> test .position output <- c( seq(as.Date("2000-02-01", "%Y-%m-%d"), as.Date("2000-02-10", "%Y-%m-%d"), 'day'), @@ -43,17 +77,18 @@ test_that("Decadal", { dim(output) <- c(ftime = 60) expect_equal( SelectPeriodOnDates(dates, start = list(1, 2), end = list(10, 2)), - output) + output + ) data <- array(1:(length(dates)*3), c(memb = 1, ftime = length(dates), lon = 3)) expect_equal( SelectPeriodOnData(data, dates, start = list(1, 2), end = list(10, 2)), - array(c(c(32:41, 398:407, 763:772, 1128:1137, 1493:1502, 1859:1868), c(32:41, 398:407, 763:772, 1128:1137, 1493:1502, 1859:1868) + 2192, c(32:41, 398:407, 763:772, 1128:1137, 1493:1502, 1859:1868) + 2 * 2192), - c(ftime = 60, memb = 1, lon = 3))) + c(memb = 1, ftime = 60, lon = 3)) + ) output2 <- c( seq(as.Date("2000-02-01", "%Y-%m-%d"), as.Date("2000-04-10", "%Y-%m-%d"), 'day'), @@ -65,23 +100,27 @@ test_that("Decadal", { dim(output2) <- c(ftime = 416) expect_equal( SelectPeriodOnDates(dates, start = list(1, 2), end = list(10, 4)), - output2) + output2 + ) expect_equal( SelectPeriodOnData(data, dates, start = list(1, 2), end = list(10, 4)), array(c(c(32:101, 398:466, 763:831, 1128:1196, 1493:1562, 1859:1927), c(32:101, 398:466, 763:831, 1128:1196, 1493:1562, 1859:1927) + 2192, c(32:101, 398:466, 763:831, 1128:1196, 1493:1562, 1859:1927) + 2 * 2192), - c(ftime = 416, memb = 1, lon = 3))) + c(memb = 1, ftime = 416, lon = 3)) + ) # 1 dim -> test Apply dim(dates) <- c(ftime = length(dates)) expect_equal( SelectPeriodOnDates(dates, start = list(1, 2), end = list(10, 2)), - output) # no need to check on Data, repited + output + ) # no need to check on Data, repited expect_equal( SelectPeriodOnDates(dates, start = list(1, 2), end = list(10, 4)), - output2) # no need to check on Data, repited + output2 + ) # no need to check on Data, repited # decadal: 5 sdates several consequtive years dates <- rep(seq(as.Date("01-01-2000", "%d-%m-%Y", tz = 'UTC'), @@ -96,49 +135,54 @@ test_that("Decadal", { data <- array(1:(length(dates)*3), c(memb = 1, sdate = 5, ftime = length(dates)/5, lon = 3)) expect_equal( #To be extended for all sdate dimensions: - SelectPeriodOnData(data, dates, start = list(1, 2), end = list(10, 2))[,1,1,1], + SelectPeriodOnData(data, dates, start = list(1, 2), end = list(10, 2))[1,1, ,1], c(1:10 * 5 + 151, 1:10 * 5 + 1981, 1:10 * 5 + 3806, - 1:10 * 5 + 5631, 1:10 * 5 + 7456, 1:10 * 5 + 9286)) + 1:10 * 5 + 5631, 1:10 * 5 + 7456, 1:10 * 5 + 9286) + ) output4 <- rep(output2, 5) dim(output4) <- c(ftime = 416, sdate = 5) expect_equal( SelectPeriodOnDates(dates, start = list(1, 2), end = list(10, 4)), - output4) + output4 + ) expect_equal( #To be extended for all ftime dimensions: - SelectPeriodOnData(data, dates, start = list(1, 2), end = list(10, 4))[1,1,,1], - 156:160) + SelectPeriodOnData(data, dates, start = list(1, 2), end = list(10, 4))[1, ,1,1], + 156:160 + ) # Multiple dims: sdate, fyear, ftime - library(CSTools) - dates <- SplitDim(dates, indices = dates[,1], - split_dim = 'ftime', freq = 'year') + dates <- CSTools::SplitDim(dates, indices = dates[,1], + split_dim = 'ftime', freq = 'year') dates <- as.POSIXct(dates * 24 * 3600, origin = '1970-01-01', tz = 'UTC') - output5 <- SplitDim(output3, indices = output3[,1], split_dim = 'ftime' , freq = 'year') + output5 <- 
CSTools::SplitDim(output3, indices = output3[,1], split_dim = 'ftime' , freq = 'year') output5 <- as.POSIXct(output5 * 24 * 3600, origin = '1970-01-01', tz = 'UTC') expect_equal( SelectPeriodOnDates(dates, start = list(1, 2), end = list(10, 2)), - output5) + output5 + ) data <- array(1:(366*6*5*3), c(memb = 1, sdate = 5, year = 6, ftime = 366, lon = 3)) expect_equal( SelectPeriodOnData(data, dates, start = list(1, 2), end = list(10, 2)), - InsertDim(Reorder(data[,,,32:41,], c('ftime', 'sdate', 'year', 'lon')), - len = 1, pos = 2, name = 'memb')) - output6 <- SplitDim(output4, indices = output4[,1], split_dim = 'ftime' , freq = 'year') + InsertDim(Reorder(data[, , , 32:41, ], c('sdate', 'year', 'ftime', 'lon')), + len = 1, pos = 1, name = 'memb') + ) + output6 <- CSTools::SplitDim(output4, indices = output4[,1], split_dim = 'ftime' , freq = 'year') output6 <- as.POSIXct(output6 * 24 * 3600, origin = '1970-01-01', tz = 'UTC') expect_equal( SelectPeriodOnDates(dates, start = list(1, 2), end = list(10, 4)), - output6) - #expect_equal( # to be fixed: + output6 + ) + # expect_equal( # to be fixed: # SelectPeriodOnData(data, dates, start = list(1, 2), end = list(10, 4)), # (931:935), outer(seq(931, 3001, 30), 0:4, '+') # InsertDim(Reorder(data[,,,32:41,], c('ftime', 'sdate', 'year', 'lon')), # len = 1, pos = 2, name = 'memb')) }) - -test_that("Seasonal", { +############################################## +test_that("4. Seasonal", { # 1 start month, select the required 'ftime' of each 'sdate' in-between the entire timeseries dates <- c(seq(as.Date("01-04-2000", format = "%d-%m-%Y"), as.Date("31-10-2000", format = "%d-%m-%Y"), by = 'day'), @@ -148,7 +192,7 @@ test_that("Seasonal", { as.Date("31-10-2002", format = "%d-%m-%Y"), by = 'day'), seq(as.Date("01-04-2003", format = "%d-%m-%Y"), as.Date("31-10-2003", format = "%d-%m-%Y"), by = 'day')) - + dim(dates) <- c(ftime = 214, sdate = 4) output <- c(seq(as.Date("21-04-2000", format = "%d-%m-%Y"), as.Date("21-06-2000", format = "%d-%m-%Y"), by = 'day'), seq(as.Date("21-04-2001", format = "%d-%m-%Y"), @@ -157,23 +201,26 @@ test_that("Seasonal", { as.Date("21-06-2002", format = "%d-%m-%Y"), by = 'day'), seq(as.Date("21-04-2003", format = "%d-%m-%Y"), as.Date("21-06-2003", format = "%d-%m-%Y"), by = 'day')) - dim(output) <- c(ftime = (30 - 20 + 31 + 21) * 4) + dim(output) <- c(ftime = 62, sdate = 4) expect_equal( - SelectPeriodOnDates(dates, start = list(21, 4), end = list(21, 6)), - output) + SelectPeriodOnDates(dates, start = list(21, 4), end = list(21, 6)), + output + ) - # following the above case, and select the data + # following the above case, and select the data data <- array(1:(5 * 4 * 214 * 2), c(memb = 5, sdate = 4, ftime = 214, lon = 2)) dim(dates) <- c(ftime = 214, sdate = 4) expect_equal( - SelectPeriodOnData(data, dates, start = list(21, 4), end = list(21, 6))[,1,1,1], - data[1,1,21:82,1]) + SelectPeriodOnData(data, dates, start = list(21, 4), end = list(21, 6))[1,1, ,1], + data[1,1,21:82,1] + ) -# when selecting the days across two years + # when selecting the days across two years dates <- seq(as.Date("2000-01-01", "%Y-%m-%d"), as.Date("2003-12-31", "%Y-%m-%d"), 'day') + dim(dates) <- c(ftime = 1461) output1 <- c(seq(as.Date("01-01-2000", format = "%d-%m-%Y"), as.Date("31-01-2000", format = "%d-%m-%Y"), by = 'day'), @@ -188,15 +235,18 @@ test_that("Seasonal", { dim(output1) <- c(ftime = 31 * 8) expect_equal( - SelectPeriodOnDates(dates, start = list(1, 12), end = list(31, 1)), - output1) - # following the above case, and select the data + 
+    output1
+  )
+
+  # following the above case, and select the data
   data1 <- array(1:(length(dates) * 2), c(memb = 1, ftime = length(dates), lon = 2))
   expect_equal(
-    SelectPeriodOnData(data1, dates, start = list(1, 12), end = list(31, 1)),
-    array(c(c(1:31, 336:397, 701:762, 1066:1127, 1431:1461),
-            c(1:31, 336:397, 701:762, 1066:1127, 1431:1461) + 1461),
-          c(ftime = 31 * 8, memb = 1, lon = 2)))
+    SelectPeriodOnData(data1, dates, start = list(1, 12), end = list(31, 1)),
+    array(c(c(1:31, 336:397, 701:762, 1066:1127, 1431:1461),
+            c(1:31, 336:397, 701:762, 1066:1127, 1431:1461) + 1461),
+          c(memb = 1, ftime = 31 * 8, lon = 2))
+  )
 })
diff --git a/tests/testthat/test-Threshold.R b/tests/testthat/test-Threshold.R
index 7ff0ec9bb4b9dd3f8d34766399d02d7a959ae1ca..24ca6010fd895b35b824f5c83fba030280b3d45f 100644
--- a/tests/testthat/test-Threshold.R
+++ b/tests/testthat/test-Threshold.R
@@ -1,51 +1,84 @@
-context("Generic tests")
+context("CSIndicators::Threshold tests")
+
 test_that("Sanity checks", {
-  #source("csindicators/R/Threshold.R")
-  expect_error(Threshold(NULL),
-               "Parameter 'data' cannot be NULL.")
-  expect_error(Threshold('x'),
-               "Parameter 'data' must be numeric.")
+  expect_error(
+    Threshold(NULL),
+    "Parameter 'data' cannot be NULL."
+  )
+  expect_error(
+    Threshold('x'),
+    "Parameter 'data' must be numeric."
+  )
   data <- 1:20
-  expect_error(Threshold(data, NULL),
-               "Parameter 'threshold' cannot be NULL.")
-  expect_error(Threshold(data, 'x'),
-               "Parameter 'threshold' must be numeric.")
+  expect_error(
+    Threshold(data, NULL),
+    "Parameter 'threshold' cannot be NULL."
+  )
+  expect_error(
+    Threshold(data, 'x'),
+    "Parameter 'threshold' must be numeric."
+  )
   threshold <- 0.9
-  expect_equal(Threshold(data, threshold), 18.1)
+  expect_equal(
+    Threshold(data, threshold),
+    18.1
+  )
   dim(data) <- c(2, 10)
-  expect_error(Threshold(data, threshold),
-               "Parameter 'data' must have named dimensions.")
+  expect_error(
+    Threshold(data, threshold),
+    "Parameter 'data' must have named dimensions."
+  )
   names(dim(data)) <- c('lat', 'sdate')
-  expect_error(Threshold(data, threshold),
-               "Could not find dimension 'member' in 1th object provided in 'data'.")
-  expect_equal(Threshold(data, threshold, memb_dim = NULL),
-               array(c(17.2, 18.2), c(lat = 2)))
+  expect_error(
+    Threshold(data, threshold),
+    "Could not find dimension 'member' in 1th object provided in 'data'."
+  )
+  expect_equal(
+    Threshold(data, threshold, memb_dim = NULL),
+    array(c(17.2, 18.2), c(lat = 2))
+  )
   threshold <- c(0.1, 0.2)
-  expect_equal(Threshold(data, threshold, memb_dim = NULL),
-               array(c(2.8, 4.6, 3.8, 5.6), c(probs = 2, lat = 2)))
+  expect_equal(
+    Threshold(data, threshold, memb_dim = NULL),
+    array(c(2.8, 4.6, 3.8, 5.6), c(probs = 2, lat = 2))
+  )
   data <- array(1:40, c(x = 2, ftime = 20))
-  expect_error(Threshold(data, threshold),
-               "Could not find dimension 'sdate' in 1th object provided in 'data'.")
-  expect_equal(dim(Threshold(data, threshold, sdate_dim = 'ftime', memb_dim = NULL)),
-               c(probs = 2, x = 2))
+  expect_error(
+    Threshold(data, threshold),
+    "Could not find dimension 'sdate' in 1th object provided in 'data'."
+  )
+  expect_equal(
+    dim(Threshold(data, threshold, sdate_dim = 'ftime', memb_dim = NULL)),
+    c(probs = 2, x = 2)
+  )
   # threshold with dimensions ?
   dim(threshold) <- c(member = 2, ftime = 1)
-  expect_equal(dim(Threshold(data, threshold, sdate_dim = 'ftime', memb_dim = NULL)),
-               c(probs = 2, x = 2))
-  expect_equal(dim(Threshold(data, threshold, memb_dim = 'x', sdate_dim = 'ftime')),
-               c(probs = 2))
+  expect_equal(
+    dim(Threshold(data, threshold, sdate_dim = 'ftime', memb_dim = NULL)),
+    c(probs = 2, x = 2)
+  )
+  expect_equal(
+    dim(Threshold(data, threshold, memb_dim = 'x', sdate_dim = 'ftime')),
+    c(probs = 2)
+  )
 })
 test_that("Seasonal forecasts", {
-
   exp <- CSTools::lonlat_temp$exp$data
   thresholdP <- Threshold(exp, threshold = 0.9)
-  expect_equal(dim(exp)[4:6], dim(thresholdP)[2:4])
-  expect_equal(round(thresholdP[1, , 2, 2]), c(283, 281, 280))
+  expect_equal(
+    dim(exp)[4:6],
+    dim(thresholdP)[2:4]
+  )
+  expect_equal(
+    round(thresholdP[1, , 2, 2]),
+    c(283, 281, 280)
+  )
   exp1 <- exp[1, 1, 1, , , ] # no member
-  library(s2dv) # 1 member and 1 sdate
-  exp1 <- InsertDim(InsertDim(exp1, 1, 1, name = 'sdate'), 1, 1, name = 'member')
+  exp1 <- s2dv::InsertDim(InsertDim(exp1, 1, 1, name = 'sdate'), 1, 1, name = 'member') # 1 member and 1 sdate
   exp1_thresholdP <- Threshold(exp1, threshold = 0.9)
-  expect_equal(round(exp1_thresholdP[, 2, 2]), c(281, 279, 276))
-
+  expect_equal(
+    round(exp1_thresholdP[, 2, 2]),
+    c(281, 279, 276)
+  )
 })
diff --git a/tests/testthat/test-TotalTimeExceedingThreshold.R b/tests/testthat/test-TotalTimeExceedingThreshold.R
index 4a5a365d65439e56f321e74a694e661fe2377e92..68c6d777f2dddb896380cb902488451680862cba 100644
--- a/tests/testthat/test-TotalTimeExceedingThreshold.R
+++ b/tests/testthat/test-TotalTimeExceedingThreshold.R
@@ -238,15 +238,17 @@ test_that("Seasonal forecasts", {
   exp$data <- exp$data[1, 1:3, , , , ] - 247
   SU35_NoP <- CST_TotalTimeExceedingThreshold(exp, threshold = 35)$data
   expect_equal(
-    SU35_NoP[1, , 15, 3], c(0, 1, 1, 1, 0, 0))
-  # convert to percentile
-  exp_percentile <- AbsToProbs(exp$data)
-  obs_percentile <- drop(QThreshold(obs$data, threshold = 35)
+    SU35_NoP[1, , 15, 3],
+    c(0, 1, 1, 1, 0, 0)
   )
+  # convert to percentile
+  exp_percentile <- AbsToProbs(exp$data)
+  obs_percentile <- drop(QThreshold(obs$data, threshold = 35))
   data <- exp
   data$data <- exp_percentile
   SU35_P <- CST_TotalTimeExceedingThreshold(data, threshold = obs_percentile)$data
   expect_equal(
-    SU35_P[ ,2, 5, 5], c(3, 3, 3, 3, 3, 3)
+    SU35_P[2, , 5, 5],
+    c(3, 3, 3, 3, 3, 3)
   )
 })
diff --git a/tests/testthat/test-WindCapacityFactor.R b/tests/testthat/test-WindCapacityFactor.R
new file mode 100644
index 0000000000000000000000000000000000000000..1bf9089e2fd3eef4ee9903f4c136f5fdf22c5906
--- /dev/null
+++ b/tests/testthat/test-WindCapacityFactor.R
@@ -0,0 +1,49 @@
+context("CSIndicators::WindCapacityFactor tests")
+
+# dat1
+wind <- NULL
+wind$data <- array(rweibull(n = 100, shape = 2, scale = 6),
+                   c(member = 10, lat = 2, lon = 5))
+wind$coords <- list(lat = c(40, 41), lon = 1:5)
+variable <- list(varName = 'sfcWind',
+                 metadata = list(sfcWind = list(level = 'Surface')))
+wind$attrs <- list(Variable = variable, Datasets = 'synthetic',
+                   when = Sys.time(), Dates = '1990-01-01 00:00:00')
+class(wind) <- 's2dv_cube'
+WCF <- CST_WindCapacityFactor(wind, IEC_class = "III")
+
+###########################################################################
+test_that("1. Input checks", {
+  # Check 's2dv_cube'
+  expect_error(
+    CST_WindCapacityFactor(wind = 1),
+    "Parameter 'wind' must be of the class 's2dv_cube'."
+  )
+  # Dates subset
+  expect_warning(
+    CST_WindCapacityFactor(wind = wind, start = list(1,3), end = list(1,7)),
+    paste0("Dimensions in 'wind' element 'attrs$Dates' are missed and ",
+           "all data would be used."),
+    fixed = TRUE
+  )
+  # start and end
+  expect_error(
+    WindCapacityFactor(wind = wind$data, dates = wind$attrs$Dates,
+                       start = c(1,2), end = c(2,3)),
+    paste0("Parameter 'start' and 'end' must be lists indicating the ",
+           "day and the month of the period start and end.")
+  )
+})
+
+###########################################################################
+test_that("2. Output checks", {
+  expect_equal(
+    CST_WindCapacityFactor(wind = wind)$attrs$Variable$varName,
+    'WindCapacityFactor'
+  )
+  expect_equal(
+    dim(CST_WindCapacityFactor(wind = wind)$data),
+    c(member = 10, lat = 2, lon = 5)
+  )
+})
+
diff --git a/tests/testthat/test-WindPowerDensity.R b/tests/testthat/test-WindPowerDensity.R
new file mode 100644
index 0000000000000000000000000000000000000000..249c5290cfb43ddf49ba7bacef4747875763de32
--- /dev/null
+++ b/tests/testthat/test-WindPowerDensity.R
@@ -0,0 +1,48 @@
+context("CSIndicators::WindPowerDensity tests")
+
+# dat1
+wind <- NULL
+wind$data <- array(rweibull(n = 100, shape = 2, scale = 6),
+                   c(member = 10, lat = 2, lon = 5))
+wind$coords <- list(lat = c(40, 41), lon = 1:5)
+variable <- list(varName = 'sfcWind',
+                 metadata = list(sfcWind = list(level = 'Surface')))
+wind$attrs <- list(Variable = variable, Datasets = 'synthetic',
+                   when = Sys.time(), Dates = '1990-01-01 00:00:00')
+class(wind) <- 's2dv_cube'
+
+###########################################################################
+test_that("1. Input checks", {
+  # Check 's2dv_cube'
+  expect_error(
+    CST_WindPowerDensity(wind = 1),
+    "Parameter 'wind' must be of the class 's2dv_cube'."
+  )
+  # Dates subset
+  expect_warning(
+    CST_WindPowerDensity(wind = wind, start = list(1,3), end = list(1,7)),
+    paste0("Dimensions in 'wind' element 'attrs$Dates' are missed and ",
+           "all data would be used."),
+    fixed = TRUE
+  )
+  # start and end
+  expect_error(
+    WindPowerDensity(wind = wind$data, dates = wind$attrs$Dates,
+                     start = c(1,2), end = c(2,3)),
+    paste0("Parameter 'start' and 'end' must be lists indicating the ",
+           "day and the month of the period start and end.")
+  )
+})
+
+###########################################################################
+test_that("2. Output checks", {
+  expect_equal(
+    CST_WindPowerDensity(wind = wind)$attrs$Variable$varName,
+    'WindPowerDensity'
+  )
+  expect_equal(
+    dim(CST_WindPowerDensity(wind = wind)$data),
+    c(member = 10, lat = 2, lon = 5)
+  )
+})
+
diff --git a/vignettes/AgriculturalIndicators.Rmd b/vignettes/AgriculturalIndicators.Rmd
index 50df881aefb029551ee3fdb74600c9beb4b426a6..ec14f58a0ef816c058344cc515cb61a7324744a9 100644
--- a/vignettes/AgriculturalIndicators.Rmd
+++ b/vignettes/AgriculturalIndicators.Rmd
@@ -84,7 +84,6 @@ c(prlr_exp, prlr_obs) %<-% CST_Load(var = 'prlr',
                                     grid = "r1440x721", method = 'bicubic')
 ```
-
 The output contains data and metadata for the experiment and the observations.
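(Illustrative aside, not part of the patch above: a minimal sketch of how one might inspect that structure, assuming the `prlr_exp` object created in the previous chunk and the `s2dv_cube` layout used elsewhere in this PR, with `$data`, `$coords` and `$attrs` elements.)

```
names(prlr_exp)     # expected to include elements such as "data", "coords" and "attrs"
dim(prlr_exp$data)  # the named dimensions reported below
```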
 The elements `prlr_exp$data` and `prlr_obs$data` have dimensions:
@@ -97,10 +96,8 @@ dim(prlr_obs$data)
 # 1 1 4 214 4 4
 ```
-
 To compute **SprR** of forecast and observation, we can run:
-
 ```
 SprR_exp <- CST_PeriodAccumulation(prlr_exp, start = list(21, 4), end = list(21, 6))
 SprR_obs <- CST_PeriodAccumulation(prlr_obs, start = list(21, 4), end = list(21, 6))
@@ -124,7 +121,7 @@ dim(SprR_obs$data)
 The forecast SprR for the 1st member from 2013-2016 of the 1st grid point in mm are:
 ```
-SprR_exp$data[1,1,,1,1] * 86400 * 1000
+SprR_exp$data[1, 1, , 1, 1] * 86400 * 1000
 #[1] 93.23205 230.41904 194.01412 226.52614
 ```
@@ -140,7 +137,7 @@ HarvestR_obs <- CST_PeriodAccumulation(prlr_obs, start = list(21, 8), end = list
 The forecast HarvestR for the 1st member from 2013-2016 of the 1st grid point in mm are:
 ```
-HarvestR_exp$data[1,1,,1,1] * 86400 * 1000
+HarvestR_exp$data[1, 1, , 1, 1] * 86400 * 1000
 #[1] 52.30026 42.88068 156.87961 32.18579
 ```
@@ -159,7 +156,7 @@ To plot the map of ensemble-mean bias of HarvestR forecast, run
 cols <- c('#b2182b', '#d6604d', '#f4a582', '#fddbc7', '#d1e5f0', '#92c5de',
           '#4393c3', '#2166ac')
-PlotEquiMap(Bias[1,,], lon = prlr_obs$lon, lat = prlr_obs$lat,
+PlotEquiMap(Bias[1, , ], lon = prlr_obs$coords$lon, lat = prlr_obs$coords$lat,
             intylat = 1, intxlon = 1, width = 6, height = 6,
             filled.continents = FALSE, units = 'mm', title_scale = .8,
             axes_label_scale = 1, axes_tick_scale = 1, col_inf = cols[1],
@@ -258,7 +255,7 @@ Here, we plot the 2013-2016 mean climatology of ERA5 GST by running
 GST_Clim <- MeanDims(drop(GST_obs$data), 'sdate')
 cols <- c('#ffffd4','#fee391','#fec44f','#fe9929','#ec7014','#cc4c02','#8c2d04')
-PlotEquiMap(GST_Clim, lon = tas_obs$lon, lat = tas_obs$lat,
+PlotEquiMap(GST_Clim, lon = tas_obs$coords$lon, lat = tas_obs$coords$lat,
             intylat = 1, intxlon = 1, width = 6, height = 6,
             filled.continents = FALSE, units = '°C', title_scale = .8,
             axes_label_scale = 1, axes_tick_scale = 1, col_inf = cols[1],
@@ -363,7 +360,8 @@ SU35_exp_BC_Y2016 <- MeanDims(SU35_exp_BC[, 4, , ], 'member')
 cols <- c("#fee5d9", "#fcae91", "#fb6a4a", "#de2d26","#a50f15")
 toptitle <- 'ERA5 SU35 forecast in 2016'
-PlotEquiMap(SU35_obs_Y2016, lon = tasmax_obs$lon, lat = tasmax_obs$lat,
+PlotEquiMap(SU35_obs_Y2016,
+            lon = tasmax_obs$coords$lon, lat = tasmax_obs$coords$lat,
             intylat = 1, intxlon = 1, width = 6, height = 6,
             filled.continents = FALSE, units = 'day', title_scale = .8,
             axes_label_scale = 1, axes_tick_scale = 1, margin_scale = c(1, 1, 1, 1),
@@ -373,7 +371,8 @@ PlotEquiMap(SU35_obs_Y2016, lon = tasmax_obs$lon, lat = tasmax_obs$lat,
             bar_extra_margin = c(0, 0, 0, 0), units_scale = 2)
 toptitle <- 'SU35 forecast in 2016'
-PlotEquiMap(SU35_exp_Y2016, lon = tasmax_obs$lon, lat = tasmax_obs$lat,
+PlotEquiMap(SU35_exp_Y2016,
+            lon = tasmax_obs$coords$lon, lat = tasmax_obs$coords$lat,
             intylat = 1, intxlon = 1, width = 6, height = 6,
             filled.continents = FALSE, units = 'day', title_scale = .8,
             axes_label_scale = 1, axes_tick_scale = 1, margin_scale = c(1, 1, 1, 1),
@@ -383,7 +382,8 @@ PlotEquiMap(SU35_exp_Y2016, lon = tasmax_obs$lon, lat = tasmax_obs$lat,
             bar_extra_margin = c(0, 0, 0, 0), units_scale = 2)
 toptitle <- 'Bias-adjusted SU35 forecast in 2016'
-PlotEquiMap(SU35_exp_BC_Y2016, lon = tasmax_obs$lon, lat = tasmax_obs$lat,
+PlotEquiMap(SU35_exp_BC_Y2016,
+            lon = tasmax_obs$coords$lon, lat = tasmax_obs$coords$lat,
             intylat = 1, intxlon = 1, width = 6, height = 6,
             filled.continents = FALSE, units = 'day', title_scale = .8,
             axes_label_scale = 1, axes_tick_scale = 1, margin_scale = c(1, 1, 1, 1),
@@ -429,7 +429,8 @@ obs_percentile <- drop(obs_percentile)
 After translating both forecasts and observations into probabilities, the comparison can then be done by running
 ```
-SU35_exp_Percentile <- TotalTimeExceedingThreshold(S5txP, threshold = obs_percentile, time_dim = 'ftime')
+SU35_exp_Percentile <- TotalTimeExceedingThreshold(S5txP, threshold = obs_percentile,
+                                                   time_dim = 'ftime')
 ```
 Compute the same ensemble-mean SU35 **with percentile adjustment** in 2016 by running
@@ -442,7 +443,8 @@ Plot the same map for comparison
 ```
 toptitle <- 'SU35 forecast with percentile adjustment in 2016'
-PlotEquiMap(SU35_exp_per_Y2016, lon = tasmax_obs$lon, lat = tasmax_obs$lat,
+PlotEquiMap(SU35_exp_per_Y2016,
+            lon = tasmax_obs$coords$lon, lat = tasmax_obs$coords$lat,
             intylat = 1, intxlon = 1, width = 6, height = 6,
             filled.continents = FALSE, units = 'day', title_scale = .8,
             axes_label_scale = 1, axes_tick_scale = 1, margin_scale = c(1, 1, 1, 1),
@@ -453,7 +455,6 @@ PlotEquiMap(SU35_exp_per_Y2016, lon = tasmax_obs$lon, lat = tasmax_obs$lat,
 ```
-
 As seen in the figure above, applying the percentile adjustment seems to implicitly correct part of the bias observed in the non-bias-adjusted SEAS5 forecast.
@@ -501,7 +502,7 @@ To plot the map of correlation coefficient of GDD for the 2013-2016 period.
 ```
 cols <- c("#f7fcf5", "#e5f5e0", "#c7e9c0", "#a1d99b", "#74c476")
 toptitle <- '2013-2016 correlation coefficient of GDD'
-PlotEquiMap(GDD_Corr, lon = tas_obs$lon, lat = tas_obs$lat,
+PlotEquiMap(GDD_Corr, lon = tas_obs$coords$lon, lat = tas_obs$coords$lat,
             intylat = 1, intxlon = 1, width = 6, height = 6,
             filled.continents = FALSE, units = 'correlation', title_scale = .8,
             axes_label_scale = 1, axes_tick_scale = 1,
@@ -572,7 +573,7 @@ Plot the map of WSDI FRPSS for the period from 2013-2016
 cols <- c("#edf8fb", "#ccece6", "#99d8c9", "#66c2a4")
 toptitle <- 'SEAS5 WSDI FRPSS (2013-2016)'
-PlotEquiMap(WSDI_FRPSS, lon = tasmax_obs$lon, lat = tasmax_obs$lat,
+PlotEquiMap(WSDI_FRPSS, lon = tasmax_obs$coords$lon, lat = tasmax_obs$coords$lat,
             intylat = 1, intxlon = 1, width = 6, height = 6,
             filled.continents = FALSE, units = 'FRPSS', title_scale = .8,
             axes_label_scale = 1, axes_tick_scale = 1, margin_scale = c(1, 1, 1, 1),
diff --git a/vignettes/EnergyIndicators.Rmd b/vignettes/EnergyIndicators.Rmd
index caf474e68d2ba86a04b41f87d9efa300d216cf6a..f4a1a04b722a6b250efadc015808dc5691811969 100644
--- a/vignettes/EnergyIndicators.Rmd
+++ b/vignettes/EnergyIndicators.Rmd
@@ -38,7 +38,7 @@ wind <- rweibull(n = 1000, shape = 2, scale = 6)
 WPD <- WindPowerDensity(wind)
 mean(WPD)
 sd(WPD)
-par(mfrow=c(1, 2))
+par(mfrow = c(1, 2))
 hist(wind, breaks = seq(0, 20))
 hist(WPD, breaks = seq(0, 4000, 200))
 ```
@@ -64,7 +64,7 @@ Following on the previous example, we will compute now the CF that would be obta
 ```{r, fig.width=7}
 WCFI <- WindCapacityFactor(wind, IEC_class = "I")
 WCFIII <- WindCapacityFactor(wind, IEC_class = "III")
-par(mfrow=c(1, 3))
+par(mfrow = c(1, 3))
 hist(wind, breaks = seq(0, 20))
 hist(WCFI, breaks = seq(0, 1, 0.05), ylim = c(0, 500))
 hist(WCFIII, breaks = seq(0, 1, 0.05), ylim = c(0, 500))
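# Illustrative aside (editor's sketch, not part of the vignette chunk above):
# summarise the two capacity-factor distributions just computed. It assumes only
# the 'wind', 'WCFI' and 'WCFIII' objects created in this chunk. Capacity factors
# are fractions of rated power, so they lie between 0 and 1 and their means can
# be compared directly across the two IEC classes.
summary(WCFI)
summary(WCFIII)
c(mean_CF_class_I = mean(WCFI), mean_CF_class_III = mean(WCFIII))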