diff --git a/DESCRIPTION b/DESCRIPTION index 7fd6c5d40393d199acca72430f517865536fbfa5..b23e150d6eab012afa734c3f3fd5d095706010a3 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -48,13 +48,13 @@ Description: Exploits dynamical seasonal forecasts in order to provide Depends: R (>= 3.4.0), maps, + qmap, easyVerification Imports: s2dverification, s2dv, rainfarmr, multiApply (>= 2.1.1), - qmap, ClimProjDiags, ncdf4, plyr, diff --git a/NAMESPACE b/NAMESPACE index 91e3447127b00a81a074d786c35a9a165b08082c..6c95ac0826159823d987706fb8d064f9d53bda39 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -35,6 +35,7 @@ export(PlotForecastPDF) export(PlotMostLikelyQuantileMap) export(PlotPDFsOLE) export(PlotTriangles4Categories) +export(QuantileMapping) export(RFSlope) export(RFTemp) export(RF_Weights) diff --git a/NEWS.md b/NEWS.md index 6f629458ee0e1d9e86b1a8e60851b4d37736b3b2..ea02560ef0e0fea7c0da3798d0b68d9b6e060ebf 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,12 +1,14 @@ ### CSTools 4.0.0 **Submission date to CRAN: XX-12-2020** + - New features: + PlotPDFsOLE includes parameters to modify legend style + CST_RFSlope handless missing values in the temporal dimension and new 'ncores' parameter allows parallel computation + CST_RFWeights accepts s2dv_cube objects as input and new 'ncores' paramenter allows parallel computation + RFWeights is exposed to users + CST_RainFARM accepts multi-dimensional slopes and weights and handless missing values in sample dimensions. - + CST_MultiMetric includes 'rpss' metric and it is addapted to s2dv + + QuantileMapping is exposed to users + + CST_MultiMetric includes 'rpss' metric and it is addapted to s2dv. 
+ PlotMostLikelyQuantileMap vignette + PlotTriangles4Categories includes two parameters to adjust axis and margins + CategoricalEnsCombination is exposed to users @@ -16,7 +18,10 @@ + PlotForecastPDF correctly displays terciles labels + CST_SaveExp correctly save time units + CST_SplitDims returns ordered output following ascending order provided in indices when it is numeric - + Figures resize option from vignettes has been removed + + qmap library moved from Imports to Depends + + CST_QuantileMapping correctly handles exp_cor + + Figures resize option from vignettes has been removed. + ### CSTools 3.1.0 **Submission date to CRAN: 02-07-2020** diff --git a/R/CST_QuantileMapping.R b/R/CST_QuantileMapping.R index 3923569ae3ea59355fc2ba36dbd2d11f05d99d4a..b27cbd4cd49bc92d20bbbcc954d679b271706557 100644 --- a/R/CST_QuantileMapping.R +++ b/R/CST_QuantileMapping.R @@ -23,6 +23,8 @@ #'\itemize{ #'\item{wet.day} {logical indicating whether to perform wet day correction or not.(Not available in 'DIS' method)} #'\item{qstep} {NULL or a numeric value between 0 and 1.}} +#' When providing a forecast to be corrected through the parameter \code{exp_cor}, some inputs might need to be modified. The quantile correction is computed by comparing objects passed through 'exp' and 'obs' parameters, this correction will be later applied to the forecast provided in 'exp_cor'. Imagine the case of 'exp' and 'obs' having several start dates, stored using a dimension e.g. 'sdate', 'sample_dims' include this dimension 'sdate' and 'exp_cor' has forecasts for several sdates but different from the ones in 'exp'. In this case, the correction computed with 'exp' and 'obs' would be applied for each 'sdate' of 'exp_cor' separately. This example corresponds to a case of splitting a dataset into a training set and a validation set. +#' #'@return an oject of class \code{s2dv_cube} containing the experimental data after applyingthe quantile mapping correction. 
#') <- c(dataset = 1, member = 10, sdate = 20, ftime = 60 , #'@import qmap @@ -48,6 +50,14 @@ #'obs <- lonlat_data$obs #'res <- CST_QuantileMapping(exp, obs) #' +#'exp_cor <- exp +#'exp_cor$data <- exp_cor$data[,,1,,,] +#'dim(exp_cor$data) <- c(dataset = 1, member = 15, sdate = 1, ftime = 3, +#' lat = 22, lon = 53) +#'res <- CST_QuantileMapping(exp, obs, exp_cor, +#' sample_dims = c('sdate', 'ftime', 'member')) +#'res <- CST_QuantileMapping(exp, obs, exp_cor, +#' sample_dims = c('ftime', 'member')) #'data(obsprecip) #'data(modprecip) #'exp <- modprecip$MOSS[1:10000] @@ -60,6 +70,27 @@ #'class(obs) <- 's2dv_cube' #'res <- CST_QuantileMapping(exp = exp, obs = obs, sample_dims = 'time', #' method = 'DIST') +#'# Example using different lenght of members and sdates: +#'exp <- lonlat_data$exp +#'exp$data <- exp$data[,,1:4,,,] +#'dim(exp$data) <- c(dataset = 1, member = 15, sdate = 4, ftime = 3, +#' lat = 22, lon = 53) +#'obs <- lonlat_data$obs +#'obs$data <- obs$data[,,1:4, ,,] +#'dim(obs$data) <- c(dataset = 1, member = 1, sdate = 4, ftime = 3, +#' lat = 22, lon = 53) +#'exp_cor <- lonlat_data$exp +#'exp_cor$data <- exp_cor$data[,1:5,5:6,,,] +#'dim(exp_cor$data) <- c(dataset = 1, member = 5, sdate = 2, ftime = 3, +#' lat = 22, lon = 53) +#'res <- CST_QuantileMapping(exp, obs, exp_cor, +#' sample_dims = c('sdate', 'ftime', 'member')) +#'exp_cor <- lonlat_data$exp +#'exp_cor$data <- exp_cor$data[,,5:6,,,] +#'dim(exp_cor$data) <- c(dataset = 1, member = 15, sdate = 2, ftime = 3, +#' lat = 22, lon = 53) +#'res <- CST_QuantileMapping(exp, obs, exp_cor, +#' sample_dims = c('sdate', 'ftime', 'member')) #'} #'@export CST_QuantileMapping <- function(exp, obs, exp_cor = NULL, @@ -79,18 +110,52 @@ CST_QuantileMapping <- function(exp, obs, exp_cor = NULL, stop("Parameter 'method' must be one of the following methods: ", "'PTF','DIST','RQUANT','QUANT','SSPLIN'.") } - dimnames <- names(dim(exp$data)) QMapped <- QuantileMapping(exp = exp$data, obs = obs$data, exp_cor = exp_cor$data, 
sample_dims = sample_dims, sample_length = sample_length, method = method, ncores = ncores, ...) - pos <- match(dimnames, names(dim(QMapped))) - QMapped <- aperm(QMapped, pos) - names(dim(QMapped)) <- dimnames - exp$data <- QMapped - exp$Datasets <- c(exp$Datasets, obs$Datasets) - exp$source_files <- c(exp$source_files, obs$source_files) + if (is.null(exp_cor)) { + exp$data <- QMapped + exp$source_files <- c(exp$source_files, obs$source_files) + } else { + exp_cor$data <- QMapped + exp_cor$source_files <- c(exp$source_files, obs$source_files, exp_cor$source_files) + exp <- exp_cor + } return(exp) } +#'Quantiles Mapping for seasonal or decadal forecast data +#' +#'@description This function is a wrapper from fitQmap and doQmap from package 'qmap'to be applied in CSTools objects of class 's2dv_cube'. The quantile mapping adjustment between an experiment, tipically a hindcast, and observations is applied to the experiment itself or to a provided forecast. +#' +#'@author Nuria Perez-Zanon, \email{nuria.perez@bsc.es} +#'@param exp a multi-dimensional array with named dimensions containing the hindcast. +#'@param obs a multi-dimensional array with named dimensions (the same as the provided in 'exp') containing the reference dataset. +#'@param exp_cor a multi-dimensional array with named dimensions in which the quantile mapping correction will be applied. If it is not specified, the correction is applied in object \code{exp}. +#'@param sample_dims a character vector indicating the dimensions that can be used as sample for the same distribution +#'@param sample_length a numeric value indicating the length of the timeseries window to be used as sample for the sample distribution and correction. By default, NULL, the total length of the timeseries will be used. +#'@param method a character string indicating the method to be used: 'PTF','DIST','RQUANT','QUANT','SSPLIN'. By default, the empirical quantile mapping 'QUANT' is used. 
+#'@param ncores an integer indicating the number of parallel processes to spawn for the use for parallel computation in multiple cores. +#'@param ... additional arguments passed to the method specified by \code{method}. +#' +#'@details The different methods are: +#'\itemize{ +#'\item{'PTF'} {fits a parametric transformations to the quantile-quantile relation of observed and modelled values. See \code{?qmap::fitQmapPTF}.} +#' \item{'DIST'} {fits a theoretical distribution to observed and to modelled time series. See \code{?qmap::fitQmapDIST}.} +#'\item{'RQUANT'} {estimates the values of the quantile-quantile relation of observed and modelled time series for regularly spaced quantiles using local linear least square regression. See \code{?qmap::fitQmapRQUANT}.} +#'\item{'QUANT'} {estimates values of the empirical cumulative distribution function of observed and modelled time series for regularly spaced quantiles. See \code{?qmap::fitQmapQUANT}.} +#'\item{'SSPLIN'} {fits a smoothing spline to the quantile-quantile plot of observed and modelled time series. See \code{?qmap::fitQmapSSPLIN}.}} +#'All methods accept some common arguments: +#'\itemize{ +#'\item{wet.day} {logical indicating whether to perform wet day correction or not.(Not available in 'DIS' method)} +#'\item{qstep} {NULL or a numeric value between 0 and 1.}} +#'@return an object of class \code{s2dv_cube} containing the experimental data after applying the quantile mapping correction. +#' +#'@import qmap +#'@import multiApply +#'@import abind +#' +#'@seealso \code{qmap::fitQmap} and \code{qmap::doQmap} +#'@export QuantileMapping <- function(exp, obs, exp_cor = NULL, sample_dims = 'ftime', sample_length = NULL, method = 'QUANT', ncores = NULL, ...) 
{ obsdims <- names(dim(obs)) @@ -134,12 +199,41 @@ QuantileMapping <- function(exp, obs, exp_cor = NULL, sample_dims = 'ftime', obs <- adrop(obs, drop = todrop) } } - if (!all(sample_dims %in% obsdims)) { newobsdims <- sample_dims[!sample_dims %in% obsdims] dim(obs) <- c(dim(obs), 1 : length(newobsdims)) names(dim(obs))[-c(1:length(obsdims))] <- newobsdims + } + + if (!is.null(exp_cor)) { + commondims <- exp_cordims[exp_cordims %in% expdims] + commondims <- names(which(unlist(lapply(commondims, function(x) { + dim(exp_cor)[exp_cordims == x] != dim(exp)[expdims == x]})))) + if (any(commondims %in% sample_dims)) { + todrop <- commondims[(commondims %in% sample_dims)] + todroppos <- match(todrop, sample_dims) + if (all(dim(exp_cor)[todrop] != 1)) { + warning(paste("The sample_dims", paste(todrop, collapse = " "), + "are not used when applying the", + "correction to 'exp_cor'")) + sample_dims <- list(sample_dims, sample_dims, sample_dims[-todroppos]) + } else { + exp_cor <- adrop(exp_cor, drop = todroppos) + } + } else { + todrop <- commondims[!(commondims %in% sample_dims)] + todrop <- match(todrop, obsdims) + if (all(dim(exp_cor)[todrop] != 1)) { + stop("Review parameter 'sample_dims' or the data dimensions ", + "since multiple dimensions with different length have ", + "being found in the data inputs that don't match with ", + "'sample_dims' parameter.") + } else { + exp_cor <- adrop(exp_cor, drop = todrop) + } + } } + if (!is.null(sample_length) & !is.numeric(sample_length)) { warning("Parameter 'sample_length' has not been correctly defined and ", "the whole length of the timeseries will be used.") @@ -174,8 +268,16 @@ QuantileMapping <- function(exp, obs, exp_cor = NULL, sample_dims = 'ftime', method = method, ncores = ncores)$output1 } pos <- match(expdims, names(dim(qmaped))) + out_names <- names(dim(exp)) + if (length(pos) < length(dim(qmaped))) { + toadd <- length(dim(qmaped)) - length(pos) + toadd <- seq(max(pos) + 1, max(pos) + toadd, 1) + pos <- c(pos, 
toadd) + new <- names(dim(qmaped))[names(dim(qmaped)) %in% out_names == FALSE] + out_names <- c(out_names, new) + } qmaped <- aperm(qmaped, pos) - dim(qmaped) <- dim(exp) + names(dim(qmaped)) <- out_names return(qmaped) } qmapcor <- function(exp, obs, exp_cor = NULL, sample_length = NULL, method = 'QUANT', diff --git a/R/CST_RainFARM.R b/R/CST_RainFARM.R index 73b075c4b0dcca97842d874e721ca5fd2bfce22f..0c94650f9e7ccdd925229794e438cea541b37f1d 100644 --- a/R/CST_RainFARM.R +++ b/R/CST_RainFARM.R @@ -383,42 +383,19 @@ RainFARM <- function(data, lon, lat, nf, weights = 1., nens = 1, # restoring NA values in their position: if (!is.null(posna)) { pos <- which(posna == FALSE) - if (pos[1] == 1) { - result_dims['rainfarm_samples'] <- 1 - x <- array(rep(NA,prod(result_dims)), result_dims) - r <- abind(x, r, along = 3) - names(dim(r)) <- names(result_dims) - pos <- pos[-1] - } - result_dims['rainfarm_samples'] <- 1 - if ((pos[1] - 1) < dim(r)['rainfarm_samples']) { - x <- max(which((pos - 1) < dim(r)['rainfarm_samples'])) - pos_internal <- pos[1:x] - if (pos[length(pos)] > pos[x]) { - pos <- pos[(x + 1) : length(pos)] - } else { - pos <- NULL - } - } else { - pos_internal <- NULL - } - if (!is.null(pos_internal)) { - r <- lapply(pos_internal, - function(x) { - rrr <- Subset(r, along = 'rainfarm_samples', indices = 1:(x-1)) - rrrr <- Subset(r, along = 'rainfarm_samples', indices = x:dim(r)['rainfarm_samples']) - r <- abind(rrr, array(NA, result_dims), rrrr, along = 3) - names(dim(r)) <- names(result_dims) - return(r) - })[[length(pos)]] - } - if (!is.null(pos)) { - result_dims['rainfarm_samples'] <- length(pos) - x <- array(rep(NA,prod(result_dims)), result_dims) - r <- abind(r, x, along = 3) - } - names(dim(r)) <- names(result_dims) + dimdata <- dim(r) + xdim <- which(names(dimdata) == 'rainfarm_samples') + dimdata[xdim] <- dimdata[xdim] + length(pos) + new <- array(NA, dimdata) + posT <- which(posna == TRUE) + i = 1 + invisible(lapply(posT, function(x) { + new[,,x,] <<- 
r[,,i,] + i <<- i + 1 + })) + #names(dim(r)) <- names(result_dims) warning("Missing values found in the samples.") + r <- new } return(r) } diff --git a/R/CST_SaveExp.R b/R/CST_SaveExp.R index ac377101538d846e2e154dbb7f0f9a2e710e231e..9c689ff7156d825d22cdb1bde8d4a6785c854dbc 100644 --- a/R/CST_SaveExp.R +++ b/R/CST_SaveExp.R @@ -40,7 +40,7 @@ CST_SaveExp <- function(data, destination = "./CST_Data") { "as output by CSTools::CST_Load.") } sdates <- lapply(1:length(data$Datasets), function(x) { - data$Datasets[[x]]$InitializationDates[[1]]})[[1]] + unique(data$Datasets[[x]]$InitializationDates)})[[1]] if (!is.character(attributes(data$Variable)$units)) { units <- attributes(data$Variable)$variable$units } else { diff --git a/R/CST_SplitDim.R b/R/CST_SplitDim.R index 6332684f75ebc404f2c772f9cd0a3edfebcb8482..deb60760a2fe294b95770bbd6824fbd026e09b16 100644 --- a/R/CST_SplitDim.R +++ b/R/CST_SplitDim.R @@ -9,7 +9,9 @@ #'@param indices a vector of numeric indices or dates. If left at NULL, the dates provided in the s2dv_cube object (element Dates) will be used. #'@param freq a character string indicating the frequency: by 'day', 'month' and 'year' or 'monthly' (by default). 'month' identifies months between 1 and 12 independently of the year they belong to, while 'monthly' differenciates months from different years. #'@param new_dim_name a character string indicating the name of the new dimension. +#'@param insert_ftime an integer indicating the number of time steps to add at the begining of the time series. #' +#'@details Parameter 'insert_ftime' has been included for the case of using daily data, requiring split the temporal dimensions by months (or similar) and the first lead time doesn't correspondt to the 1st day of the month. In this case, the insert_ftime could be used, to get a final output correctly organized. E.g.: leadtime 1 is the 2nd of November and the input time series extend to the 31st of December. 
When requiring split by month with \code{insert_ftime = 1}, the 'monthly' dimension of length two will indicate the month (position 1 for November and position 2 for December), dimension 'time' will be length 31. For November, the position 1 and 31 will be NAs, while from position 2 to 30 will be filled with the data provided. This allows to select correctly days through time dimension. #'@import abind #'@importFrom s2dverification Subset #'@examples @@ -35,11 +37,44 @@ #'dim(new_data$data) #'@export CST_SplitDim <- function(data, split_dim = 'time', indices = NULL, - freq = 'monthly', new_dim_name = NULL) { + freq = 'monthly', new_dim_name = NULL, insert_ftime = NULL) { if (!inherits(data, 's2dv_cube')) { - stop("Parameter 'data' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") + stop("Parameter 'data' must be of the class 's2dv_cube', ", + "as output by CSTools::CST_Load.") } + if (!is.null(insert_ftime)) { + if (!is.numeric(insert_ftime)) { + stop("Parameter 'insert_ftime' should be an integer.") + } else { + if (length(insert_ftime) > 1) { + warning("Parameter 'insert_ftime' must be of length 1, and only the", + " first element will be used.") + insert_ftime <- insert_ftime[1] + } + # adding NAs at the beginning of the data in ftime dim + ftimedim <- which(names(dim(data$data)) == 'ftime') + dims <- dim(data$data) + dims[ftimedim] <- insert_ftime + empty_array <- array(NA, dims) + data$data <- abind(empty_array, data$data, along = ftimedim) + names(dim(data$data)) <- names(dims) + # adding dates to Dates for the new NAs introduced + if ((data$Dates[[1]][2] - data$Dates[[1]][1]) == 1) { + timefreq <- 'days' + } else { + timefreq <- 'months' + warning("Time frequency of forecast time is considered monthly.") + } + start <- data$Dates[[1]] + dim(start) <- c(ftime = length(start)/dims['sdate'], sdate = dims['sdate']) + #new <- array(NA, prod(dim(data$data)[c('ftime', 'sdate')])) + # Pending fix transform to UTC when concatenating + 
data$Dates$start <- do.call(c, lapply(1:dim(start)[2], function(x) { + seq(start[1,x] - as.difftime(insert_ftime, + units = timefreq), + start[dim(start)[1],x], by = timefreq, tz = "UTC")})) + } + } if (is.null(indices)) { if (any(split_dim %in% c('ftime', 'time', 'sdate'))) { if (is.list(data$Dates)) { diff --git a/man/CST_QuantileMapping.Rd b/man/CST_QuantileMapping.Rd index e78c8d563745e077b54edd3f8d904e07510c9bf6..ec5fc8a34aeb5b7dab411383c795c3f26b3437c8 100644 --- a/man/CST_QuantileMapping.Rd +++ b/man/CST_QuantileMapping.Rd @@ -51,6 +51,7 @@ All methods accepts some common arguments: \itemize{ \item{wet.day} {logical indicating whether to perform wet day correction or not.(Not available in 'DIS' method)} \item{qstep} {NULL or a numeric value between 0 and 1.}} +When providing a forecast to be corrected through the parameter \code{exp_cor}, some inputs might need to be modified. The quantile correction is computed by comparing objects passed through 'exp' and 'obs' parameters, this correction will be later applied to the forecast provided in 'exp_cor'. Imagine the case of 'exp' and 'obs' having several start dates, stored using a dimension e.g. 'sdate', 'sample_dims' include this dimension 'sdate' and 'exp_cor' has forecasts for several sdates but different from the ones in 'exp'. In this case, the correction computed with 'exp' and 'obs' would be applied for each 'sdate' of 'exp_cor' separately. This example corresponds to a case of splitting a dataset into a training set and a validation set. 
} \examples{ library(qmap) @@ -70,6 +71,14 @@ exp <- lonlat_data$exp obs <- lonlat_data$obs res <- CST_QuantileMapping(exp, obs) +exp_cor <- exp +exp_cor$data <- exp_cor$data[,,1,,,] +dim(exp_cor$data) <- c(dataset = 1, member = 15, sdate = 1, ftime = 3, + lat = 22, lon = 53) +res <- CST_QuantileMapping(exp, obs, exp_cor, + sample_dims = c('sdate', 'ftime', 'member')) +res <- CST_QuantileMapping(exp, obs, exp_cor, + sample_dims = c('ftime', 'member')) data(obsprecip) data(modprecip) exp <- modprecip$MOSS[1:10000] @@ -82,6 +91,27 @@ obs <- list(data = obs) class(obs) <- 's2dv_cube' res <- CST_QuantileMapping(exp = exp, obs = obs, sample_dims = 'time', method = 'DIST') +# Example using different lenght of members and sdates: +exp <- lonlat_data$exp +exp$data <- exp$data[,,1:4,,,] +dim(exp$data) <- c(dataset = 1, member = 15, sdate = 4, ftime = 3, + lat = 22, lon = 53) +obs <- lonlat_data$obs +obs$data <- obs$data[,,1:4, ,,] +dim(obs$data) <- c(dataset = 1, member = 1, sdate = 4, ftime = 3, + lat = 22, lon = 53) +exp_cor <- lonlat_data$exp +exp_cor$data <- exp_cor$data[,1:5,5:6,,,] +dim(exp_cor$data) <- c(dataset = 1, member = 5, sdate = 2, ftime = 3, + lat = 22, lon = 53) +res <- CST_QuantileMapping(exp, obs, exp_cor, + sample_dims = c('sdate', 'ftime', 'member')) +exp_cor <- lonlat_data$exp +exp_cor$data <- exp_cor$data[,,5:6,,,] +dim(exp_cor$data) <- c(dataset = 1, member = 15, sdate = 2, ftime = 3, + lat = 22, lon = 53) +res <- CST_QuantileMapping(exp, obs, exp_cor, + sample_dims = c('sdate', 'ftime', 'member')) } } \seealso{ diff --git a/man/CST_SplitDim.Rd b/man/CST_SplitDim.Rd index 11aca488f25b43346911c1735642b2e2eec5d360..80a94da3c8735a4fe76ecb202af69d788732bf36 100644 --- a/man/CST_SplitDim.Rd +++ b/man/CST_SplitDim.Rd @@ -9,7 +9,8 @@ CST_SplitDim( split_dim = "time", indices = NULL, freq = "monthly", - new_dim_name = NULL + new_dim_name = NULL, + insert_ftime = NULL ) } \arguments{ @@ -22,10 +23,15 @@ CST_SplitDim( \item{freq}{a character string indicating 
the frequency: by 'day', 'month' and 'year' or 'monthly' (by default). 'month' identifies months between 1 and 12 independently of the year they belong to, while 'monthly' differenciates months from different years.} \item{new_dim_name}{a character string indicating the name of the new dimension.} + +\item{insert_ftime}{an integer indicating the number of time steps to add at the beginning of the time series.} } \description{ This function split a dimension in two. The user can select the dimension to split and provide indices indicating how to split that dimension or dates and the frequency expected (monthly or by day, month and year). The user can also provide a numeric frequency indicating the length of each division. } +\details{ +Parameter 'insert_ftime' has been included for the case of using daily data, requiring split the temporal dimensions by months (or similar) and the first lead time doesn't correspond to the 1st day of the month. In this case, the insert_ftime could be used, to get a final output correctly organized. E.g.: leadtime 1 is the 2nd of November and the input time series extend to the 31st of December. When requiring split by month with \code{insert_ftime = 1}, the 'monthly' dimension of length two will indicate the month (position 1 for November and position 2 for December), dimension 'time' will be length 31. For November, the position 1 and 31 will be NAs, while from position 2 to 30 will be filled with the data provided. This allows to select correctly days through time dimension. 
+} \examples{ data <- 1 : 20 diff --git a/man/QuantileMapping.Rd b/man/QuantileMapping.Rd new file mode 100644 index 0000000000000000000000000000000000000000..8771e5495022218e2f373f73b91d4f04c1361745 --- /dev/null +++ b/man/QuantileMapping.Rd @@ -0,0 +1,60 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/CST_QuantileMapping.R +\name{QuantileMapping} +\alias{QuantileMapping} +\title{Quantiles Mapping for seasonal or decadal forecast data} +\usage{ +QuantileMapping( + exp, + obs, + exp_cor = NULL, + sample_dims = "ftime", + sample_length = NULL, + method = "QUANT", + ncores = NULL, + ... +) +} +\arguments{ +\item{exp}{a multi-dimensional array with named dimensions containing the hindcast.} + +\item{obs}{a multi-dimensional array with named dimensions (the same as the provided in 'exp') containing the reference dataset.} + +\item{exp_cor}{a multi-dimensional array with named dimensions in which the quantile mapping correction will be applied. If it is not specified, the correction is applied in object \code{exp}.} + +\item{sample_dims}{a character vector indicating the dimensions that can be used as sample for the same distribution} + +\item{sample_length}{a numeric value indicating the length of the timeseries window to be used as sample for the sample distribution and correction. By default, NULL, the total length of the timeseries will be used.} + +\item{method}{a character string indicating the method to be used: 'PTF','DIST','RQUANT','QUANT','SSPLIN'. By default, the empirical quantile mapping 'QUANT' is used.} + +\item{ncores}{an integer indicating the number of parallel processes to spawn for the use for parallel computation in multiple cores.} + +\item{...}{additional arguments passed to the method specified by \code{method}.} } \value{ +an object of class \code{s2dv_cube} containing the experimental data after applying the quantile mapping correction. 
+ +} +\description{ +This function is a wrapper from fitQmap and doQmap from package 'qmap' to be applied in CSTools objects of class 's2dv_cube'. The quantile mapping adjustment between an experiment, typically a hindcast, and observations is applied to the experiment itself or to a provided forecast. +} +\details{ +The different methods are: +\itemize{ +\item{'PTF'} {fits a parametric transformations to the quantile-quantile relation of observed and modelled values. See \code{?qmap::fitQmapPTF}.} +\item{'DIST'} {fits a theoretical distribution to observed and to modelled time series. See \code{?qmap::fitQmapDIST}.} +\item{'RQUANT'} {estimates the values of the quantile-quantile relation of observed and modelled time series for regularly spaced quantiles using local linear least square regression. See \code{?qmap::fitQmapRQUANT}.} +\item{'QUANT'} {estimates values of the empirical cumulative distribution function of observed and modelled time series for regularly spaced quantiles. See \code{?qmap::fitQmapQUANT}.} +\item{'SSPLIN'} {fits a smoothing spline to the quantile-quantile plot of observed and modelled time series. 
See \code{?qmap::fitQmapSSPLIN}.}} +All methods accepts some common arguments: +\itemize{ +\item{wet.day} {logical indicating whether to perform wet day correction or not.(Not available in 'DIS' method)} +\item{qstep} {NULL or a numeric value between 0 and 1.}} +} +\seealso{ +\code{qmap::fitQmap} and \code{qmap::doQmap} +} +\author{ +Nuria Perez-Zanon, \email{nuria.perez@bsc.es} +} diff --git a/tests/testthat/test-CST_QuantileMapping.R b/tests/testthat/test-CST_QuantileMapping.R index a03680f7243b5fe0f82bf112371c0dcb5090dbc9..f8482967627a30bbdc7873200754b920df85d58a 100644 --- a/tests/testthat/test-CST_QuantileMapping.R +++ b/tests/testthat/test-CST_QuantileMapping.R @@ -1,6 +1,5 @@ context("Generic tests") test_that("Sanity checks", { -library(qmap) expect_error( CST_QuantileMapping(exp = 1), paste0("Parameter 'exp' and 'obs' must be of the class 's2dv_cube', ", @@ -90,8 +89,25 @@ library(qmap) expect_equal(CST_QuantileMapping(exp = exp, obs = obs, sample_dims = 'time', method = 'SSPLIN'), exp) library(CSTools) - expect_equal(CST_QuantileMapping(exp = lonlat_data$exp, obs = lonlat_data$obs), - CST_QuantileMapping(exp = lonlat_data$exp, obs = lonlat_data$obs, - exp_cor = lonlat_data$exp)) + expect_error(CST_QuantileMapping(exp = lonlat_data$exp, obs = lonlat_data$obs, + exp_cor = lonlat_data$exp), + paste0("Review parameter 'sample_dims' or the data dimensions ", + "since multiple dimensions with different length have being ", + "found in the data inputs that don't match with 'sample_dims' parameter.")) + exp <- lonlat_data$exp + exp$data <- exp$data[,,1:4,,,] + dim(exp$data) <- c(dataset = 1, member = 15, sdate = 4, ftime = 3, + lat = 22, lon = 53) + obs <- lonlat_data$obs + obs$data <- obs$data[,,1:4, ,,] + dim(obs$data) <- c(dataset = 1, member = 1, sdate = 4, ftime = 3, + lat = 22, lon = 53) + exp_cor <- lonlat_data$exp + exp_cor$data <- exp_cor$data[,,5:6,,,] + dim(exp_cor$data) <- c(dataset = 1, member = 15, sdate = 2, ftime = 3, + lat = 22, lon = 53) + 
expect_warning(CST_QuantileMapping(exp, obs, exp_cor, + sample_dims = c('sdate', 'ftime', 'member')), + "The sample_dims sdate are not used when applying the correction to 'exp_cor'") })