Commits (23)
@@ -84,4 +84,4 @@ VignetteBuilder: knitr
License: Apache License 2.0
Encoding: UTF-8
LazyData: true
RoxygenNote: 7.0.2
RoxygenNote: 7.0.1
@@ -12,12 +12,14 @@ export(CST_BEI_Weighting)
export(CST_BiasCorrection)
export(CST_Calibration)
export(CST_CategoricalEnsCombination)
export(CST_DynBiasCorrection)
export(CST_EnsClustering)
export(CST_Load)
export(CST_MergeDims)
export(CST_MultiEOF)
export(CST_MultiMetric)
export(CST_MultivarRMSE)
export(CST_ProxiesAttractor)
export(CST_QuantileMapping)
export(CST_RFSlope)
export(CST_RFTemp)
@@ -29,6 +31,7 @@ export(CST_SplitDim)
export(CST_WeatherRegimes)
export(Calibration)
export(CategoricalEnsCombination)
export(DynBiasCorrection)
export(EnsClustering)
export(MergeDims)
export(MultiEOF)
@@ -38,6 +41,8 @@ export(PlotForecastPDF)
export(PlotMostLikelyQuantileMap)
export(PlotPDFsOLE)
export(PlotTriangles4Categories)
export(Predictability)
export(ProxiesAttractor)
export(QuantileMapping)
export(RFSlope)
export(RFTemp)
@@ -115,4 +120,4 @@ importFrom(utils,head)
importFrom(utils,read.table)
importFrom(utils,tail)
importFrom(verification,verify)
useDynLib(CSTools)
useDynLib(CSTools, .registration = TRUE)
@@ -3,6 +3,8 @@
**Submission date to CRAN: XX-06-2021**
- New features:
+ Dynamical Bias Correction method: `CST_ProxiesAttractor` and `CST_DynBiasCorrection`
(optionally `Predictability`)
+ CST_BiasCorrection and BiasCorrection allow calibrating a forecast with the calibration obtained from the hindcast by using the parameter 'exp_cor'.
- Fixes:
......
#'@rdname CST_DynBiasCorrection
#'@title Performing a Bias Correction conditioned by the dynamical
#'properties of the data.
#'
#'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
#'@author Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
#'@author Veronica Torralba, \email{veronica.torralba@cmcc.it}
#'@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
#'
#'@description This function performs a bias correction conditioned by the
#'dynamical properties of the dataset. It internally uses the function
#''Predictability' to divide into terciles the two dynamical proxies
#'computed with 'CST_ProxiesAttractor'. A bias correction
#'between the model and the observations is performed using the division into
#'terciles of the local dimension 'dim' and the inverse of the persistence
#''theta'. For instance, model values with lower 'dim' will be corrected with
#'observed values with lower 'dim', and likewise for 'theta'. The function
#'gives two options of bias correction: one for 'dim' and one for 'theta'.
#'
#'@references Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D.,
#'and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large
#'scale atmospheric predictability. Nature Communications, 10(1), 1316.
#'DOI: https://doi.org/10.1038/s41467-019-09305-8
#'@references Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
#' Dynamical proxies of North Atlantic predictability and extremes.
#' Scientific Reports, 7, 41278.
#'
#'@param exp an 's2dv_cube' object with the experiment data
#'@param obs an s2dv_cube object with the reference data
#'@param method a character string indicating the method to apply bias
#'correction among these ones: "PTF","RQUANT","QUANT","SSPLIN"
#'@param wetday logical indicating whether to perform wet-day correction
#'or not, OR a numeric threshold below which all values are set to zero (by
#'default it is set to FALSE).
#'@param proxy a character string indicating the proxy for local dimension
#' 'dim' or inverse of persistence 'theta' to apply the dynamical
#' conditioned bias correction method.
#'@param quanti a number lower than 1 indicating the quantile used to compute
#'the local dimension and theta
#'@param ncores The number of cores to use in parallel computation
#'
#'@return dynbias an 's2dv_cube' object with the bias correction performed
#'conditioned by the local dimension 'dim' or the inverse of the persistence 'theta'.
#'
#'@examples
#'# Example 1: simple data in s2dv_cube format
#' set.seed(1)
#' expL <- rnorm(1:2000)
#' dim(expL) <- c(time = 100, lat = 4, lon = 5)
#' obsL <- c(rnorm(1:1980), expL[1, , ] * 1.2)
#' dim(obsL) <- c(time = 100, lat = 4, lon = 5)
#' time_obsL <- paste(rep("01", 100), rep("01", 100), 1920:2019, sep = "-")
#' time_expL <- paste(rep("01", 100), rep("01", 100), 1920:2019, sep = "-")
#' lon <- seq(-1, 5, 1.5)
#' lat <- seq(30, 35, 1.5)
#' # qm = 0.98 would be too high for this short dataset: the quantile
#' # requirement may not be met, in which case a lower qm, for instance
#' # qm = 0.60, should be selected
#' expL <- s2dv_cube(data = expL, lat = lat, lon = lon,
#'                   Dates = list(start = time_expL, end = time_expL))
#' obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon,
#'                   Dates = list(start = time_obsL, end = time_obsL))
#' # to use DynBiasCorrection
#' dynbias1 <- DynBiasCorrection(exp = expL$data, obs = obsL$data, proxy = "dim",
#'                               quanti = 0.6)
#' # to use CST_DynBiasCorrection
#' dynbias2 <- CST_DynBiasCorrection(exp = expL, obs = obsL, proxy = "dim",
#'                                   quanti = 0.6)
#'
#'@export
CST_DynBiasCorrection <- function(exp, obs, method = 'QUANT', wetday = FALSE,
                                  proxy = "dim", quanti, ncores = NULL) {
if (!inherits(obs, 's2dv_cube')) {
stop("Parameter 'obs' must be of the class 's2dv_cube', ",
"as output by CSTools::CST_Load.")
}
if (!inherits(exp, 's2dv_cube')) {
stop("Parameter 'exp' must be of the class 's2dv_cube', ",
"as output by CSTools::CST_Load.")
}
exp$data <- DynBiasCorrection(exp = exp$data, obs = obs$data, method = method,
                              wetday = wetday, proxy = proxy, quanti = quanti,
                              ncores = ncores)
return(exp)
}
#'@rdname DynBiasCorrection
#'@title Performing a Bias Correction conditioned by the dynamical
#'properties of the data.
#'
#'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
#'@author Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
#'@author Veronica Torralba, \email{veronica.torralba@cmcc.it}
#'@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
#'
#'@description This function performs a bias correction conditioned by the
#'dynamical properties of the dataset. It internally uses the function
#''Predictability' to divide into terciles the two dynamical proxies
#'computed with 'CST_ProxiesAttractor'. A bias correction
#'between the model and the observations is performed using the division into
#'terciles of the local dimension 'dim' and the inverse of the persistence
#''theta'. For instance, model values with lower 'dim' will be corrected with
#'observed values with lower 'dim', and likewise for 'theta'. The function
#'gives two options of bias correction: one for 'dim' and one for 'theta'.
#'
#'@references Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D.,
#'and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large
#'scale atmospheric predictability. Nature Communications, 10(1), 1316.
#'DOI: https://doi.org/10.1038/s41467-019-09305-8
#'@references Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
#' Dynamical proxies of North Atlantic predictability and extremes.
#' Scientific Reports, 7, 41278.
#'
#'@param exp a multidimensional array with named dimensions with the
#'experiment data
#'@param obs a multidimensional array with named dimensions with the
#'observation data
#'@param method a character string indicating the method to apply bias
#'correction among these ones:
#'"PTF","RQUANT","QUANT","SSPLIN"
#'@param wetday logical indicating whether to perform wet-day correction
#'or not, OR a numeric threshold below which all values are set to zero (by
#'default it is set to FALSE).
#'@param proxy a character string indicating the proxy for local dimension
#''dim' or inverse of persistence 'theta' to apply the dynamical conditioned
#'bias correction method.
#'@param quanti a number lower than 1 indicating the quantile used to compute
#'the local dimension and theta
#'@param ncores The number of cores to use in parallel computation
#'
#'@return a multidimensional array with named dimensions containing the bias-corrected
#'data, conditioned by the local dimension 'dim' or the inverse of the persistence 'theta'.
#'
#'@import multiApply
#'@importFrom s2dverification Subset
#'@import qmap
#'@examples
#'expL <- rnorm(1:2000)
#'dim(expL) <- c(time = 100, lat = 4, lon = 5)
#'obsL <- c(rnorm(1:1980), expL[1, , ] * 1.2)
#'dim(obsL) <- c(time = 100, lat = 4, lon = 5)
#'dynbias <- DynBiasCorrection(exp = expL, obs = obsL, method = 'QUANT',
#'                             proxy = "dim", quanti = 0.6)
#'@export
DynBiasCorrection <- function(exp, obs, method = 'QUANT', wetday = FALSE,
                              proxy = "dim", quanti, ncores = NULL) {
if (is.null(obs)) {
stop("Parameter 'obs' cannot be NULL.")
}
if (is.null(exp)) {
stop("Parameter 'exp' cannot be NULL.")
}
if (is.null(method)) {
stop("Parameter 'method' cannot be NULL.")
}
if (is.null(quanti)) {
stop("Parameter 'quanti' cannot be NULL.")
}
if (is.null(proxy)) {
stop("Parameter 'proxy' cannot be NULL.")
}
dims <- dim(exp)
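# Compute the attractor proxies (local dimension 'dim' and inverse of the
# persistence 'theta') for obs and exp, and their tercile positions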
attractor.obs <- ProxiesAttractor(data = obs, quanti = quanti)
predyn.obs <- Predictability(dim = attractor.obs$dim,
theta = attractor.obs$theta)
attractor.exp <- ProxiesAttractor(data = exp, quanti = quanti)
predyn.exp <- Predictability(dim = attractor.exp$dim,
theta = attractor.exp$theta)
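# If the data lack a 'time' dimension but have 'sdate' and 'ftime', merge them
# into a single 'time' dimension before applying the correction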
if (!(any(names(dim(exp)) %in% 'time'))){
if (any(names(dim(exp)) %in% 'sdate')) {
if (any(names(dim(exp)) %in% 'ftime')) {
exp <- MergeDims(exp, merge_dims = c('ftime', 'sdate'),
rename_dim = 'time')
}
}
}
if (!(any(names(dim(obs)) %in% 'time'))){
if (any(names(dim(obs)) %in% 'sdate')) {
if (any(names(dim(obs)) %in% 'ftime')) {
obs <- MergeDims(obs, merge_dims = c('ftime', 'sdate'),
rename_dim = 'time')
}
}
}
dim_exp <- dim(exp)
names_to_check <- names(dim_exp)[which(names(dim_exp) %in%
c('time', 'lat', 'lon', 'sdate') == FALSE)]
if (length(names_to_check) > 0) {
dim_obs <- dim(obs)
if (any(names(dim_obs) %in% names_to_check)) {
if (any(dim_obs[which(names(dim_obs) %in% names_to_check)] !=
dim_exp[which(names(dim_exp) %in% names_to_check)])) {
for (i in names_to_check) {
pos <- which(names(dim_obs) == i)
names(dim(obs))[pos] <- ifelse(dim_obs[pos] !=
dim_exp[which(names(dim_exp) == i)],
paste0('obs_', names(dim_obs[pos])),
names(dim(obs)[pos]))
}
warning("Common dimension names with different length are renamed.")
}
}
}
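# Apply the quantile mapping along 'time' within each tercile class of the
# chosen proxy, pairing the tercile positions of the experiment with those of
# the observations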
if (proxy == "dim") {
adjusted <- Apply(list(exp, obs), target_dims = 'time',
fun = .dynbias, method, wetday,
predyn.exp = predyn.exp$pred.dim$pos.d,
predyn.obs = predyn.obs$pred.dim$pos.d,
ncores = ncores, output_dims = 'time')$output1
} else if (proxy == "theta") {
adjusted <- Apply(list(exp, obs), target_dims = 'time',
fun = .dynbias, method, wetday,
predyn.exp = predyn.exp$pred.theta$pos.t,
predyn.obs = predyn.obs$pred.theta$pos.t,
ncores = ncores, output_dims = 'time')$output1
} else {
stop ("Parameter 'proxy' must be set as 'dim' or 'theta'.")
}
if (any(names(dim(adjusted)) %in% 'memberObs')) {
  if (dim(adjusted)['memberObs'] == 1) {
    adjusted <- Subset(adjusted, along = 'memberObs', indices = 1, drop = 'selected')
  } else {
    print('Dimension member in obs changed to memberObs')
  }
}
if (any(names(dim(adjusted)) %in% 'datasetObs')) {
  if (dim(adjusted)['datasetObs'] == 1) {
    adjusted <- Subset(adjusted, along = 'datasetObs', indices = 1, drop = 'selected')
  } else {
    print('Dimension dataset in obs changed to datasetObs')
  }
}
return(adjusted)
}
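# .dynbias corrects the experiment within each of the three tercile classes:
# values whose proxy falls in tercile x of the experiment are quantile-mapped
# against observed values whose proxy falls in tercile x of the observations,
# and the corrected values are written back to their original time positions.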
.dynbias <- function(exp, obs, method, wetday, predyn.exp, predyn.obs) {
result <- array(rep(NA, length(exp)))
res <- lapply(1:3, function(x) {
exp_sub <- exp[predyn.exp[[x]]]
obs_sub <- obs[predyn.obs[[x]]]
adjust <- .qbiascorrection(exp_sub, obs_sub, method, wetday)
result[predyn.exp[[x]]] <<- adjust
return(NULL)
})
return(result)
}
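# .qbiascorrection wraps the 'qmap' package: fitQmap() estimates the transfer
# function from the observed and experiment subsets and doQmap() applies it to
# the experiment. A minimal standalone sketch with hypothetical vectors
# (obs_sub, exp_sub), assuming the 'qmap' package is attached:
#   qm.fit <- fitQmap(obs_sub, exp_sub, method = "QUANT", qstep = 0.01,
#                     wet.day = FALSE)
#   corrected <- doQmap(exp_sub, qm.fit, type = "tricub")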
.qbiascorrection <- function(expX, obsX, method, wetday) {
## functions fitQmap and doQmap
if (method == "PTF") {
qm.fit <- fitQmap(obsX, expX, method = "PTF", transfun = "expasympt",
cost = "RSS", wet.day = wetday)
qmap <- doQmap(expX, qm.fit)
} else if (method == "QUANT") {
qm.fit <- fitQmap(obsX, expX, method = "QUANT", qstep = 0.01, wet.day = wetday)
qmap <- doQmap(expX, qm.fit, type = "tricub")
} else if (method == "RQUANT") {
qm.fit <- fitQmap(obsX, expX, method = "RQUANT", qstep = 0.01, wet.day = wetday)
qmap <- doQmap(expX, qm.fit, type = "linear")
} else if (method == "SSPLIN") {
qm.fit <- fitQmap(obsX, expX, qstep = 0.01, method = "SSPLIN", wet.day = wetday)
qmap <- doQmap(expX, qm.fit)
} else {
stop ("Parameter 'method' doesn't match any of the available methods.")
}
return(qmap)
}
#'@rdname CST_ProxiesAttractor
#'@title Computing two dynamical proxies of the attractor in s2dv_cube.
#'
#'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
#'@author Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
#'@author Veronica Torralba, \email{veronica.torralba@cmcc.it}
#'@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
#'
#'@description This function computes two dynamical proxies of the attractor:
#'the local dimension (d) and the inverse of the persistence (theta) for an
#''s2dv_cube' object.
#'These two parameters will be used as a condition for the computation of dynamical
#'scores to measure predictability and to compute bias correction conditioned by
#'the dynamics with the function DynBiasCorrection.
#'Function based on the MATLAB code (davide.faranda@lsce.ipsl.fr) used in:
#'@references Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., and Yiou, P. (2019).
#' The hammam effect or how a warm ocean enhances large scale atmospheric predictability.
#' Nature Communications, 10(1), 1316. DOI: https://doi.org/10.1038/s41467-019-09305-8
#'@references Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
#' Dynamical proxies of North Atlantic predictability and extremes.
#' Scientific Reports, 7, 41278.
#'
#'@param data an 's2dv_cube' object with the data to create the attractor. Must be a matrix with the
#'time steps in rows and the grid points in columns, i.e. data(time, grid).
#'
#'@param quanti a number lower than 1 indicating the quantile used to compute the local dimension and theta
#'
#'@param ncores The number of cores to use in parallel computation
#'
#'@return The same 's2dv_cube' object with the 'data' element replaced by a list
#'containing the local dimension 'dim' and the inverse of the persistence 'theta'.
#'
#'@examples
#'# Example 1: Computing the attractor using simple s2dv data
#'attractor <- CST_ProxiesAttractor(data = lonlat_data$obs, quanti = 0.6)
#'
#'@export
CST_ProxiesAttractor <- function(data, quanti, ncores = NULL){
if (!inherits(data, 's2dv_cube')) {
stop("Parameter 'data' must be of the class 's2dv_cube', ",
"as output by CSTools::CST_Load.")
}
if (is.null(quanti)) {
stop("Parameter 'quanti' cannot be NULL.")
}
data$data <- ProxiesAttractor(data = data$data, quanti = quanti, ncores = ncores)
return(data)
}
#'@rdname ProxiesAttractor
#'
#'@title Computing two dynamical proxies of the attractor.
#'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
#'@author Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
#'@author Veronica Torralba, \email{veronica.torralba@cmcc.it}
#'@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
#'
#'@description This function computes two dynamical proxies of the attractor:
#'the local dimension (d) and the inverse of the persistence (theta).
#'These two parameters will be used as a condition for the computation of dynamical
#'scores to measure predictability and to compute bias correction conditioned by
#'the dynamics with the function DynBiasCorrection.
#'Function based on the MATLAB code (davide.faranda@lsce.ipsl.fr) used in:
#'@references Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., and Yiou, P. (2019).
#' The hammam effect or how a warm ocean enhances large scale atmospheric predictability.
#' Nature Communications, 10(1), 1316. DOI: https://doi.org/10.1038/s41467-019-09305-8
#'@references Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
#' Dynamical proxies of North Atlantic predictability and extremes.
#' Scientific Reports, 7, 41278.
#'
#'@param data a multidimensional array with named dimensions to create the attractor. It requires a temporal dimension named 'time' and spatial dimensions called 'lat' and 'lon', or 'latitude' and 'longitude', or 'grid'.
#'@param quanti a number lower than 1 indicating the quantile used to compute the local dimension and theta
#'@param ncores The number of cores to use in parallel computation.
#'
#'@return A list of two elements: 'dim' containing the local dimension and 'theta'
#'containing the inverse of the persistence, both along the 'time' dimension.
#'
#'@import multiApply
#'
#'@examples
#'# Example 1: Computing the attractor using simple data
#'# Creating an example of matrix data(time,grids):
#'mat <- array(rnorm(36 * 40), c(time = 36, grid = 40))
#'qm <- 0.90 # imposing a threshold
#'Attractor <- ProxiesAttractor(data = mat, quanti = qm)
#'# to plot the result
#'time = c(1:length(Attractor$theta))
#'layout(matrix(c(1, 3, 2, 3), 2, 2))
#'plot(time, Attractor$dim, xlab = 'time', ylab = 'd',
#' main = 'local dimension', type = 'l')
#'plot(time, Attractor$theta, xlab = 'time', ylab = 'theta', main = 'theta')
#'plot(Attractor$dim, Attractor$theta, col = 'blue',
#' main = "Proxies of the Attractor",
#' xlab = "local dimension", ylab = "theta", lwd = 8, 'p')
#'
#'@export
ProxiesAttractor <- function(data, quanti, ncores = NULL){
if (is.null(data)) {
stop("Parameter 'data' cannot be NULL.")
}
if (is.null(quanti)) {
stop("Parameter 'quanti' is mandatory")
}
if (any(names(dim(data)) %in% 'sdate')) {
if (any(names(dim(data)) %in% 'ftime')) {
data <- MergeDims(data, merge_dims = c('ftime', 'sdate'),
rename_dim = 'time')
}
}
if (!(any(names(dim(data)) %in% 'time'))){
stop("Parameter 'data' must have a temporal dimension named 'time'.")
}
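# Merge the spatial dimensions ('lat'/'lon' or 'latitude'/'longitude') into a
# single 'grid' dimension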
if (any(names(dim(data)) %in% 'lat')) {
if (any(names(dim(data)) %in% 'lon')) {
data <- MergeDims(data, merge_dims = c('lon', 'lat'),
rename_dim = 'grid')
}
}
if (any(names(dim(data)) %in% 'latitude')) {
if (any(names(dim(data)) %in% 'longitude')) {
data <- MergeDims(data, merge_dims = c('longitude', 'latitude'),
rename_dim = 'grid')
}
}
if(!(any(names(dim(data)) %in% 'grid'))){
stop("Parameter 'data' must have a spatial dimension named 'grid'.")
}
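# Compute the local dimension and theta for each time step over the 'grid'
# dimension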
attractor <- Apply(data, target_dims = c('time', 'grid'),
fun = .proxiesattractor,
quanti = quanti , ncores = ncores)
# rename dimensions
attractor <- lapply(attractor,
FUN = function(x, dimname){
names(dim(x))[dimname] <- 'time'
return(x)},
dimname = which(names(dim(attractor[[1]])) == 'dim2'))
return(list(dim = attractor$dim, theta = attractor$theta))
}
.proxiesattractor <- function(data, quanti) {
# expected dimensions data: time and grid
logdista <- Apply(data, target_dims = 'grid',
fun = function(x, y){
-log(colMeans((y - as.vector(x))^2))},
y = t(data))[[1]]
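# logdista[, i] holds, for time step i, minus the log of the mean squared
# distance between the field at time i and the field at every other time step
# (larger values correspond to closer, more analogous states)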
#Computation of theta
Theta <- function(logdista, quanti){
#Compute the threshold corresponding to the quantile
thresh <- quantile(logdista, quanti, na.rm = TRUE)
logdista[which(logdista == 'Inf')] <- NaN
Li <- which(as.vector(logdista) > as.numeric(thresh))
#Length of each cluster
Ti <- diff(Li)
N <- length(Ti)
q <- 1 - quanti
Si <- Ti - 1
Nc <- length(which(Si > 0))
theta <- (sum(q * Si) + N + Nc - sqrt(((sum(q * Si) + N + Nc)^2) -
8 * Nc * sum(q * Si))) / (2 * sum(q * Si))
#Sort the exceedances
logdista <- sort(logdista)
#Find all the Peaks over Thresholds.
findidx <- which(as.vector(logdista) > as.numeric(thresh))
if(length(findidx) < 1) {
stop("Parameter 'quanti' is too high for the length of the data provided.")
}
logextr <- logdista[findidx[[1]]:(length(logdista) - 1)]
#The inverse of the dimension is just the average of the exceedances
dim <- 1 /mean(as.numeric(logextr) - as.numeric(thresh))
return(list(dim = dim, theta = theta))
}
names(dim(logdista)) <- c('dim1', 'dim2')
proxies <- Apply(data = list(logdista = logdista),
target_dims = list('dim1'), fun = Theta, quanti = quanti)
return(list(dim = proxies$dim, theta = proxies$theta))
}
#'@rdname Predictability
#'@title Computing scores of predictability using two dynamical proxies
#'based on dynamical systems theory.
#'
#'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
#'@author Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
#'@author Veronica Torralba, \email{veronica.torralba@cmcc.it}
#'@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
#'
#'@description This function divides into terciles the two dynamical proxies
#'computed with CST_ProxiesAttractor or ProxiesAttractor. These terciles will
#'be used to measure the predictability of the system in dyn_scores. When the
#'local dimension 'dim' is small and the inverse of persistence 'theta' is
#'small, the predictability is high, and vice versa.
#'
#'@references Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D.,
#'and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large
#'scale atmospheric predictability. Nature Communications, 10(1), 1316.
#'DOI: https://doi.org/10.1038/s41467-019-09305-8
#'@references Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
#' Dynamical proxies of North Atlantic predictability and extremes.
#' Scientific Reports, 7, 41278.
#'
#'@param dim An array of N named dimensions containing the local dimension as
#'the output of CST_ProxiesAttractor or ProxiesAttractor.
#'
#'@param theta An array of N named dimensions containing the inverse of the
#'persistence 'theta' as the output of CST_ProxiesAttractor or ProxiesAttractor.
#'
#'@param ncores The number of cores to use in parallel computation
#'
#'@return A list of length 2:
#' \itemize{
#' \item\code{pred.dim} {a list of two lists 'qdim' and 'pos.d'. The 'qdim' list
#'contains values of local dimension 'dim' divided by terciles:
#'d1: lower tercile (high predictability),
#'d2: middle tercile,
#'d3: higher tercile (low predictability)
#'The 'pos.d' list contains the position of each tercile in parameter 'dim'}
#'
#' \item\code{pred.theta} {a list of two lists 'qtheta' and 'pos.t'.
#'The 'qtheta' list contains values of the inverse of persistence 'theta'
#'divided by terciles:
#'th1: lower tercile (high predictability),
#'th2: middle tercile,
#'th3: higher tercile (low predictability)
#'The 'pos.t' list contains the position of each tercile in parameter 'theta'}
#'}
#'@return dyn_scores values from 0 to 1. A dyn_score of 1 indicates the highest
#'predictability.
#'
#'@examples
#'# Creating an example of matrix data(time, grid):
#'m <- matrix(rnorm(2000) * 10, nrow = 50, ncol = 40)
#'names(dim(m)) <- c('time', 'grid')
#'# imposing a threshold
#'quanti <- 0.90
#'# computing dyn_scores from parameters dim and theta of the attractor
#'attractor <- ProxiesAttractor(data = m, quanti = quanti)
#'predyn <- Predictability(dim = attractor$dim, theta = attractor$theta)
#'@export
#'
Predictability <- function(dim, theta, ncores = NULL) {
# if (!inherits(dim, 's2dv_cube')) {
# stop("Parameter 'dim' must be of the class 's2dv_cube', ",
# "as output by CSTools::CST_Load.")
# }
# if (!inherits(theta, 's2dv_cube')) {
# stop("Parameter 'theta' must be of the class 's2dv_cube', ",
# "as output by CSTools::CST_Load.")
# }
if (any(names(dim(dim)) %in% 'sdate')) {
if (any(names(dim(dim)) %in% 'ftime')) {
dim <- MergeDims(dim, merge_dims = c('ftime', 'sdate'),
rename_dim = 'time')
}
}
if (!(any(names(dim(dim)) %in% 'time'))){
stop("Parameter 'dim' must have a temporal dimension named 'time'.")
}
if (any(names(dim(theta)) %in% 'sdate')) {
if (any(names(dim(theta)) %in% 'ftime')) {
theta <- MergeDims(theta, merge_dims = c('ftime', 'sdate'),
rename_dim = 'time')
}
}
if (!(any(names(dim(theta)) %in% 'time'))){
stop("Parameter 'data' must have a temporal dimension named 'time'.")
}
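# Split 'dim' and 'theta' into terciles along 'time' and derive the
# predictability score for each time step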
pred <- Apply(list(dim, theta), target_dims = 'time',
fun = .predictability,
ncores = ncores)
dim(pred$dyn_scores) <- dim(theta)
return(list(pred.dim = list(qdim = list(pred$qdim.d1,pred$qdim.d2,pred$qdim.d3),
pos.d = list(pred$pos.d1,pred$pos.d2,pred$pos.d3)),
pred.theta = list(qtheta = list(pred$qtheta.th1,pred$qtheta.th2,pred$qtheta.th3),
pos.t = list(pred$pos.th1,pred$pos.th2,pred$pos.th3)),
dyn_scores = pred$dyn_scores))
}
.predictability <- function(dim, theta) {
if (is.null(dim)) {
stop("Parameter 'dim' cannot be NULL.")
}
if (is.null(theta)) {
stop("Parameter 'theta' cannot be NULL.")
}
if (length(dim) != length(theta)) {
stop("Parameters 'dim' and 'theta' must have the same length")
}
pos <- c(1:length(dim))
# dim
qd1 <- quantile(dim, 0.33, na.rm = TRUE)
qd3 <- quantile(dim, 0.66, na.rm = TRUE)
d3 <- which(dim >= qd3)
d1 <- which(dim <= qd1)
d2 <- which(dim > qd1 & dim < qd3)
qdim <- list(d1 = dim[d1], d2 = dim[d2], d3 = dim[d3])
pos.d <- list(d1=d1,d2=d2,d3=d3)
#theta
qt1 <- quantile(theta, 0.33, na.rm = TRUE)
qt3 <- quantile(theta, 0.66, na.rm = TRUE)
th3 <- which(theta >= qt3)
th1 <- which(theta <= qt1)
th2 <- which(theta > qt1 & theta < qt3)
qtheta <- list(th1 = theta[th1], th2 = theta[th2], th3 = theta[th3])
pos.t <- list(th1 = th1, th2 = th2, th3 = th3)
#scores position
d1th1 <- pos.d$d1[which(pos.d$d1 %in% pos.t$th1)]
d1th2 <- pos.d$d1[which(pos.d$d1 %in% pos.t$th2)]
d2th1 <- pos.d$d2[which(pos.d$d2 %in% pos.t$th1)]
d1th3 <- pos.d$d1[which(pos.d$d1 %in% pos.t$th3)]
d3th1 <- pos.d$d3[which(pos.d$d3 %in% pos.t$th1)]
d2th2 <- pos.d$d2[which(pos.d$d2 %in% pos.t$th2)]
d2th3 <- pos.d$d2[which(pos.d$d2 %in% pos.t$th3)]
d3th2 <- pos.d$d3[which(pos.d$d3 %in% pos.t$th2)]
d3th3 <- pos.d$d3[which(pos.d$d3 %in% pos.t$th3)]
#scores values
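# Resulting score grid (rows: 'dim' terciles d1-d3, columns: 'theta' terciles th1-th3):
#        th1    th2    th3
#  d1    9/9    8/9    6/9
#  d2    7/9    4/9    3/9
#  d3    5/9    2/9    1/9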
dyn_scores <- c(1:length(dim))
dyn_scores[which(pos %in% d1th1)]<- 9/9
dyn_scores[which(pos %in% d1th2)]<- 8/9
dyn_scores[which(pos %in% d2th1)]<- 7/9
dyn_scores[which(pos %in% d1th3)]<- 6/9
dyn_scores[which(pos %in% d3th1)]<- 5/9
dyn_scores[which(pos %in% d2th2)]<- 4/9
dyn_scores[which(pos %in% d2th3)]<- 3/9
dyn_scores[which(pos %in% d3th2)]<- 2/9
dyn_scores[which(pos %in% d3th3)]<- 1/9
return(list(qdim.d1 = dim[d1], qdim.d2 = dim[d2], qdim.d3 = dim[d3],
pos.d1 = d1, pos.d2 = d2, pos.d3 =d3,
qtheta.th1 = theta[th1], qtheta.th2 = theta[th2], qtheta.th3 = theta[th3],
pos.th1 = th1, pos.th2 = th2, pos.th3 = th3,
dyn_scores = dyn_scores))
}
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/CST_DynBiasCorrection.R
\name{CST_DynBiasCorrection}
\alias{CST_DynBiasCorrection}
\title{Performing a Bias Correction conditioned by the dynamical
properties of the data.}
\usage{
CST_DynBiasCorrection(
exp,
obs,
method = "QUANT",
wetday = FALSE,
proxy = "dim",
quanti,
ncores = NULL
)
}
\arguments{
\item{exp}{an 's2dv_cube' object with the experiment data}
\item{obs}{an s2dv_cube object with the reference data}
\item{method}{a character string indicating the method to apply bias
correction among these ones: "PTF","RQUANT","QUANT","SSPLIN"}
\item{wetday}{logical indicating whether to perform wet-day correction
or not, OR a numeric threshold below which all values are set to zero (by
default it is set to FALSE).}
\item{proxy}{a character string indicating the proxy for local dimension
'dim' or inverse of persistence 'theta' to apply the dynamical
conditioned bias correction method.}
\item{quanti}{a number lower than 1 indicating the quantile used to compute
the local dimension and theta}
\item{ncores}{The number of cores to use in parallel computation}
}
\value{
dynbias an 's2dv_cube' object with the bias correction performed
conditioned by the local dimension 'dim' or the inverse of the persistence 'theta'.
}
\description{
This function performs a bias correction conditioned by the
dynamical properties of the dataset. It internally uses the function
'Predictability' to divide into terciles the two dynamical proxies
computed with 'CST_ProxiesAttractor'. A bias correction
between the model and the observations is performed using the division into
terciles of the local dimension 'dim' and the inverse of the persistence
'theta'. For instance, model values with lower 'dim' will be corrected with
observed values with lower 'dim', and likewise for 'theta'. The function
gives two options of bias correction: one for 'dim' and one for 'theta'.
}
\examples{
# Example 1: simple data in s2dv_cube format
set.seed(1)
expL <- rnorm(1:2000)
dim(expL) <- c(time = 100, lat = 4, lon = 5)
obsL <- c(rnorm(1:1980), expL[1, , ] * 1.2)
dim(obsL) <- c(time = 100, lat = 4, lon = 5)
time_obsL <- paste(rep("01", 100), rep("01", 100), 1920:2019, sep = "-")
time_expL <- paste(rep("01", 100), rep("01", 100), 1920:2019, sep = "-")
lon <- seq(-1, 5, 1.5)
lat <- seq(30, 35, 1.5)
# qm = 0.98 would be too high for this short dataset: the quantile
# requirement may not be met, in which case a lower qm, for instance
# qm = 0.60, should be selected
expL <- s2dv_cube(data = expL, lat = lat, lon = lon,
                  Dates = list(start = time_expL, end = time_expL))
obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon,
                  Dates = list(start = time_obsL, end = time_obsL))
# to use DynBiasCorrection
dynbias1 <- DynBiasCorrection(exp = expL$data, obs = obsL$data, proxy = "dim",
                              quanti = 0.6)
# to use CST_DynBiasCorrection
dynbias2 <- CST_DynBiasCorrection(exp = expL, obs = obsL, proxy = "dim",
                                  quanti = 0.6)
}
\references{
Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D.,
and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large
scale atmospheric predictability. Nature Communications, 10(1), 1316.
DOI: https://doi.org/10.1038/s41467-019-09305-8

Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
Dynamical proxies of North Atlantic predictability and extremes.
Scientific Reports, 7, 41278.
}
\author{
Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
Veronica Torralba, \email{veronica.torralba@cmcc.it}
Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
}
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/CST_ProxiesAttractor.R
\name{CST_ProxiesAttractor}
\alias{CST_ProxiesAttractor}
\title{Computing two dynamical proxies of the attractor in s2dv_cube.}
\usage{
CST_ProxiesAttractor(data, quanti, ncores = NULL)
}
\arguments{
\item{data}{an 's2dv_cube' object with the data to create the attractor. Must be a matrix with the
time steps in rows and the grid points in columns, i.e. data(time, grid).}
\item{quanti}{a number lower than 1 indicating the quantile used to compute the local dimension and theta}
\item{ncores}{The number of cores to use in parallel computation}
}
\value{
The same 's2dv_cube' object with the 'data' element replaced by a list
containing the local dimension 'dim' and the inverse of the persistence 'theta'.
}
\description{
This function computes two dynamical proxies of the attractor:
the local dimension (d) and the inverse of the persistence (theta) for an
's2dv_cube' object.
These two parameters will be used as a condition for the computation of dynamical
scores to measure predictability and to compute bias correction conditioned by
the dynamics with the function DynBiasCorrection.
Function based on the MATLAB code (davide.faranda@lsce.ipsl.fr) used in:
}
\examples{
# Example 1: Computing the attractor using simple s2dv data
attractor <- CST_ProxiesAttractor(data = lonlat_data$obs, quanti = 0.6)
}
\references{
Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., and Yiou, P. (2019).
The hammam effect or how a warm ocean enhances large scale atmospheric predictability.
Nature Communications, 10(1), 1316. DOI: https://doi.org/10.1038/s41467-019-09305-8

Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
Dynamical proxies of North Atlantic predictability and extremes.
Scientific Reports, 7, 41278.
}
\author{
Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
Veronica Torralba, \email{veronica.torralba@cmcc.it}
Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
}
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/CST_DynBiasCorrection.R
\name{DynBiasCorrection}
\alias{DynBiasCorrection}
\title{Performing a Bias Correction conditioned by the dynamical
properties of the data.}
\usage{
DynBiasCorrection(
exp,
obs,
method = "QUANT",
wetday = FALSE,
proxy = "dim",
quanti,
ncores = NULL
)
}
\arguments{
\item{exp}{a multidimensional array with named dimensions with the
experiment data}
\item{obs}{a multidimensional array with named dimensions with the
observation data}
\item{method}{a character string indicating the method to apply bias
correction among these ones:
"PTF","RQUANT","QUANT","SSPLIN"}
\item{wetday}{logical indicating whether to perform wet-day correction
or not, OR a numeric threshold below which all values are set to zero (by
default it is set to FALSE).}
\item{proxy}{a character string indicating the proxy for local dimension
'dim' or inverse of persistence 'theta' to apply the dynamical conditioned
bias correction method.}
\item{quanti}{a number lower than 1 indicating the quantile used to compute
the local dimension and theta}
\item{ncores}{The number of cores to use in parallel computation}
}
\value{
a multidimensional array with named dimensions containing the bias-corrected
data, conditioned by the local dimension 'dim' or the inverse of the persistence 'theta'.
}
\description{
This function performs a bias correction conditioned by the
dynamical properties of the dataset. It internally uses the function
'Predictability' to divide into terciles the two dynamical proxies
computed with 'CST_ProxiesAttractor'. A bias correction
between the model and the observations is performed using the division into
terciles of the local dimension 'dim' and the inverse of the persistence
'theta'. For instance, model values with lower 'dim' will be corrected with
observed values with lower 'dim', and likewise for 'theta'. The function
gives two options of bias correction: one for 'dim' and one for 'theta'.
}
\examples{
expL <- rnorm(1:2000)
dim(expL) <- c(time = 100, lat = 4, lon = 5)
obsL <- c(rnorm(1:1980), expL[1, , ] * 1.2)
dim(obsL) <- c(time = 100, lat = 4, lon = 5)
dynbias <- DynBiasCorrection(exp = expL, obs = obsL, method = 'QUANT',
                             proxy = "dim", quanti = 0.6)
}
\references{
Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D.,
and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large
scale atmospheric predictability. Nature Communications, 10(1), 1316.
DOI: https://doi.org/10.1038/s41467-019-09305-8

Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
Dynamical proxies of North Atlantic predictability and extremes.
Scientific Reports, 7, 41278.
}
\author{
Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
Veronica Torralba, \email{veronica.torralba@cmcc.it}
Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
}
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/Predictability.R
\name{Predictability}
\alias{Predictability}
\title{Computing scores of predictability using two dynamical proxies
based on dynamical systems theory.}
\usage{
Predictability(dim, theta, ncores = NULL)
}
\arguments{
\item{dim}{An array of N named dimensions containing the local dimension as
the output of CST_ProxiesAttractor or ProxiesAttractor.}
\item{theta}{An array of N named dimensions containing the inverse of the
persistence 'theta' as the output of CST_ProxiesAttractor or ProxiesAttractor.}
\item{ncores}{The number of cores to use in parallel computation}
}
\value{
A list of length 2:
\itemize{
\item\code{pred.dim} {a list of two lists 'qdim' and 'pos.d'. The 'qdim' list
contains values of local dimension 'dim' divided by terciles:
d1: lower tercile (high predictability),
d2: middle tercile,
d3: higher tercile (low predictability)
The 'pos.d' list contains the position of each tercile in parameter 'dim'}
\item\code{pred.theta} {a list of two lists 'qtheta' and 'pos.t'.
The 'qtheta' list contains values of the inverse of persistence 'theta'
divided by terciles:
th1: lower tercile (high predictability),
th2: middle tercile,
th3: higher tercile (low predictability)
The 'pos.t' list contains the position of each tercile in parameter 'theta'}
}
dyn_scores values from 0 to 1. A dyn_score of 1 indicates the highest
predictability.
}
\description{
This function divides into terciles the two dynamical proxies
computed with CST_ProxiesAttractor or ProxiesAttractor. These terciles will
be used to measure the predictability of the system in dyn_scores. When the
local dimension 'dim' is small and the inverse of persistence 'theta' is
small, the predictability is high, and vice versa.
}
\examples{
# Creating an example of matrix data(time, grid):
m <- matrix(rnorm(2000) * 10, nrow = 50, ncol = 40)
names(dim(m)) <- c('time', 'grid')
# imposing a threshold
quanti <- 0.90
# computing dyn_scores from parameters dim and theta of the attractor
attractor <- ProxiesAttractor(data = m, quanti = quanti)
predyn <- Predictability(dim = attractor$dim, theta = attractor$theta)
}
\references{
Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D.,
and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large
scale atmospheric predictability. Nature Communications, 10(1), 1316.
DOI: https://doi.org/10.1038/s41467-019-09305-8

Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
Dynamical proxies of North Atlantic predictability and extremes.
Scientific Reports, 7, 41278.
}
\author{
Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
Veronica Torralba, \email{veronica.torralba@cmcc.it}
Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
}
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/CST_ProxiesAttractor.R
\name{ProxiesAttractor}
\alias{ProxiesAttractor}
\title{Computing two dynamical proxies of the attractor.}
\usage{
ProxiesAttractor(data, quanti, ncores = NULL)
}
\arguments{
\item{data}{a multidimensional array with named dimensions to create the attractor. It requires a temporal dimension named 'time' and spatial dimensions called 'lat' and 'lon', or 'latitude' and 'longitude', or 'grid'.}
\item{quanti}{a number lower than 1 indicating the quantile used to compute the local dimension and theta}
\item{ncores}{The number of cores to use in parallel computation.}
}
\value{
A list of two elements: 'dim' containing the local dimension and 'theta'
containing the inverse of the persistence, both along the 'time' dimension.
}
\description{
This function computes two dynamical proxies of the attractor:
the local dimension (d) and the inverse of the persistence (theta).
These two parameters will be used as a condition for the computation of dynamical
scores to measure predictability and to compute bias correction conditioned by
the dynamics with the function DynBiasCorrection.
Function based on the MATLAB code (davide.faranda@lsce.ipsl.fr) used in:
}
\examples{
# Example 1: Computing the attractor using simple data
# Creating an example of matrix data(time,grids):
mat <- array(rnorm(36 * 40), c(time = 36, grid = 40))
qm <- 0.90 # imposing a threshold
Attractor <- ProxiesAttractor(data = mat, quanti = qm)
# to plot the result
time = c(1:length(Attractor$theta))
layout(matrix(c(1, 3, 2, 3), 2, 2))
plot(time, Attractor$dim, xlab = 'time', ylab = 'd',
main = 'local dimension', type = 'l')
plot(time, Attractor$theta, xlab = 'time', ylab = 'theta', main = 'theta')
plot(Attractor$dim, Attractor$theta, col = 'blue',
main = "Proxies of the Attractor",
xlab = "local dimension", ylab = "theta", lwd = 8, 'p')
}
\references{
Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., and Yiou, P. (2019).
The hammam effect or how a warm ocean enhances large scale atmospheric predictability.
Nature Communications, 10(1), 1316. DOI: https://doi.org/10.1038/s41467-019-09305-8

Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
Dynamical proxies of North Atlantic predictability and extremes.
Scientific Reports, 7, 41278.
}
\author{
Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
Veronica Torralba, \email{veronica.torralba@cmcc.it}
Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
}
@@ -156,7 +156,7 @@ dateseq <- format(seq(start, end, by = "year"), "%Y%m%d")
Using the `CST_Load` function from the **CSTools package**, the data available in our
data store can be loaded. The following lines show how this function can be
used. The experimental datasets are interpolated to the ERA5 grid by specifying the 'grid' parameter, while ERA5 does not need to be interpolated. The parameter leadtimemax is set to 1 for the experimental dataset and to 31 for the observations, returning the daily observations for October for the years requested in 'sdate' (2000-2006).
Download the data to run the recipe in the link https://downloads.cmcc.bo.it/d_chaves/ANALOGS/data_for_Analogs.Rdat or ask carmen.alvarez-castro at cmcc.it or nuria.perez at bsc.es.
Download the data to run the recipe under the HTTPS: downloads.cmcc.bo.it/d_chaves/ANALOGS/data_for_Analogs.Rdat or ask carmen.alvarez-castro at cmcc.it or nuria.perez at bsc.es.
```
exp <- list(name = 'ECMWF_system4_m1',
@@ -373,4 +373,4 @@ down4 <- CST_Analogs(expL = expPSL, obsL = obsPSL, AnalogsInfo = TRUE,
In this case, the best analog is still the 7th of October, 2005.
*Note: You can compute the anomaly values before applying the criteria (as in Yiou et al., 2013) using `CST_Anomaly` from the CSTools package*
\ No newline at end of file
*Note: You can compute the anomaly values before applying the criteria (as in Yiou et al., 2013) using `CST_Anomaly` from the CSTools package*
@@ -118,7 +118,7 @@ RainFARM has downscaled the original field with a realistic fine-scale correlati
The area of interest in our example presents a complex orography, but the basic RainFARM algorithm used does not consider topographic elevation in deciding how to distribute fine-scale precipitation. A long term climatology of the downscaled fields would have a resolution comparable to that of the original coarse fields and would not resemble the fine-scale structure of an observed climatology. If an external fine-scale climatology of precipitation is available, we can use the method discussed in Terzago et al. (2018) to change the distribution of precipitation by RainFARM for each timestep, so that the long-term average is close to this reference climatology in terms of precipitation distribution (while the total precipitation amount of the original fields to downscale is preserved).
Suitable climatology files could be for example a fine-scale precipitation climatology from a high-resolution regional climate model (see e.g. Terzago et al. 2018), a local high-resolution gridded climatology from observations, or a reconstruction such as those which can be downloaded from the WORLDCLIM (https://www.worldclim.org) or CHELSA (https://chelsa-climate.org) websites. The latter data will need to be converted to NetCDF format before being used (see for example the GDAL tools, https://gdal.org).
Suitable climatology files could be for example a fine-scale precipitation climatology from a high-resolution regional climate model (see e.g. Terzago et al. 2018), a local high-resolution gridded climatology from observations, or a reconstruction such as those which can be downloaded from the WORLDCLIM (https://www.worldclim.org) or CHELSA (chelsa-climate.org) websites. The latter data will need to be converted to NetCDF format before being used (see for example the GDAL tools, https://gdal.org).
We will assume that a copy of the WORLDCLIM precipitation climatology at 30 arcseconds (about 1 km resolution) is available in the local file `worldclim.nc`. From this file we can derive suitable weights to be used with RainFARM using the `CST_RFWeights` function as follows:
```{r}
ww <- CST_RFWeights("./worldclim.nc", nf = 20, lon = exp$lon, lat = exp$lat)
......