From 37bff7ce511dc8e5d2123e45c2393ade8fc60cfb Mon Sep 17 00:00:00 2001
From: Carmen Alvarez-Castro
Date: Tue, 14 May 2019 14:04:59 +0200
Subject: [PATCH 01/48] New function to compute the properties of the
 underlying attractor. Dynamical conditions for the bias correction.

---
 R/ProxiesAttractor.R | 87 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 87 insertions(+)
 create mode 100644 R/ProxiesAttractor.R

diff --git a/R/ProxiesAttractor.R b/R/ProxiesAttractor.R
new file mode 100644
index 00000000..73f45227
--- /dev/null
+++ b/R/ProxiesAttractor.R
@@ -0,0 +1,87 @@
+#'ProxiesAttractor
+#'
+#'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
+#'@description This function computes two dynamical proxies of the attractor: The local dimension (d) and the inverse of the persistence (theta).
+#'These two parameters will be used as a condition for the bias correction function DynBiasCorrection.
+#'Function based on the Matlab code (@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}) used in "Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., and Yiou, P. (2019).
+#'The hammam effect or how a warm ocean enhances large scale atmospheric predictability. Nature Communications, 10(1), 1316. DOI = https://doi.org/10.1038/s41467-019-09305-8 "
+#'
+#'@param dat data to create the attractor. Must be a matrix with the timesteps in rows and gridpoints in columns -> dat(time,grids)
+#
+#'@param quanti threshold for the computation of the attractor
+#'
+#'@param iplot to have a quick view of the proxies and the attractor
+#'
+#'@return dim and theta
+#'
+#'@return A list of 2:
+#'\itemize{
+#'\item\code{$dim} {A vector with the same length as the timesteps: nrow(dat)}
+#'\item\code{$theta} {A vector with the same length as the timesteps: nrow(dat)}}
+#'@references
+#' \url{https://doi.org/10.1038/s41467-019-09305-8}
+#'
+#'@import s2dverification
+#'@import fields
+#'@import pracma
+#'
+#'@examples
+#'# Creating an example of matrix dat(time,grids):
+#'tm=24*6 # time
+#'gm=5000 # grids
+#'m=matrix(rand(1,tm*gm),nrow=tm,ncol=gm)
+#'qm=0.90 # imposing a threshold
+#'# selecting the option to create a plot to see the attractor 'iplot=TRUE'
+#'# example
+#'ExampleAttractor=ProxiesAttractor(dat=m,quanti=qm,iplot=TRUE)
+#'
+ProxiesAttractor <- function(dat,quanti,iplot=FALSE){
+  npoints=nrow(dat)
+  dim=numeric(npoints)
+  theta=numeric(npoints)
+  time=seq(1:npoints)
+  l=1
+  for (l in 1:npoints){
+    #computation of the distances from the iteration l
+    distance=pdist2(dat[l,],dat)
+    #Taking the logarithm of the distance
+    logdista=-log(distance)
+    n=1
+    for(n in 1:npoints){
+      if(logdista[n]=="Inf"){logdista[n]=NaN}
+      n=n+1}
+    #Compute the threshold corresponding to the quantile
+    thresh=quantile(logdista, quanti,na.rm=TRUE)
+
+    #Computation of the inverse of persistence 'theta'
+    Li<-which(logdista>thresh)
+    #Length of each cluster
+    Ti<-diff(Li)
+    N=length(Ti)
+    q=1-quanti
+    Si=Ti-1
+    Nc=length(which(Si>0))
+    N=length(Ti)
+    theta[l]=(sum(q*Si)+N+Nc-sqrt(((sum(q*Si)+N+Nc)^2)-8*Nc*sum(q*Si)))/(2*sum(q*Si))
+
+    #computation of local dimension 'dim'
+    #Sort the exceedances
+    logdista=sort(logdista)
+    #Find all the Peaks over Thresholds.
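+    #(following extreme value theory, only the log-distances exceeding 'thresh' enter the estimate of d below)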
+    findidx=which(logdista>thresh)
+    logextr=logdista[findidx[[1]]:(length(logdista)-1)]
+    #The inverse of the dimension is just the average of the exceedances
+    dim[l]=1/mean(logextr-thresh)
+    l=l+1
+    if(l
Date: Fri, 26 Feb 2021 17:37:25 +0100
Subject: [PATCH 02/48] fixed bug with the dimensions when method==rpc-based,
 apply_to==sign, cross_validation==TRUE and the correlation is not significant

---
 R/CST_Calibration.R | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/R/CST_Calibration.R b/R/CST_Calibration.R
index 58ef7207..b9e9f034 100644
--- a/R/CST_Calibration.R
+++ b/R/CST_Calibration.R
@@ -72,7 +72,7 @@ CST_Calibration <- function(exp, obs, cal.method = "mse_min",
 #'
 #'@author Verónica Torralba, \email{veronica.torralba@bsc.es}
 #'@author Bert Van Schaeybroeck, \email{bertvs@meteo.be}
-#'@description Four types of member-by-member bias correction can be performed. The \code{"bias"} method corrects the bias only, the \code{"evmos"} method applies a variance inflation technique to ensure the correction of the bias and the correspondence of variance between forecast and observation (Van Schaeybroeck and Vannitsem, 2011). The ensemble calibration methods \code{"mse_min"} and \code{"crps_min"} correct the bias, the overall forecast variance and the ensemble spread as described in Doblas-Reyes et al. (2005) and Van Schaeybroeck and Vannitsem (2015), respectively. While the \code{"mse_min"} method minimizes a constrained mean-squared error using three parameters, the \code{"crps_min"} method features four parameters and minimizes the Continuous Ranked Probability Score (CRPS). The \code{"rpc-based"} method adjusts the forecast variance ensuring that the ratio of predictable components (RPC) is equal to one, as in Eade et al. (2014).
+#'@description Five types of member-by-member bias correction can be performed. The \code{"bias"} method corrects the bias only, the \code{"evmos"} method applies a variance inflation technique to ensure the correction of the bias and the correspondence of variance between forecast and observation (Van Schaeybroeck and Vannitsem, 2011). The ensemble calibration methods \code{"mse_min"} and \code{"crps_min"} correct the bias, the overall forecast variance and the ensemble spread as described in Doblas-Reyes et al. (2005) and Van Schaeybroeck and Vannitsem (2015), respectively. While the \code{"mse_min"} method minimizes a constrained mean-squared error using three parameters, the \code{"crps_min"} method features four parameters and minimizes the Continuous Ranked Probability Score (CRPS). The \code{"rpc-based"} method adjusts the forecast variance ensuring that the ratio of predictable components (RPC) is equal to one, as in Eade et al. (2014).
 #'@description Both in-sample and out-of-sample (leave-one-out cross-validation) calibration are possible.
 #'@references Doblas-Reyes F.J, Hagedorn R, Palmer T.N. The rationale behind the success of multi-model ensembles in seasonal forecasting-II calibration and combination. Tellus A. 2005;57:234-252. doi:10.1111/j.1600-0870.2005.00104.x
 #'@references Eade, R., Smith, D., Scaife, A., Wallace, E., Dunstone, N., Hermanson, L., & Robinson, N. (2014). Do seasonal-to-decadal climate predictions underestimate the predictability of the real world? Geophysical Research Letters, 41(15), 5620-5628. doi: 10.1002/2014GL061146
@@ -314,7 +314,7 @@ Calibration <- function(exp, obs, cal.method = "mse_min",
                               order = names(dims.fc))
         dim(var.cor.fc) <- dims.fc
       } else { ## no significant -> replacing with observed climatology
-        var.cor.fc[ , eval.dexes] <- array(data = mean(obs.tr, na.rm = na.rm), dim = dim(fc.tr))
+        var.cor.fc[ , eval.dexes] <- array(data = mean(obs.tr, na.rm = na.rm), dim = dim(fc.ev))
       }
     } else {
       stop("unknown calibration method: ",cal.method)
-- GitLab
From 5127d274e9ad3a044ba1e163ad5bfae0d329dcef Mon Sep 17 00:00:00 2001
From: lpalma
Date: Tue, 2 Mar 2021 10:59:02 +0100
Subject: [PATCH 03/48] 1st working version

---
 R/CST_BiasCorrection.R | 41 +++++++++++++++++++++++++++++------------
 1 file changed, 29 insertions(+), 12 deletions(-)

diff --git a/R/CST_BiasCorrection.R b/R/CST_BiasCorrection.R
index 9baf897b..bda531ba 100644
--- a/R/CST_BiasCorrection.R
+++ b/R/CST_BiasCorrection.R
@@ -90,20 +90,37 @@ BiasCorrection <- function (exp, obs , na.rm = FALSE) {
   return(BiasCorrected)
 }

-.sbc <- function(var_obs, var_exp , na.rm = FALSE) {
+.sbc <- function(var_obs, var_exp ,var_cor=NULL, na.rm = FALSE) {
   nmembers <- dim(var_exp)['member'][]
   ntime <- dim(var_exp)['sdate'][]
-  if (all(names(dim(var_exp)) != c('member','sdate'))) {
-    var_exp <- t(var_exp)
-  }
+  #if (all(names(dim(var_exp)) != c('member','sdate'))) {
+  #  var_exp <- t(var_exp)
+  #}
   corrected <- NA * var_exp
-
-  for (t in 1 : ntime) {
-    # defining forecast,hindcast and observation in cross-validation
-    fcst <- var_exp[ , t]
-    hcst <- var_exp[ , -t]
-    obs <- var_obs[-t]
+
+  if (is.null(var_cor)){
+    for (t in 1 : ntime) {
+      # defining forecast, hindcast and observation in cross-validation
+      fcst <- var_exp[ , t]
+      hcst <- var_exp[ , -t]
+      obs <- var_obs[-t]
+
+      # parameters
+      sd_obs <- sd(obs , na.rm = na.rm)
+      sd_exp <- sd(hcst , na.rm = na.rm)
+      clim_exp <- mean(hcst , na.rm = na.rm)
+      clim_obs <- mean(obs , na.rm = na.rm)
+
+      # bias corrected forecast
+      corrected[ , t] <- ((fcst - clim_exp) * (sd_obs / sd_exp)) + clim_obs
+      names(dim(corrected)) <- c('member', 'sdate')
+    }
+  } else {
+    # defining forecast, hindcast and observation (no cross-validation)
+    fcst <- var_cor
+    hcst <- var_exp
+    obs <- var_obs

     # parameters
     sd_obs <- sd(obs , na.rm = na.rm)
     sd_exp <- sd(hcst , na.rm = na.rm)
     clim_exp <- mean(hcst , na.rm = na.rm)
     clim_obs <- mean(obs , na.rm = na.rm)

     # bias corrected forecast
-    corrected[ , t] <- ((fcst - clim_exp) * (sd_obs / sd_exp)) + clim_obs
+    corrected <- ((fcst - clim_exp) * (sd_obs / sd_exp)) + clim_obs
+    names(dim(corrected)) <- c('member')
   }
-  names(dim(corrected)) <- c('member', 'sdate')
   return(corrected)
 }
-- GitLab
From 91bd1cc8ab0a17d2680bf99695687d2fc33de85e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ver=C3=B3nica=20Torralba-Fern=C3=A1ndez?=
Date: Fri, 19 Mar 2021 09:03:50 +0100
Subject: [PATCH 04/48] Including abs to retain the absolute value of the
 correlation

---
 R/CST_Calibration.R | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/R/CST_Calibration.R b/R/CST_Calibration.R
index 58ef7207..34607bad 100644
--- a/R/CST_Calibration.R
+++ b/R/CST_Calibration.R
@@ -416,7 +416,7 @@ Calibration <- function(exp, obs, cal.method = "mse_min",
     } else {
       par.out[3] <- with(quant.obs.fc, obs.sd * sqrt(1.
- cor.obs.fc^2) / fc.dev.sd) } - par.out[2] <- with(quant.obs.fc, cor.obs.fc * obs.sd / fc.ens.av.sd) + par.out[2] <- with(quant.obs.fc, abs(cor.obs.fc) * obs.sd / fc.ens.av.sd) par.out[1] <- with(quant.obs.fc, obs.av - par.out[2] * fc.ens.av.av, na.rm = na.rm) return(par.out) @@ -473,4 +473,4 @@ Calibration <- function(exp, obs, cal.method = "mse_min", .CalibrationMembersRPC <- function(exp, ens_mean, ens_mean_cal, var_obs, var_noise, r){ member_cal <- (exp - ens_mean) * sqrt(var_obs) * sqrt(1 - r^2) / sqrt(var_noise) + ens_mean_cal return(member_cal) -} \ No newline at end of file +} -- GitLab From e20b6e62b7c26a7dd6218838871ba83445f4aff7 Mon Sep 17 00:00:00 2001 From: lpalma Date: Tue, 30 Mar 2021 19:47:59 +0200 Subject: [PATCH 05/48] Removed redundant vars --- R/CST_BiasCorrection.R | 30 ++++++++++-------------------- 1 file changed, 10 insertions(+), 20 deletions(-) diff --git a/R/CST_BiasCorrection.R b/R/CST_BiasCorrection.R index bda531ba..45db9d2c 100644 --- a/R/CST_BiasCorrection.R +++ b/R/CST_BiasCorrection.R @@ -101,35 +101,25 @@ BiasCorrection <- function (exp, obs , na.rm = FALSE) { if (is.null(var_cor)){ for (t in 1 : ntime) { - # defining forecast,hindcast and observation in cross-validation - fcst <- var_exp[ , t] - hcst <- var_exp[ , -t] - obs <- var_obs[-t] - # parameters - sd_obs <- sd(obs , na.rm = na.rm) - sd_exp <- sd(hcst , na.rm = na.rm) - clim_exp <- mean(hcst , na.rm = na.rm) - clim_obs <- mean(obs , na.rm = na.rm) + sd_obs <- sd(var_obs[-t], na.rm = na.rm) + sd_exp <- sd(var_exp[ , -t], na.rm = na.rm) + clim_exp <- mean(var_exp[ , -t], na.rm = na.rm) + clim_obs <- mean(var_obs[-t], na.rm = na.rm) # bias corrected forecast - corrected[ , t] <- ((fcst - clim_exp) * (sd_obs / sd_exp)) + clim_obs + corrected[ , t] <- ((var_exp[ , t] - clim_exp) * (sd_obs / sd_exp)) + clim_obs names(dim(corrected)) <- c('member', 'sdate') } } else { - # defning forecast,hindcast and observation in cross-validation - fcst <- var_cor - hcst <- var_exp - obs <- var_obs - # parameters - sd_obs <- sd(obs , na.rm = na.rm) - sd_exp <- sd(hcst , na.rm = na.rm) - clim_exp <- mean(hcst , na.rm = na.rm) - clim_obs <- mean(obs , na.rm = na.rm) + sd_obs <- sd(var_obs, na.rm = na.rm) + sd_exp <- sd(var_exp, na.rm = na.rm) + clim_exp <- mean(var_exp, na.rm = na.rm) + clim_obs <- mean(var_obs, na.rm = na.rm) # bias corrected forecast - corrected <- ((fcst - clim_exp) * (sd_obs / sd_exp)) + clim_obs + corrected <- ((var_cor - clim_exp) * (sd_obs / sd_exp)) + clim_obs names(dim(corrected)) <- c('member') } return(corrected) -- GitLab From 9a3029d13efe3bfa979773fad6a2df0adb072832 Mon Sep 17 00:00:00 2001 From: nperez Date: Wed, 31 Mar 2021 17:17:52 +0200 Subject: [PATCH 06/48] FInclude exp_cor to be called by Apply and CST_ functions --- .Rbuildignore | 2 +- DESCRIPTION | 3 +- NAMESPACE | 1 + NEWS.md | 9 +++- R/CST_BiasCorrection.R | 87 +++++++++++++++++++++++++++++++-------- man/BiasCorrection.Rd | 41 ++++++++++++++++++ man/CST_BiasCorrection.Rd | 4 +- 7 files changed, 126 insertions(+), 21 deletions(-) create mode 100644 man/BiasCorrection.Rd diff --git a/.Rbuildignore b/.Rbuildignore index fa596e70..b2d8e5fc 100644 --- a/.Rbuildignore +++ b/.Rbuildignore @@ -5,4 +5,4 @@ ./.nc$ .*^(?!data)\.RData$ .*\.gitlab-ci.yml$ -^tests$ +#^tests$ diff --git a/DESCRIPTION b/DESCRIPTION index 7fe8d681..aecd89b3 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,7 +1,7 @@ Package: CSTools Title: Assessing Skill of Climate Forecasts on Seasonal-to-Decadal Timescales -Version: 4.0.0 +Version: 4.0.1 Authors@R: c( 
person("Nuria", "Perez-Zanon", , "nuria.perez@bsc.es", role = c("aut", "cre"), comment = c(ORCID = "0000-0001-8568-3071")), person("Louis-Philippe", "Caron", , "louis-philippe.caron@bsc.es", role = "aut", comment = c(ORCID = "0000-0001-5221-0147")), @@ -10,6 +10,7 @@ Authors@R: c( person("Jost", "von Hardenberg", , email = c("j.vonhardenberg@isac.cnr.it", "jost.hardenberg@polito.it"), role = "aut", comment = c(ORCID = "0000-0002-5312-8070")), person("Llorenç", "LLedo", , "llledo@bsc.es", role = "aut"), person("Nicolau", "Manubens", , "nicolau.manubens@bsc.es", role = "aut"), + person("Lluís", "Palma", , "lluis.palma@bsc.es", role = "aut"), person("Eroteida", "Sanchez-Garcia", , "esanchezg@aemet.es", role = "aut"), person("Bert", "van Schaeybroeck", , "bertvs@meteo.be", role = "aut"), person("Veronica", "Torralba", , "veronica.torralba@bsc.es", role = "aut"), diff --git a/NAMESPACE b/NAMESPACE index 7e313bc5..17befef4 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -4,6 +4,7 @@ export(AdamontAnalog) export(Analogs) export(BEI_PDFBest) export(BEI_Weights) +export(BiasCorrection) export(CST_Analogs) export(CST_AnalogsPredictors) export(CST_Anomaly) diff --git a/NEWS.md b/NEWS.md index 23d19889..dd228936 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,5 +1,12 @@ +### CSTools 4.0.1 +**Submission date to CRAN: 23-02-2021** + +- New features: + + CST_BiasCorrection and BiasCorrection allows to calibrate a forecast given the calibration in the hindcast by using parameter 'exp_cor'. + + ### CSTools 4.0.0 -**Submission date to CRAN: XX-12-2020** +**Submission date to CRAN: 23-02-2021** - New features: + ADAMONT downscaling method: requires CST_AdamontAnalogs and CST_AdamontQQCor functions diff --git a/R/CST_BiasCorrection.R b/R/CST_BiasCorrection.R index 45db9d2c..b7bd3e1d 100644 --- a/R/CST_BiasCorrection.R +++ b/R/CST_BiasCorrection.R @@ -5,6 +5,7 @@ #' #'@param exp an object of class \code{s2dv_cube} as returned by \code{CST_Load} function, containing the seasonal forecast experiment data in the element named \code{$data} #'@param obs an object of class \code{s2dv_cube} as returned by \code{CST_Load} function, containing the observed data in the element named \code{$data}. +#'@param exp_cor an object of class \code{s2dv_cube} as returned by \code{CST_Load} function, containing the seasonl forecast experiment to be corrected. If it is NULL, the 'exp' forecast will be corrected. #'@param na.rm a logical value indicating whether missing values should be stripped before the computation proceeds, by default it is set to FALSE. #' #'@return an object of class \code{s2dv_cube} containing the bias corrected forecasts in the element called \code{$data} with the same dimensions of the experimental data. 
@@ -30,28 +31,70 @@
 #'a <- CST_BiasCorrection(exp = exp, obs = obs)
 #'str(a)
 #'@export
-CST_BiasCorrection <- function(exp, obs, na.rm = FALSE) {
+CST_BiasCorrection <- function(exp, obs, exp_cor = NULL, na.rm = FALSE) {
   if (!inherits(exp, 's2dv_cube') || !inherits(obs, 's2dv_cube')) {
     stop("Parameter 'exp' and 'obs' must be of the class 's2dv_cube', ",
          "as output by CSTools::CST_Load.")
   }
-
   if (dim(obs$data)['member'] != 1) {
     stop("The length of the dimension 'member' in the component 'data' ",
          "of the parameter 'obs' must be equal to 1.")
   }
-  dimnames <- names(dim(exp$data))
-  BiasCorrected <- BiasCorrection(exp = exp$data, obs = obs$data, na.rm = na.rm)
-  pos <- match(dimnames, names(dim(BiasCorrected)))
-  BiasCorrected <- aperm(BiasCorrected, pos)
-  names(dim(BiasCorrected)) <- dimnames
-  exp$data <- BiasCorrected
-  exp$Datasets <- c(exp$Datasets, obs$Datasets)
-  exp$source_files <- c(exp$source_files, obs$source_files)
-  return(exp)
+  if (is.null(exp_cor)) {
+    dimnames <- names(dim(exp$data))
+    BiasCorrected <- BiasCorrection(exp = exp$data, obs = obs$data, na.rm = na.rm)
+    pos <- match(dimnames, names(dim(BiasCorrected)))
+    BiasCorrected <- aperm(BiasCorrected, pos)
+    names(dim(BiasCorrected)) <- dimnames
+    exp$data <- BiasCorrected
+    exp$Datasets <- c(exp$Datasets, obs$Datasets)
+    exp$source_files <- c(exp$source_files, obs$source_files)
+    return(exp)
+  } else {
+    if (!inherits(exp_cor, 's2dv_cube')) {
+      stop("Parameter 'exp_cor' must be of the class 's2dv_cube'.")
+    }
+    dimnames <- names(dim(exp_cor$data))
+    BiasCorrected <- BiasCorrection(exp = exp$data, obs = obs$data,
+                                    exp_cor = exp_cor$data, na.rm = na.rm)
+    pos <- match(dimnames, names(dim(BiasCorrected)))
+    BiasCorrected <- aperm(BiasCorrected, pos)
+    names(dim(BiasCorrected)) <- dimnames
+    exp_cor$data <- BiasCorrected
+    exp_cor$Datasets <- c(exp_cor$Datasets, exp$Datasets, obs$Datasets)
+    exp_cor$source_files <- c(exp_cor$source_files,
+                              exp$source_files, obs$source_files)
+    return(exp_cor)
+  }
 }
-
-BiasCorrection <- function (exp, obs , na.rm = FALSE) {
+#' Bias Correction based on the mean and standard deviation adjustment
+#'
+#'@author Verónica Torralba, \email{veronica.torralba@bsc.es}
+#'@description This function applies the simple bias adjustment technique described in Torralba et al. (2017). The adjusted forecasts have an equivalent standard deviation and mean to that of the reference dataset.
+#'
+#'@param exp a multidimensional array with named dimensions containing the seasonal forecast experiment data with at least 'member' and 'sdate' dimensions.
+#'@param obs a multidimensional array with named dimensions containing the observed data with at least 'sdate' dimension.
+#'@param exp_cor a multidimensional array with named dimensions containing the seasonal forecast experiment to be corrected. If it is NULL, the 'exp' forecast will be corrected.
+#'@param na.rm a logical value indicating whether missing values should be stripped before the computation proceeds, by default it is set to FALSE.
+#'
+#'@return an array containing the bias corrected forecasts with the same dimensions as the experimental data.
+#'
+#'@references Torralba, V., F.J. Doblas-Reyes, D. MacLeod, I. Christel and M. Davis (2017). Seasonal climate prediction: a new source of information for the management of wind energy resources. Journal of Applied Meteorology and Climatology, 56, 1231-1247, doi:10.1175/JAMC-D-16-0204.1. (CLIM4ENERGY, EUPORIAS, NEWA, RESILIENCE, SPECS)
+#'
+#'@import multiApply
+#'@examples
+#'
+#'# Example
+#'# Creation of sample s2dverification objects. These are not complete
+#'# s2dverification objects though. The Load function returns complete objects.
+#'mod1 <- 1 : (1 * 3 * 4 * 5 * 6 * 7)
+#'dim(mod1) <- c(dataset = 1, member = 3, sdate = 4, ftime = 5, lat = 6, lon = 7)
+#'obs1 <- 1 : (1 * 1 * 4 * 5 * 6 * 7)
+#'dim(obs1) <- c(dataset = 1, member = 1, sdate = 4, ftime = 5, lat = 6, lon = 7)
+#'a <- BiasCorrection(exp = mod1, obs = obs1)
+#'str(a)
+#'@export
+BiasCorrection <- function (exp, obs, exp_cor = NULL, na.rm = FALSE) {

   if (!all(c('member', 'sdate') %in% names(dim(exp)))) {
     stop("Parameter 'exp' must have the dimensions 'member' and 'sdate'.")
@@ -83,14 +126,24 @@ BiasCorrection <- function (exp, obs , na.rm = FALSE) {
   if ('member' %in% names(dim(obs))) {
     target_dims_obs <- c('member', target_dims_obs)
   }
-
-  BiasCorrected <- Apply(data = list(var_obs = obs, var_exp = exp),
-                         target_dims = list(target_dims_obs, c('member', 'sdate')),
+  if (is.null(exp_cor)) {
+    BiasCorrected <- Apply(data = list(var_obs = obs, var_exp = exp),
+                           target_dims = list(target_dims_obs,
+                                              c('member', 'sdate')),
                            fun = .sbc , na.rm = na.rm)$output1
+  } else {
+    BiasCorrected <- Apply(data = list(var_obs = obs,
+                                       var_exp = exp,
+                                       var_cor = exp_cor),
+                           target_dims = list(target_dims_obs,
+                                              c('member', 'sdate'),
+                                              c('member', 'sdate')),
+                           fun = .sbc , na.rm = na.rm)$output1
+  }
   return(BiasCorrected)
 }

-.sbc <- function(var_obs, var_exp ,var_cor=NULL, na.rm = FALSE) {
+.sbc <- function(var_obs, var_exp , var_cor = NULL, na.rm = FALSE) {
   nmembers <- dim(var_exp)['member'][]
   ntime <- dim(var_exp)['sdate'][]
   #if (all(names(dim(var_exp)) != c('member','sdate'))) {
   #  var_exp <- t(var_exp)
   #}
   corrected <- NA * var_exp
diff --git a/man/BiasCorrection.Rd b/man/BiasCorrection.Rd
new file mode 100644
index 00000000..8b60a6de
--- /dev/null
+++ b/man/BiasCorrection.Rd
@@ -0,0 +1,41 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/CST_BiasCorrection.R
+\name{BiasCorrection}
+\alias{BiasCorrection}
+\title{Bias Correction based on the mean and standard deviation adjustment}
+\usage{
+BiasCorrection(exp, obs, exp_cor = NULL, na.rm = FALSE)
+}
+\arguments{
+\item{exp}{a multidimensional array with named dimensions containing the seasonal forecast experiment data with at least 'member' and 'sdate' dimensions.}
+
+\item{obs}{a multidimensional array with named dimensions containing the observed data with at least 'sdate' dimension.}
+
+\item{exp_cor}{a multidimensional array with named dimensions containing the seasonal forecast experiment to be corrected. If it is NULL, the 'exp' forecast will be corrected.}
+
+\item{na.rm}{a logical value indicating whether missing values should be stripped before the computation proceeds, by default it is set to FALSE.}
+}
+\value{
+an array containing the bias corrected forecasts with the same dimensions as the experimental data.
+}
+\description{
+This function applies the simple bias adjustment technique described in Torralba et al. (2017). The adjusted forecasts have an equivalent standard deviation and mean to that of the reference dataset.
+}
+\examples{
+
+# Example
+# Creation of sample s2dverification objects. These are not complete
+# s2dverification objects though. The Load function returns complete objects.
+mod1 <- 1 : (1 * 3 * 4 * 5 * 6 * 7)
+dim(mod1) <- c(dataset = 1, member = 3, sdate = 4, ftime = 5, lat = 6, lon = 7)
+obs1 <- 1 : (1 * 1 * 4 * 5 * 6 * 7)
+dim(obs1) <- c(dataset = 1, member = 1, sdate = 4, ftime = 5, lat = 6, lon = 7)
+a <- BiasCorrection(exp = mod1, obs = obs1)
+str(a)
+}
+\references{
+Torralba, V., F.J. Doblas-Reyes, D. MacLeod, I. Christel and M. Davis (2017). Seasonal climate prediction: a new source of information for the management of wind energy resources. Journal of Applied Meteorology and Climatology, 56, 1231-1247, doi:10.1175/JAMC-D-16-0204.1. (CLIM4ENERGY, EUPORIAS, NEWA, RESILIENCE, SPECS)
+}
+\author{
+Verónica Torralba, \email{veronica.torralba@bsc.es}
+}
diff --git a/man/CST_BiasCorrection.Rd b/man/CST_BiasCorrection.Rd
index 55c325a2..adfc2798 100644
--- a/man/CST_BiasCorrection.Rd
+++ b/man/CST_BiasCorrection.Rd
@@ -4,13 +4,15 @@
 \alias{CST_BiasCorrection}
 \title{Bias Correction based on the mean and standard deviation adjustment}
 \usage{
-CST_BiasCorrection(exp, obs, na.rm = FALSE)
+CST_BiasCorrection(exp, obs, exp_cor = NULL, na.rm = FALSE)
 }
 \arguments{
 \item{exp}{an object of class \code{s2dv_cube} as returned by \code{CST_Load} function, containing the seasonal forecast experiment data in the element named \code{$data}}

 \item{obs}{an object of class \code{s2dv_cube} as returned by \code{CST_Load} function, containing the observed data in the element named \code{$data}.}

+\item{exp_cor}{an object of class \code{s2dv_cube} as returned by \code{CST_Load} function, containing the seasonal forecast experiment to be corrected. If it is NULL, the 'exp' forecast will be corrected.}
+
 \item{na.rm}{a logical value indicating whether missing values should be stripped before the computation proceeds, by default it is set to FALSE.}
 }
 \value{
-- GitLab
From 3c369d8440c3b4ad5399a8829df9440df5b122ad Mon Sep 17 00:00:00 2001
From: nperez
Date: Fri, 14 May 2021 20:51:06 +0200
Subject: [PATCH 07/48] add output_dims to get correct dimnames

---
 R/CST_BiasCorrection.R | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/R/CST_BiasCorrection.R b/R/CST_BiasCorrection.R
index b7bd3e1d..0b837513 100644
--- a/R/CST_BiasCorrection.R
+++ b/R/CST_BiasCorrection.R
@@ -138,7 +138,8 @@ BiasCorrection <- function (exp, obs, exp_cor = NULL, na.rm = FALSE) {
                            target_dims = list(target_dims_obs,
                                               c('member', 'sdate'),
                                               c('member', 'sdate')),
-                           fun = .sbc , na.rm = na.rm)$output1
+                           fun = .sbc , output_dims = c('member','sdate'),
+                           na.rm = na.rm)$output1
   }
   return(BiasCorrected)
 }
-- GitLab
From 9e963ec1cad6ed6f41a76f1f8e481a0f9237807f Mon Sep 17 00:00:00 2001
From: Carmen Alvarez-Castro
Date: Mon, 17 May 2021 13:45:20 +0000
Subject: [PATCH 08/48] Update ProxiesAttractor.R

---
 R/ProxiesAttractor.R | 192 ++++++++++++++++++++++++++++---------------
 1 file changed, 126 insertions(+), 66 deletions(-)

diff --git a/R/ProxiesAttractor.R b/R/ProxiesAttractor.R
index 73f45227..f502cec1 100644
--- a/R/ProxiesAttractor.R
+++ b/R/ProxiesAttractor.R
@@ -1,87 +1,147 @@
 #'ProxiesAttractor
 #'
 #'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
-#'@description This function computes two dynamical proxies of the attractor: The local dimension (d) and the inverse of the persistence (theta).
-#'These two parameters will be used as a condition for the bias correction function DynBiasCorrection.
-#'Function based on the Matlab code (@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}) used in "Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., and Yiou, P. (2019).
-#'The hammam effect or how a warm ocean enhances large scale atmospheric predictability. Nature Communications, 10(1), 1316. DOI = https://doi.org/10.1038/s41467-019-09305-8 "
+#'@author Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
+#'@author Veronica Torralba, \email{veronica.torralba@cmcc.it}
+#'@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
 #'
-#'@param dat data to create the attractor. Must be a matrix with the timesteps in rows and gridpoints in columns -> dat(time,grids)
-#
-#'@param quanti threshold for the computation of the attractor
+#'@description This function computes two dynamical proxies of the attractor:
+#'The local dimension (d) and the inverse of the persistence (theta).
+#'These two parameters will be used as a condition for the computation of dynamical
+#'scores to measure predictability and to compute bias correction conditioned by
+#'the dynamics with the function DynBiasCorrection
+#'Function based on the Matlab code (davide.faranda@lsce.ipsl.fr) used in
+#'"Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., and Yiou, P. (2019).
+#' The hammam effect or how a warm ocean enhances large scale atmospheric predictability.
+#' Nature Communications, 10(1), 1316. DOI = https://doi.org/10.1038/s41467-019-09305-8 "
+#' Davide Faranda, Gabriele Messori and Pascal Yiou. (2017).
+#' Dynamical proxies of North Atlantic predictability and extremes.
+#' Scientific Reports, 7-41278, 2017.
 #'
-#'@param iplot to have a quick view of the proxies and the attractor
+#'@param data data to create the attractor. Must be a matrix with the timesteps in nrow
+#'and the grids in ncol: data(time,grids)
 #
+#'@param quanti a number between 0 and 1 indicating the quantile threshold
+#' for the computation of the proxies
+#'
+#'@param iplot FALSE by default. If TRUE the function returns a simple plot
+#'with three panels: on the top left a plot for the local dimension 'd', on the top
+#'right a plot for the inverse of the persistence 'theta', on the bottom a plot
+#'for the 'attractor' plotted with the two properties 'd' (x axis) and 'theta'
+#'(y axis)
 #'
 #'@return dim and theta
 #'@import s2dverification
 #'@import fields
-#'@import pracma
 #'
 #'@examples
-#'# Creating an example of matrix dat(time,grids):
-#'tm=24*6 # time
-#'gm=5000 # grids
-#'m=matrix(rand(1,tm*gm),nrow=tm,ncol=gm)
-#'qm=0.90 # imposing a threshold
-#'# selecting the option to create a plot to see the attractor 'iplot=TRUE'
-#'# example
-#'ExampleAttractor=ProxiesAttractor(dat=m,quanti=qm,iplot=TRUE)
+#'# Example 1: computing the attractor using simple data
+#'# Creating an example of matrix data(time,grids):
+#'tm=2*6*3 # time
+#'gm=4*10 # grids
+#'m=matrix(runif(tm*gm),nrow=tm,ncol=gm)
+#'qm=0.90 # imposing a threshold
+#'Attractor=ProxiesAttractor(data=m,quanti=qm,iplot=FALSE)
+#'
+#'# Example 2: computing the attractor using simple s2dv data
+#'expL <- rnorm(1:200)
+#'dim(expL) <- c(member=10,lat = 4, lon = 5)
+#'obsL <- c(rnorm(1:180),expL[1,,]*1.2)
+#'dim(obsL) <- c(time = 10,lat = 4, lon = 5)
+#'time_obsL <- paste(rep("01", 10), rep("01", 10), 1994 : 2003, sep = "-")
+#'time_expL <- time_obsL[1]
+#'lon <- seq(-1,5,1.5)
+#'lat <- seq(30,35,1.5)
+#'qm=0.98 # too high for this short dataset; it is possible that it does not
+#'fulfil the requirement, in which case it would be necessary to select a lower qm,
+#'for instance qm=0.60
+#'qm=0.60
+#'expL <- s2dv_cube(data = expL, lat = lat, lon = lon,
+#'                  Dates = list(start = time_expL, end = time_expL))
+#'obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon,
+#'                  Dates = list(start = time_obsL, end = time_obsL))
+#'
+#'attractor <- CST_ProxiesAttractor(data=obsL,quanti=qm)
+#'
+#'
+#'
+CST_ProxiesAttractor <- function(data,quanti,ncores=NULL){
+
+  if (!inherits(data, 's2dv_cube')) {
+    stop("Parameter 'data' must be of the class 's2dv_cube', ",
+         "as output by CSTools::CST_Load.")
+  }
+  if (any(names(dim(data$data)) %in% 'sdate')) {
+    if (any(names(dim(data$data)) %in% 'ftime')) {
+      data$data <- MergeDims(data$data, merge_dims = c('ftime', 'sdate'),
+                             rename_dim = 'time')
+    }
+  }
+  if (!(any(names(dim(data$data)) %in% 'time'))){
+    stop('dimension time is mandatory')
+  }
+  if (any(names(dim(data$data)) %in% 'lat')) {
+    if (any(names(dim(data$data)) %in% 'lon')) {
+      data$data <- MergeDims(data$data, merge_dims = c('lon', 'lat'),
+                             rename_dim = 'grid')
+    }
+  }
+  if(!(any(names(dim(data$data)) %in% 'grid'))){
+    stop('dimension grid is mandatory')
+  }
+
+  attractor <- Apply(data$data, target_dims=c('time','grid'), fun=ProxiesAttractor,
+                     quanti=quanti, iplot=FALSE, ncores=ncores)
+  # rename dimensions
+  attractor <- lapply(attractor, FUN=function(x,dimname){names(dim(x))[dimname] <- 'time'
+                     return(x)}, dimname = which(names(dim(attractor[[1]]))=='dim2'))
+
+  return(list(dim=attractor$dim, theta=attractor$theta))
+}
+
+ProxiesAttractor <- function(data,quanti, iplot=FALSE,ncores=NULL){
+
+  logdista <- apply(data,1,function(x,y){
+    -log(colMeans((y-x)^2))},
+    y=t(data))
+
+  #Computation of theta
+  Theta <- function(logdista,quanti){
+    #Compute the threshold corresponding to the quantile
+    thresh <- quantile(logdista,quanti,na.rm=TRUE)
+    logdista[which(logdista=='Inf')] <- NaN
+    Li <- which(as.vector(logdista) > as.numeric(thresh))
+    #Length of each cluster
+    Ti <- diff(Li)
+    N <- length(Ti)
+    q <- 1-quanti
+    Si <- Ti-1
+    Nc <- length(which(Si>0))
+    N <- length(Ti)
+    theta <- (sum(q*Si)+N+Nc-sqrt(((sum(q*Si)+N+Nc)^2)-8*Nc*sum(q*Si)))/(2*sum(q*Si))
+    #Sort the exceedances
+    logdista <- sort(logdista)
+    #Find all the Peaks over Thresholds.
+    findidx <- which(as.vector(logdista) > as.numeric(thresh))
+    if(length(findidx) < 1){
+      stop("quanti too high for the length of the dataset")
+    }
+    logextr <- logdista[findidx[[1]]:(length(logdista)-1)]
+    #The inverse of the dimension is just the average of the exceedances
+    dim <- 1/mean(logextr-thresh)
+    return(list(dim = dim, theta = theta))
+  }
-- GitLab
From 9a3029d13efe3bfa979773fad6a2df0adb072832 Mon Sep 17 00:00:00 2001
From: Carmen Alvarez-Castro
Date: Tue, 18 May 2021 07:54:45 +0000
Subject: [PATCH 09/48] Update ProxiesAttractor.R

---
 R/ProxiesAttractor.R | 90 ++++++++++++++++++++++++++++++--------------
 1 file changed, 24 insertions(+), 66 deletions(-)

diff --git a/R/ProxiesAttractor.R b/R/ProxiesAttractor.R
index f502cec1..c400b98c 100644
--- a/R/ProxiesAttractor.R
+++ b/R/ProxiesAttractor.R
@@ -1,5 +1,7 @@
-#'ProxiesAttractor
+
+#'@rdname ProxiesAttractor
 #'
+#'@title Computing two dynamical proxies of the attractor.
 #'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
 #'@author Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
 #'@author Veronica Torralba, \email{veronica.torralba@cmcc.it}
 #'@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
 #'
 #'@description This function computes two dynamical proxies of the attractor:
 #'The local dimension (d) and the inverse of the persistence (theta).
 #'These two parameters will be used as a condition for the computation of dynamical
 #'scores to measure predictability and to compute bias correction conditioned by
-#'the dynamics with the function DynBiasCorrection
-#'Function based on the Matlab code (davide.faranda@lsce.ipsl.fr) used in
-#'"Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., and Yiou, P. (2019).
+#'the dynamics with the function DynBiasCorrection.
+#'Function based on the Matlab code (davide.faranda@lsce.ipsl.fr) used in:
+#'@references Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., and Yiou, P. (2019).
 #' The hammam effect or how a warm ocean enhances large scale atmospheric predictability.
 #' Nature Communications, 10(1), 1316. DOI = https://doi.org/10.1038/s41467-019-09305-8 "
-#' Davide Faranda, Gabriele Messori and Pascal Yiou. (2017).
+#'@references Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
 #' Dynamical proxies of North Atlantic predictability and extremes.
 #' Scientific Reports, 7-41278, 2017.
 #'
 #'@param data data to create the attractor. Must be a matrix with the timesteps in nrow
 #'and the grids in ncol: data(time,grids)
 #
 #'@param quanti a number between 0 and 1 indicating the quantile threshold
 #' for the computation of the proxies
 #'
+#'@param ncores The number of cores to use in parallel computation
+#'
 #'@param iplot FALSE by default. If TRUE the function returns a simple plot
 #'with three panels: on the top left a plot for the local dimension 'd', on the top
 #'right a plot for the inverse of the persistence 'theta', on the bottom a plot
 #'for the 'attractor' plotted with the two properties 'd' (x axis) and 'theta'
 #'(y axis)
 #'
 #'@return dim and theta
 #'@import s2dverification
+#'@import multiApply
 #'@import fields
 #'
 #'@examples
-#'# Example 1: computing the attractor using simple data
+#'# Example 1: Computing the attractor using simple data
 #'# Creating an example of matrix data(time,grids):
 #'tm=2*6*3 # time
 #'gm=4*10 # grids
 #'m=matrix(runif(tm*gm),nrow=tm,ncol=gm)
 #'qm=0.90 # imposing a threshold
 #'Attractor=ProxiesAttractor(data=m,quanti=qm,iplot=FALSE)
 #'

 ProxiesAttractor <- function(data,quanti, iplot=FALSE,ncores=NULL){
+  if (is.null(data)) {
+    stop("Parameter 'data' is mandatory")
+  }
+  if (is.null(quanti)) {
+    stop("Parameter 'quanti' is mandatory")
+  }
+  if (is.null(iplot)) {
+    stop("Parameter 'iplot' is mandatory and it is set to FALSE by default")
+  }

-  logdista <- apply(data,1,function(x,y){
-    -log(colMeans((y-x)^2))},
-    y=t(data))
+  logdista <- Apply(data, target_dims = 2,
+                    fun = function(x,y){-log(colMeans((y-as.vector(x))^2))},
+                    y=t(data), ncores=ncores)[[1]]

   #Computation of theta
   Theta <- function(logdista,quanti){
-- GitLab
From 6d2ecf5a406d1d89e1f01cffad33d7ec1d3cf078 Mon Sep 17 00:00:00 2001
From: Carmen Alvarez-Castro
Date: Tue, 18 May 2021 07:55:35 +0000
Subject: [PATCH 10/48] Add new file CST_ProxiesAttractor

---
 R/CST_ProxiesAttractor | 94 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 94 insertions(+)
 create mode 100644 R/CST_ProxiesAttractor

diff --git a/R/CST_ProxiesAttractor b/R/CST_ProxiesAttractor
new file mode 100644
index 00000000..5ec3373e
--- /dev/null
+++ b/R/CST_ProxiesAttractor
@@ -0,0 +1,94 @@
+#'@rdname CST_ProxiesAttractor
+#'@title Computing two dynamical proxies of the attractor in 's2dv_cube'.
+#'
+#'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
+#'@author Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
+#'@author Veronica Torralba, \email{veronica.torralba@cmcc.it}
+#'@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
+#'
+#'@description This function computes two dynamical proxies of the attractor:
+#'The local dimension (d) and the inverse of the persistence (theta) for an
+#''s2dv_cube' object.
+#'These two parameters will be used as a condition for the computation of dynamical
+#'scores to measure predictability and to compute bias correction conditioned by
+#'the dynamics with the function DynBiasCorrection.
+#'Function based on the Matlab code (davide.faranda@lsce.ipsl.fr) used in:
+#'@references Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., and Yiou, P. (2019).
+#' The hammam effect or how a warm ocean enhances large scale atmospheric predictability.
+#' Nature Communications, 10(1), 1316. DOI = https://doi.org/10.1038/s41467-019-09305-8
+#'@references Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
+#' Dynamical proxies of North Atlantic predictability and extremes.
+#' Scientific Reports, 7-41278, 2017.
+#'
+#'@param data data to create the attractor. Must be a matrix with the timesteps in nrow
+#'and the grids in ncol: data(time,grids)
+#
+#'@param quanti a number between 0 and 1 indicating the quantile threshold for the computation of the proxies
+#'
+#'@param ncores The number of cores to use in parallel computation
+#'
+#'@return dim and theta
+#'@import s2dverification
+#'@import fields
+#'@import multiApply
+#'
+#'@examples
+#'# Example 1: Computing the attractor using simple s2dv data
+#'expL <- rnorm(1:200)
+#'dim(expL) <- c(member=10,lat = 4, lon = 5)
+#'obsL <- c(rnorm(1:180),expL[1,,]*1.2)
+#'dim(obsL) <- c(time = 10,lat = 4, lon = 5)
+#'time_obsL <- paste(rep("01", 10), rep("01", 10), 1994 : 2003, sep = "-")
+#'time_expL <- time_obsL[1]
+#'lon <- seq(-1,5,1.5)
+#'lat <- seq(30,35,1.5)
+#'qm=0.98 # too high for this short dataset; it is possible that it does not
+#'fulfil the requirement, in which case it would be necessary to select a lower qm,
+#'for instance qm=0.60
+#'qm=0.60
+#'expL <- s2dv_cube(data = expL, lat = lat, lon = lon,
+#'                  Dates = list(start = time_expL, end = time_expL))
+#'obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon,
+#'                  Dates = list(start = time_obsL, end = time_obsL))
+#'
+#'attractor <- CST_ProxiesAttractor(data=obsL,quanti=qm)
+#'
+CST_ProxiesAttractor <- function(data,quanti,ncores=NULL){
+  if (is.null(data)) {
+    stop("Parameter 'data' is mandatory")
+  }
+  if (!inherits(data, 's2dv_cube')) {
+    stop("Parameter 'data' must be of the class 's2dv_cube', ",
+         "as output by CSTools::CST_Load.")
+  }
+  if (is.null(quanti)) {
+    stop("Parameter 'quanti' is mandatory")
+  }
+  if (any(names(dim(data$data)) %in% 'sdate')) {
+    if (any(names(dim(data$data)) %in% 'ftime')) {
+      data$data <- MergeDims(data$data, merge_dims = c('ftime', 'sdate'),
+                             rename_dim = 'time')
+    }
+  }
+  if (!(any(names(dim(data$data)) %in% 'time'))){
+    stop('dimension time is mandatory')
+  }
+  if (any(names(dim(data$data)) %in% 'lat')) {
+    if (any(names(dim(data$data)) %in% 'lon')) {
+      data$data <- MergeDims(data$data, merge_dims = c('lon', 'lat'),
+                             rename_dim = 'grid')
+    }
+  }
+  if(!(any(names(dim(data$data)) %in% 'grid'))){
+    stop('dimension grid is mandatory')
+  }
+
+  attractor <- Apply(data$data, target_dims=c('time','grid'), fun=ProxiesAttractor,
+                     quanti=quanti, iplot=FALSE, ncores=ncores)
+  # rename dimensions
+  attractor <- lapply(attractor, FUN=function(x,dimname){names(dim(x))[dimname] <- 'time'
+                      return(x)}, dimname = which(names(dim(attractor[[1]]))=='dim2'))
+
+  return(list(dim=attractor$dim, theta=attractor$theta))
+}
-- GitLab
From c62db3be28119f2c99b0de7976d0876904ac9da2 Mon Sep 17 00:00:00 2001
From: Carmen Alvarez-Castro
Date: Tue, 18 May 2021 07:55:56 +0000
Subject: [PATCH 11/48] Update CST_ProxiesAttractor

---
 R/{CST_ProxiesAttractor => CST_ProxiesAttractor.R} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename R/{CST_ProxiesAttractor => CST_ProxiesAttractor.R} (100%)

diff --git a/R/CST_ProxiesAttractor b/R/CST_ProxiesAttractor.R
similarity index 100%
rename from R/CST_ProxiesAttractor
rename to R/CST_ProxiesAttractor.R
-- GitLab
From 41a5bb25e17246531717f6d356185df3b30a00cd Mon Sep 17 00:00:00 2001
From: Carmen Alvarez-Castro
Date: Fri, 21 May 2021 16:56:03 +0000
Subject: [PATCH 12/48] Add new file

---
 R/CST_Predictability.R | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 R/CST_Predictability.R

diff --git a/R/CST_Predictability.R b/R/CST_Predictability.R
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/R/CST_Predictability.R
@@ -0,0 +1 @@
+
-- GitLab
From 01a42b6717d6287527b05d4150a6ca278d7d9cd6 Mon Sep 17 00:00:00 2001
From: Carmen Alvarez-Castro
Date: Fri, 21 May 2021 16:56:33 +0000
Subject: [PATCH 13/48]
 Update CST_Predictability.R

---
 R/CST_Predictability.R | 118 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 118 insertions(+)

diff --git a/R/CST_Predictability.R b/R/CST_Predictability.R
index 8b137891..fffa1c64 100644
--- a/R/CST_Predictability.R
+++ b/R/CST_Predictability.R
@@ -1 +1,119 @@
+#'@rdname CST_Predictability
+#'@title Computing scores of predictability using two dynamical proxies
+#'based on dynamical systems theory.
+#'
+#'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
+#'@author Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
+#'@author Veronica Torralba, \email{veronica.torralba@cmcc.it}
+#'@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
+#'
+#'@description This function divides in terciles the two dynamical proxies
+#'computed with CST_ProxiesAttractor or ProxiesAttractor. These terciles will
+#'be used to measure the predictability of the system in dyn_scores. When the
+#'local dimension 'dim' is small and the inverse of persistence 'theta' is
+#'small the predictability is higher, and vice versa.
+#'
+#'@references Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D.,
+#'and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large
+#'scale atmospheric predictability. Nature Communications, 10(1), 1316.
+#'DOI = https://doi.org/10.1038/s41467-019-09305-8
+#'@references Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
+#' Dynamical proxies of North Atlantic predictability and extremes.
+#' Scientific Reports, 7-41278, 2017.
+#'
+#'@param dim the local dimension 'dim' of the attractor: a vector with the same
+#'length as the timesteps, as computed by CST_ProxiesAttractor or ProxiesAttractor
+#
+#'@param theta the inverse of the persistence 'theta' of the attractor: a vector with
+#'the same length as the timesteps, as computed by CST_ProxiesAttractor or ProxiesAttractor
+#'
+#'@param ncores The number of cores to use in parallel computation
+#'
+#'@return pred.dim a list of two lists 'qdim' and 'pos.d'. The 'qdim' list
+#'contains values of local dimension 'dim' divided by terciles:
+#'d1: lower tercile and more predictability,
+#'d2: middle tercile,
+#'d3: higher tercile and less predictability
+#'The 'pos.d' list contains the position of each tercile in parameter 'dim'
+#'
+#'@return pred.theta a list of two lists 'qtheta' and 'pos.t'.
+#'The 'qtheta' list contains values of the inverse of persistence 'theta'
+#'divided by terciles:
+#'th1: lower tercile and more predictability,
+#'th2: middle tercile,
+#'th3: higher tercile and less predictability
+#'The 'pos.t' list contains the position of each tercile in parameter 'theta'
+#'
+#'@return dyn_scores values from 0 to 1. A dyn_score of 1 indicates higher
+#'predictability.
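+#'(The terciles are obtained with \code{quantile} at probabilities 0.33 and 0.66, and the 'pos.d' and 'pos.t' lists give the timestep positions falling in each tercile.)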
+#'
+#'@import s2dverification
+#'@import multiApply
+#'@import CSTools
+#'
+#'@examples
+#'# Creating an example of matrix dat(time,grids):
+#' m=matrix(runif(2000)*10,nrow=50,ncol=40)
+#'# imposing a threshold
+#' quanti=0.90
+#'# computing dyn_scores from parameters dim and theta of the attractor
+#' attractor = ProxiesAttractor(data=m,quanti=quanti,iplot=FALSE)
+#' predyn = CST_Predictability(dim=attractor$dim,theta=attractor$theta)
+#'
+CST_Predictability <- function(dim,theta,ncores=NULL){
+  if (is.null(dim)) {
+    stop("Parameter 'dim' is mandatory")
+  }
+  if (is.null(theta)) {
+    stop("Parameter 'theta' is mandatory")
+  }
+  if (length(dim)!=length(theta)) {
+    stop("Parameters 'dim' and 'theta' must have the same length")
+  }
+  pos <- c(1:length(dim))
+
+  # dim
+  qd1 <- quantile(dim,0.33,na.rm=TRUE)
+  qd3 <- quantile(dim,0.66,na.rm=TRUE)
+  d3 <- which(dim>=qd3)
+  d1 <- which(dim<=qd1)
+  d2 <- which(dim>qd1 & dim<qd3)
+
+  # theta
+  qt1 <- quantile(theta,0.33,na.rm=TRUE)
+  qt3 <- quantile(theta,0.66,na.rm=TRUE)
+  th3 <- which(theta>=qt3)
+  th1 <- which(theta<=qt1)
+  th2 <- which(theta>qt1 & theta<qt3)
Date: Fri, 21 May 2021 17:25:13 +0000
Subject: [PATCH 14/48] Add new file

---
 R/DynBiasCorrection.R | 92 +++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 92 insertions(+)
 create mode 100644 R/DynBiasCorrection.R

diff --git a/R/DynBiasCorrection.R b/R/DynBiasCorrection.R
new file mode 100644
index 00000000..36f7864f
--- /dev/null
+++ b/R/DynBiasCorrection.R
@@ -0,0 +1,92 @@
+#'@rdname DynBiasCorrection
+#'@title Performing a bias correction conditioned by the dynamical
+#'properties of the data.
+#'
+#'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
+#'
+#'@description This function performs a bias correction conditioned by the
+#'dynamical properties of the dataset. It uses the function
+#''CST_Predictability' to divide in terciles the two dynamical proxies
+#'computed with CST_ProxiesAttractor or ProxiesAttractor. A bias correction
+#'between the model and the observations is performed using the division into
+#'terciles of the local dimension 'dim' and the inverse of the persistence 'theta'.
+#'For instance, model values with lower 'dim' will be corrected with observed
+#'values with lower 'dim', and the same for theta. The function gives two options
+#'of bias correction: one for 'dim' and one for 'theta'.
+#'
+#'@references Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D.,
+#'and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large
+#'scale atmospheric predictability. Nature Communications, 10(1), 1316.
+#'DOI = https://doi.org/10.1038/s41467-019-09305-8
+#'@references Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
+#' Dynamical proxies of North Atlantic predictability and extremes.
+#' Scientific Reports, 7-41278, 2017.
+#'
+#'@param pred.dim.mod output of CST_Predictability or a list of two lists 'qdim' and 'pos.d'. The 'qdim' list
+#'contains values of local dimension 'dim' divided by terciles:
+#'d1: lower tercile and more predictability,
+#'d2: middle tercile,
+#'d3: higher tercile and less predictability
+#'The 'pos.d' list contains the position of each tercile in parameter 'dim'
+#'@param pred.theta.mod output of CST_Predictability or a list of two lists 'qtheta' and 'pos.t'.
+#'The 'qtheta' list contains values of the inverse of persistence 'theta'
+#'divided by terciles:
+#'th1: lower tercile and more predictability,
+#'th2: middle tercile,
+#'th3: higher tercile and less predictability
+#'The 'pos.t' list contains the position of each tercile in parameter 'theta'
+#'
+#'@param pred.dim.obs same as 'pred.dim.mod' but for the observations
+#'
+#'@param pred.theta.obs same as 'pred.theta.mod' but for the observations
+#'
+#'@param ncores The number of cores to use in parallel computation
+#'
+#'@return dim.bias an 's2dv_cube' object with a bias correction performed
+#'conditioned by the local dimension 'dim'
+#'
+#'@return theta.bias an 's2dv_cube' object with a bias correction performed
+#'conditioned by the inverse of the persistence 'theta'
+#'
+#'@import s2dverification
+#'@import multiApply
+#'@import CSTools
+#'
+#'@examples
+#'# example 1: simple data s2dv_cube style
+# expL <- rnorm(1:200)
+# dim(expL) <- c(member=10,lat = 4, lon = 5)
+# obsL <- c(rnorm(1:180),expL[1,,]*1.2)
+# dim(obsL) <- c(time = 10,lat = 4, lon = 5)
+# time_obsL <- paste(rep("01", 50), rep("01", 50), 1953 : 2003, sep = "-")
+# time_expL <- time_obsL[1]
+# lon <- seq(-1,5,1.5)
+# lat <- seq(30,35,1.5)
+# qm=0.98
+# expL <- s2dv_cube(data = expL, lat = lat, lon = lon,
+#                   Dates = list(start = time_expL, end = time_expL))
+# obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon,
+#                   Dates = list(start = time_obsL, end = time_obsL))
+# attractor <- CST_ProxiesAttractor(data=obsL,quanti=qm)
+# predynObs = CST_Predictability(dim=attractor$dim,theta=attractor$theta)
+# attractorMod <- CST_ProxiesAttractor(data=expL,quanti=qm)
+# predynMod = CST_Predictability(dim=attractorMod$dim,theta=attractorMod$theta)
+# dynbias = DynBiasCorrection(pred.dim.obs=predynObs$pred.dim,pred.theta.obs=predynObs$pred.theta,
+#                             pred.dim.mod=predynMod$pred.dim,pred.theta.mod=predynMod$pred.theta)

+DynBiasCorrection <- function(pred.dim.obs,pred.theta.obs,pred.dim.mod,
+                              pred.theta.mod,ncores=NULL){
+  if (is.null(pred.dim.mod)) {
+    stop("Parameter 'pred.dim.mod' is mandatory")
+  }
+  if (is.null(pred.dim.obs)) {
+    stop("Parameter 'pred.dim.obs' is mandatory")
+  }
+  if (is.null(pred.theta.mod)) {
+    stop("Parameter 'pred.theta.mod' is mandatory")
+  }
+  if (is.null(pred.theta.obs)) {
+    stop("Parameter 'pred.theta.obs' is mandatory")
+  }
+  return(list(dim.bias=dim.bias,theta.bias=theta.bias))
+}
-- GitLab
From 50a4fe8547554ba40c976ca0add3d5667f79c94c Mon Sep 17 00:00:00 2001
From: nperez
Date: Tue, 25 May 2021 16:19:45 +0200
Subject: [PATCH 15/48] one automatic test to biascorrection

---
 tests/testthat/test-CST_BiasCorrection.R | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/tests/testthat/test-CST_BiasCorrection.R b/tests/testthat/test-CST_BiasCorrection.R
index c5525c57..fc6405ce 100644
--- a/tests/testthat/test-CST_BiasCorrection.R
+++ b/tests/testthat/test-CST_BiasCorrection.R
@@ -42,4 +42,9 @@ test_that("Sanity checks", {
   expect_warning(CST_BiasCorrection(exp = exp2, obs = obs2),
                  "Parameter 'obs' contains NA values",
                  "Parameter 'exp' contains NA values.")
+
+  hinc <- array(1:6, c(sdate = 3, member = 2))
+  obs <- array(3:6, c(sdate = 3, member = 1))
+  res <- round(BiasCorrection(exp = hinc, obs = obs, exp_cor = hinc), 2)
+  expect_equal(res, array(c(2.66, 4.27, 3.2, 4.8, 3.73, 5.34), c(member = 2, sdate = 3)))
 })
-- GitLab
From bb46beac69b78020272073fef3b61a24738ac94e Mon Sep 17 00:00:00 2001
From: nperez
Date: Tue, 25 May 2021 16:27:52 +0200
Subject: [PATCH 16/48] Update NEWS and version number

---
 DESCRIPTION | 2 +-
 NEWS.md     | 6 ++++++
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/DESCRIPTION b/DESCRIPTION
index 7fe8d681..6bc5898f 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -1,7 +1,7 @@
 Package: CSTools
 Title: Assessing Skill of Climate Forecasts on Seasonal-to-Decadal
 Timescales
-Version: 4.0.0
+Version: 4.0.1
 Authors@R: c(
 person("Nuria", "Perez-Zanon", , "nuria.perez@bsc.es", role = c("aut", "cre"), comment = c(ORCID = "0000-0001-8568-3071")),
 person("Louis-Philippe", "Caron", , "louis-philippe.caron@bsc.es", role = "aut", comment = c(ORCID = "0000-0001-5221-0147")),
diff --git a/NEWS.md b/NEWS.md
index 23d19889..6dbf7005 100644
--- a/NEWS.md
+++ b/NEWS.md
@@ -1,3 +1,9 @@
+### CSTools 4.0.1
+**Submission date to CRAN: XX-06-2021**
+
+- Fixes:
+  + Calibration retains correlation absolute value
+
 ### CSTools 4.0.0
 **Submission date to CRAN: XX-12-2020**
-- GitLab
From 2aefaba8eebd22cf982eed2a1957674f7ecdd265 Mon Sep 17 00:00:00 2001
From: nperez
Date: Tue, 25 May 2021 17:42:29 +0200
Subject: [PATCH 17/48] Update NEWS

---
 NEWS.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/NEWS.md b/NEWS.md
index 6dbf7005..60d119cf 100644
--- a/NEWS.md
+++ b/NEWS.md
@@ -3,6 +3,8 @@

 - Fixes:
   + Calibration retains correlation absolute value
+  + Calibration fixed when cal.method == rpc-based, apply_to == sign,
+    eval.method == 'leave-one-out' and the correlation is not significant

 ### CSTools 4.0.0
 **Submission date to CRAN: XX-12-2020**
-- GitLab
From 356d1204abbbf5f0d32eca539ed0606fed17c8c6 Mon Sep 17 00:00:00 2001
From: nperez
Date: Fri, 28 May 2021 15:34:31 +0200
Subject: [PATCH 18/48] Plot dots

---
 R/PlotCombinedMap.R    | 25 +++++++++++++++++++++++--
 man/Calibration.Rd     |  2 +-
 man/PlotCombinedMap.Rd |  8 ++++++++
 3 files changed, 32 insertions(+), 3 deletions(-)

diff --git a/R/PlotCombinedMap.R b/R/PlotCombinedMap.R
index 2aee71b8..f43f63bf 100644
--- a/R/PlotCombinedMap.R
+++ b/R/PlotCombinedMap.R
@@ -14,6 +14,12 @@
 #'@param col_unknown_map Colour to use to paint the grid cells for which a map is not possible to be chosen according to 'map_select_fun' or for those values that go beyond 'display_range'. Takes the value 'white' by default.
 #'@param mask Optional numeric array with dimensions (latitude, longitude), with values in the range [0, 1], indicating the opacity of the mask over each grid point. Cells with a 0 will result in no mask, whereas cells with a 1 will result in a totally opaque superimposed pixel coloured in 'col_mask'.
 #'@param col_mask Colour to be used for the superimposed mask (if specified in 'mask'). Takes the value 'grey' by default.
+#'@param dots Array of same dimensions as 'var' or with dimensions
+#' c(n, dim(var)), where n is the number of dot/symbol layers to add to the
+#' plot. A value of TRUE at a grid cell will draw a dot/symbol on the
+#' corresponding square of the plot. By default all layers provided in 'dots'
+#' are plotted with dots, but a symbol can be specified for each of the
+#' layers via the parameter 'dot_symbol'.
 #'@param bar_titles Optional vector of character strings providing the titles to be shown on top of each of the colour bars.
 #'@param legend_scale Scale factor for the size of the colour bar labels. Takes 1 by default.
 #'@param fileout File where to save the plot. If not specified (default) a graphics device will pop up. Extensions allowed: eps/ps, jpeg, png, pdf, bmp and tiff
Extensions allowed: eps/ps, jpeg, png, pdf, bmp and tiff
@@ -61,6 +67,7 @@ PlotCombinedMap <- function(maps, lon, lat,
                             brks = NULL, cols = NULL,
                             col_unknown_map = 'white',
                             mask = NULL, col_mask = 'grey',
+                            dots = NULL,
                             bar_titles = NULL, legend_scale = 1,
                             fileout = NULL, width = 8, height = 5,
                             size_units = 'in', res = 100,
@@ -280,7 +287,17 @@ PlotCombinedMap <- function(maps, lon, lat,
       stop("Parameter 'mask' must have dimensions c(lat, lon).")
     }
   }
-  
+  # Check dots
+  if (!is.null(dots)) {
+    if (length(dim(dots)) != 2) {
+      stop("Parameter 'dots' must have two dimensions.")
+    }
+    if ((dim(dots)[1] != dim(maps)[lat_dim]) ||
+        (dim(dots)[2] != dim(maps)[lon_dim])) {
+      stop("Parameter 'dots' must have dimensions c(lat, lon).")
+    }
+  }
+
   #----------------------
   # Identify the most likely map
   #----------------------
@@ -327,6 +344,10 @@ PlotCombinedMap <- function(maps, lon, lat,
     if (!is.null(mask)){
       mask <- mask[nlat:1, ]
     }
+    if (!is.null(dots)){
+      dots <- dots[nlat:1,]
+print(dots)
+    }
   }
 
   #----------------------
@@ -353,7 +374,7 @@ PlotCombinedMap <- function(maps, lon, lat,
   tbrks <- c(-1, brks_norm + rep(1:nmap, each = length(brks)))
   PlotEquiMap(var = ml_map, lon = lon, lat = lat,
               brks = tbrks, cols = tcols, drawleg = FALSE,
-              filled.continents = FALSE, ...)
+              filled.continents = FALSE, dots = dots, ...)
 
   #----------------------
   # Add overplot on top
diff --git a/man/Calibration.Rd b/man/Calibration.Rd
index f61a3cd3..7ac9cc2d 100644
--- a/man/Calibration.Rd
+++ b/man/Calibration.Rd
@@ -48,7 +48,7 @@ Calibration(
 an array containing the calibrated forecasts with the same dimensions as the \code{exp} array.
 }
 \description{
-Four types of member-by-member bias correction can be performed. The \code{"bias"} method corrects the bias only, the \code{"evmos"} method applies a variance inflation technique to ensure the correction of the bias and the correspondence of variance between forecast and observation (Van Schaeybroeck and Vannitsem, 2011). The ensemble calibration methods \code{"mse_min"} and \code{"crps_min"} correct the bias, the overall forecast variance and the ensemble spread as described in Doblas-Reyes et al. (2005) and Van Schaeybroeck and Vannitsem (2015), respectively. While the \code{"mse_min"} method minimizes a constrained mean-squared error using three parameters, the \code{"crps_min"} method features four parameters and minimizes the Continuous Ranked Probability Score (CRPS). The \code{"rpc-based"} method adjusts the forecast variance ensuring that the ratio of predictable components (RPC) is equal to one, as in Eade et al. (2014).
+Five types of member-by-member bias correction can be performed. The \code{"bias"} method corrects the bias only, the \code{"evmos"} method applies a variance inflation technique to ensure the correction of the bias and the correspondence of variance between forecast and observation (Van Schaeybroeck and Vannitsem, 2011). The ensemble calibration methods \code{"mse_min"} and \code{"crps_min"} correct the bias, the overall forecast variance and the ensemble spread as described in Doblas-Reyes et al. (2005) and Van Schaeybroeck and Vannitsem (2015), respectively. While the \code{"mse_min"} method minimizes a constrained mean-squared error using three parameters, the \code{"crps_min"} method features four parameters and minimizes the Continuous Ranked Probability Score (CRPS). The \code{"rpc-based"} method adjusts the forecast variance ensuring that the ratio of predictable components (RPC) is equal to one, as in Eade et al. (2014).
 Both in-sample or our out-of-sample (leave-one-out cross validation) calibration are possible.
 }
 
diff --git a/man/PlotCombinedMap.Rd b/man/PlotCombinedMap.Rd
index c45d1afb..e631761e 100644
--- a/man/PlotCombinedMap.Rd
+++ b/man/PlotCombinedMap.Rd
@@ -16,6 +16,7 @@ PlotCombinedMap(
   col_unknown_map = "white",
   mask = NULL,
   col_mask = "grey",
+  dots = NULL,
   bar_titles = NULL,
   legend_scale = 1,
   fileout = NULL,
@@ -49,6 +50,13 @@ PlotCombinedMap(
 \item{col_mask}{Colour to be used for the superimposed mask (if specified in 'mask'). Takes the value 'grey' by default.}
 
+\item{dots}{Array of same dimensions as 'var' or with dimensions
+c(n, dim(var)), where n is the number of dot/symbol layers to add to the
+plot. A value of TRUE at a grid cell will draw a dot/symbol on the
+corresponding square of the plot. By default all layers provided in 'dots'
+are plotted with dots, but a symbol can be specified for each of the
+layers via the parameter 'dot_symbol'.}
+
 \item{bar_titles}{Optional vector of character strings providing the titles to be shown on top of each of the colour bars.}
 
 \item{legend_scale}{Scale factor for the size of the colour bar labels. Takes 1 by default.}
-- 
GitLab


From 30cde4fdc2fa85036e81378a8c8f3b684737e957 Mon Sep 17 00:00:00 2001
From: nperez
Date: Fri, 28 May 2021 16:17:34 +0200
Subject: [PATCH 19/48] News and remove print

---
 NEWS.md             | 1 +
 R/PlotCombinedMap.R | 1 -
 2 files changed, 1 insertion(+), 1 deletion(-)

diff --git a/NEWS.md b/NEWS.md
index ae7955dd..9a4c78e0 100644
--- a/NEWS.md
+++ b/NEWS.md
@@ -9,6 +9,7 @@
   + Calibration retains correlation absolute value
   + Calibration fixed when cal.methodi == rpc-based, apply_to == sign,
     eval.method == 'leave-one-out' and the correlation is not significant
+  + PlotMostLikelyQuantileMap reorders latitudes of an array provided in 'dots' parameter.
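For context, the reordering this entry describes mirrors the flip PlotCombinedMap already applies to 'mask': once latitudes are rearranged into increasing order for plotting, a dots layer must be flipped along the same dimension. A toy base-R sketch with a synthetic mask (not package data):

# Toy dots mask with dimensions c(lat = 3, lon = 2)
dots <- matrix(c(TRUE, FALSE, FALSE, TRUE, TRUE, FALSE), nrow = 3, ncol = 2)
nlat <- nrow(dots)
dots <- dots[nlat:1, ]  # flip the latitude rows, as done for 'mask'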
### CSTools 4.0.0 **Submission date to CRAN: 23-02-2021** diff --git a/R/PlotCombinedMap.R b/R/PlotCombinedMap.R index f43f63bf..7169e4c4 100644 --- a/R/PlotCombinedMap.R +++ b/R/PlotCombinedMap.R @@ -346,7 +346,6 @@ PlotCombinedMap <- function(maps, lon, lat, } if (!is.null(dots)){ dots <- dots[nlat:1,] -print(dots) } } -- GitLab From 514a192a729bcd1656b78d9e2ba91313f8f01c4e Mon Sep 17 00:00:00 2001 From: nperez Date: Fri, 28 May 2021 17:42:19 +0200 Subject: [PATCH 20/48] UseCase Wind --- inst/doc/UseCase1_WindEvent_March2018.R | 159 ++++++++++++++++++++++++ 1 file changed, 159 insertions(+) create mode 100644 inst/doc/UseCase1_WindEvent_March2018.R diff --git a/inst/doc/UseCase1_WindEvent_March2018.R b/inst/doc/UseCase1_WindEvent_March2018.R new file mode 100644 index 00000000..2baa597b --- /dev/null +++ b/inst/doc/UseCase1_WindEvent_March2018.R @@ -0,0 +1,159 @@ + +library(CSTools) +library(s2dv) +library(ragg) + +exp_path <- list(name = "ECMWFS5", + path = "/esarchive/exp/ecmwf/system5c3s/$STORE_FREQ$_mean/$VAR_NAME$_f6h/$VAR_NAME$_$START_DATE$.nc") +obs_path <- list(name = "ERA5", + path = "/esarchive/recon/ecmwf/era5/$STORE_FREQ$_mean/$VAR_NAME$_f1h/$VAR_NAME$_$YEAR$$MONTH$.nc") +#source("/esarchive/scratch/nperez/git/cstools/R/CST_BiasCorrection.R") +#library(multiApply) + # Target months March (3) + # Assess forecast from 1 to 3 months in advance +months_in_advance <- c('02', '01', '12') +wind_fsct_BC <- list() +wind_ref_terciles <- NULL +wind_ref_extremes <- NULL + # Observations March 2018 + wind_obs <- CSTools::CST_Load(var = 'windagl100', obs = list(obs_path), + sdates = '20180301', nmember = 1, + leadtimemin = 1, leadtimemax = 1, + storefreq = "monthly", sampleperiod = 1, + latmin = 36, latmax = 44, lonmin = -10, lonmax = 4, +# latmin = 42, latmax = 44, lonmin = -10, lonmax = 1, + output = 'lonlat', nprocs = 1, grid = 'r360x181') + # For each month in advance: +for (mm in 1:3) { + print("Three initializations:") + print(mm) + print(paste('Initialization', months_in_advance[mm])) + # Generate the start dates of hindcast period + year <- ifelse(mm == 3, 2017, 2018) + print(paste('Hindcast period until', year - 1)) + hcst_sdates <- paste0(1993:(year - 1), months_in_advance[mm], '01') + wind_hcst <- CSTools::CST_Load(var = 'sfcWind', exp = list(exp_path), + sdates = hcst_sdates, nmember = 25, + leadtimemin = mm + 1, leadtimemax = mm + 1, + storefreq = "monthly", sampleperiod = 1, + latmin = 36, latmax = 44, lonmin = -10, lonmax = 4, + # latmin = 42, latmax = 44, lonmin = -10, lonmax = 1, + output = 'lonlat', nprocs = 1) + str(wind_hcst$Dates) + dim(wind_hcst$data) + fcst_sdates <- paste0(year, months_in_advance[mm], '01') + wind_fcst <- CSTools::CST_Load(var = 'sfcWind', exp = list(exp_path), + sdates = fcst_sdates, nmember = 25, + leadtimemin = mm + 1, leadtimemax = mm + 1, + storefreq = "monthly", sampleperiod = 1, + latmin = 36, latmax = 44, lonmin = -10, lonmax = 4, + # latmin = 42, latmax = 44, lonmin = -10, lonmax = 1, + + output = 'lonlat', nprocs = 1) + str(wind_fcst$Dates) + dim(wind_fcst$data) + + wind_ref <- CSTools::CST_Load(var = 'windagl100', obs = list(obs_path), + sdates = hcst_sdates, nmember = 1, + leadtimemin = mm + 1, leadtimemax = mm + 1, + storefreq = "monthly", sampleperiod = 1, + latmin = 36, latmax = 44, lonmin = -10, lonmax = 4, +# latmin = 42, latmax = 44, lonmin = -10, lonmax = 1, + output = 'lonlat', nprocs = 1, + grid = 'r360x181') + str(wind_ref$Dates) + dim(wind_ref$data) + wind_ref_terciles <- rbind(wind_ref_terciles, + 
quantile(MeanDims(wind_ref$data, c('lat', 'lon')), c(0.3, 0.6))) + wind_ref_extremes <- rbind(wind_ref_extremes, + quantile(MeanDims(wind_ref$data, c('lat', 'lon')), c(0.1, 0.9))) + #source("/esarchive/scratch/nperez/git/cstools/R/CST_BiasCorrection.R") + library(multiApply) + wind_fsct <- CST_BiasCorrection(exp = wind_hcst, + obs = wind_ref, + exp_cor = wind_fcst) + wind_fsct_BC[[mm]] <- wind_fsct + # ----------------------------------------------- + # PLOTTING: + # PlotMostlikely + thres <- drop(Apply(list(wind_ref$data), target_dims = 'sdate', fun = function(x) { + quantile(x, c(1/3, 2/3))}, output_dims = 'probs')$output) + PB <- Apply(list(wind_fsct$data, thres), target_dims = list('member', 'probs'), + fun = function(x, z) { + result <- unlist(lapply(x, function(y) { + if (y <= z[1]) { + res <- c(1, 0, 0) + } else if (y <= z[2]) { + res <- c(0, 1, 0) + } else { + res <- c(0, 0, 1) + } + return(res)})) + dim(result) <- c(bin = 3, member = 25) + return(result) + })$output1 + Mean_PB <- drop(MeanDims(PB, 'member')) + + observed_tercile <- Apply(list(wind_obs$data, thres, Mean_PB), + target_dims = list(NULL, 'probs', 'bin'), + fun = function(x, y, z) { + val <- which.max(z) + if (val == 3) { + dot <- ifelse(x >= y[2], 1, 0) + } else if (val == 2) { + dot <- ifelse(x < y[2] && x >= y[1], 1, 0) + } else if (val == 1) { + dot <- ifelse(x < y[1], 1, 0) + } else { + stop('what') + } + return(dot) + })$output1 + + filtered_obs_terciles <- Apply(list(Mean_PB, observed_tercile), + target_dims = list('bin', NULL), + fun = function(x,y) { + if (sum(duplicated(x)) == 1) { + y <- 0 + } + return(y) })$output1 + + + source("/esarchive/scratch/nperez/git/cstools/R/PlotCombinedMap.R") + source("/esarchive/scratch/nperez/git/cstools/R/PlotMostLikelyQuantileMap.R") + agg_png(paste0("/esarchive/scratch/nperez/CSTools_manuscript/Wind/MostLikely_", mm, "_obstercile.png"), + width = 1000, height = 1000, units = 'px', res = 144) + PlotMostLikelyQuantileMap(probs = Mean_PB, lon = wind_fsct$lon, lat = wind_fsct$lat, + intylat = 2, intxlon = 2, + coast_width = 1.5, legend_scale = 0.8, cat_dim = 'bin', + dots = filtered_obs_terciles[,,1,1,1,1], + toptitle = c(paste0("Initialized on ", + month.name[as.numeric(months_in_advance[mm])]))) + dev.off() +} + +visual <- data.frame(dec = as.vector(MeanDims(wind_fsct_BC[[3]]$data, c('lat', 'lon'))), + jan = as.vector(MeanDims(wind_fsct_BC[[2]]$data, c('lat', 'lon'))), + feb = as.vector(MeanDims(wind_fsct_BC[[1]]$data, c('lat', 'lon')))) + + wind_obs <- CST_Load(var = 'windagl100', obs = list(obs_path), + sdates = '20180301', nmember = 1, + leadtimemin = 1, leadtimemax = 1, + storefreq = "monthly", sampleperiod = 1, + latmin = 36, latmax = 44, lonmin = -10, lonmax = 4, +# latmin = 42, latmax = 44, lonmin = -10, lonmax = 1, + output = 'areave', nprocs = 1) + +print("IS DATA LOADED") + +print("Wait") +agg_png("/esarchive/scratch/nperez/CSTools_manuscript/Wind/PlotForecast_IP.png", + width = 1000, height = 1000, units = 'px',res = 144) +PlotForecastPDF(visual, tercile.limits = wind_ref_terciles, + extreme.limits = wind_ref_extremes, + var.name = "Wind Speed 100 m (m/s)", title = "Bias Corrected forecasts at IP", + fcst.names = c("December", "January", "February"), obs = as.vector(wind_obs$data)) +dev.off() + +print("DONE") + -- GitLab From db45ac04691f52ae80ba1b0f42dc940ca616fc84 Mon Sep 17 00:00:00 2001 From: nperez Date: Fri, 28 May 2021 17:44:02 +0200 Subject: [PATCH 21/48] Avoiding inst folder when compiling the package --- .Rbuildignore | 1 + 1 file changed, 1 insertion(+) 
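Aside on the wind use case above: at each grid point, the Mean_PB step reduces to the share of ensemble members falling below, between, or above the two observed terciles. A self-contained sketch of that counting, with synthetic values standing in for the archived wind fields:

# One grid point: 25 synthetic members and two tercile thresholds
set.seed(1)
members <- rnorm(25, mean = 7, sd = 1)
thres <- quantile(rnorm(100, mean = 7, sd = 1), c(1/3, 2/3))
# Fraction of members below, between and above the thresholds (sums to 1)
probs <- c(mean(members <= thres[1]),
           mean(members > thres[1] & members <= thres[2]),
           mean(members > thres[2]))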
diff --git a/.Rbuildignore b/.Rbuildignore index b2d8e5fc..b9c9633c 100644 --- a/.Rbuildignore +++ b/.Rbuildignore @@ -6,3 +6,4 @@ .*^(?!data)\.RData$ .*\.gitlab-ci.yml$ #^tests$ +^inst/doc$ -- GitLab From c525f7c3afdf44c3edd753f90e7e0ce53ac7f008 Mon Sep 17 00:00:00 2001 From: nperez Date: Mon, 31 May 2021 12:35:03 +0200 Subject: [PATCH 22/48] inst/doc are included in the package --- .Rbuildignore | 1 - 1 file changed, 1 deletion(-) diff --git a/.Rbuildignore b/.Rbuildignore index b9c9633c..b2d8e5fc 100644 --- a/.Rbuildignore +++ b/.Rbuildignore @@ -6,4 +6,3 @@ .*^(?!data)\.RData$ .*\.gitlab-ci.yml$ #^tests$ -^inst/doc$ -- GitLab From b5693b0be59fac55bff02645ef988fb8854b37e1 Mon Sep 17 00:00:00 2001 From: nperez Date: Mon, 31 May 2021 19:14:47 +0200 Subject: [PATCH 23/48] Changes to follow guidelines --- DESCRIPTION | 6 +- NAMESPACE | 6 + R/CST_ProxiesAttractor.R | 191 ++++++++++++++----- R/DynBiasCorrection.R | 41 ++-- R/{CST_Predictability.R => Predictability.R} | 70 ++++--- man/AnoMultiMetric.Rd | 3 +- man/CST_ProxiesAttractor.Rd | 52 +++++ man/DynBiasCorrection.Rd | 89 +++++++++ man/Predictability.Rd | 72 +++++++ man/ProxiesAttractor.Rd | 95 +++++++++ 10 files changed, 516 insertions(+), 109 deletions(-) rename R/{CST_Predictability.R => Predictability.R} (72%) create mode 100644 man/CST_ProxiesAttractor.Rd create mode 100644 man/DynBiasCorrection.Rd create mode 100644 man/Predictability.Rd create mode 100644 man/ProxiesAttractor.Rd diff --git a/DESCRIPTION b/DESCRIPTION index c7355509..3c22d1e5 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -21,10 +21,12 @@ Depends: Imports: s2dverification, abind, - stats + stats, + multiApply, + fields License: LGPL-3 URL: https://earth.bsc.es/gitlab/external/cstools BugReports: https://earth.bsc.es/gitlab/external/cstools/issues Encoding: UTF-8 LazyData: true -RoxygenNote: 6.0.1.9000 +RoxygenNote: 7.0.2 diff --git a/NAMESPACE b/NAMESPACE index 576f3a5d..5bb49e11 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -1,6 +1,12 @@ # Generated by roxygen2: do not edit by hand export(AnoMultiMetric) +export(CST_ProxiesAttractor) +export(DynBiasCorrection) export(MultivarRMSE) +export(Predictability) +export(ProxiesAttractor) +import(fields) +import(multiApply) import(s2dverification) import(stats) diff --git a/R/CST_ProxiesAttractor.R b/R/CST_ProxiesAttractor.R index 5ec3373e..903f05ca 100644 --- a/R/CST_ProxiesAttractor.R +++ b/R/CST_ProxiesAttractor.R @@ -1,5 +1,5 @@ #'@rdname CST_ProxiesAttractor -#'@title Computing two dinamical proxies of the attractor in s2dvcube. +#'@title Computing two dinamical proxies of the attractor in s2dv_cube. #' #'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it} #'@author Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it} @@ -8,7 +8,7 @@ #' #'@description This function computes two dinamical proxies of the attractor: #'The local dimension (d) and the inverse of the persistence (theta) for an -#''s2dvcube' object. +#''s2dv_cube' object. #'These two parameters will be used as a condition for the computation of dynamical #'scores to measure predictability and to compute bias correction conditioned by #'the dynamics with the function DynBiasCorrection @@ -20,7 +20,7 @@ #' Dynamical proxies of North Atlantic predictability and extremes. #' Scientific Reports, 7-41278, 2017. #' -#'@param data data to create the attractor. Must be a matrix with the timesteps in nrow +#'@param data a s2dv_cube object with the data to create the attractor. 
Must be a matrix with the timesteps in nrow #'and the grids in ncol(dat(time,grids) # #'@param quanti list of arbitrary length of secondary grids. Each secondary grid is to @@ -29,66 +29,165 @@ #'@param ncores The number of cores to use in parallel computation #' #'@return dim and theta -#'@import s2dverification -#'@import fields -#'@import multiApply #' #'@examples #'# Example 1: Computing the attractor using simple s2dv data -#'expL <- rnorm(1:200) -#'dim(expL) <- c(member=10,lat = 4, lon = 5) -#'obsL <- c(rnorm(1:180),expL[1,,]*1.2) -#'dim(obsL) <- c(time = 10,lat = 4, lon = 5) -#'time_obsL <- paste(rep("01", 10), rep("01", 10), 1994 : 2003, sep = "-") -#'time_expL <- time_obsL[1] -#'lon <- seq(-1,5,1.5) -#'lat <- seq(30,35,1.5) -#'qm=0.98 # too high for this short dataset, it is possible that doesn't -#'get the requirement, in that case it would be necessary select a lower qm -#'for instance qm=0.60 -#'qm=0.60 -#'expL <- s2dv_cube(data = expL, lat = lat, lon = lon, -#' Dates = list(start = time_expL, end = time_expL)) -#'obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon, -#' Dates = list(start = time_obsL, end = time_obsL)) -#' -#'attractor<- CST_ProxiesAttractor(data=obsL,quanti=qm) +#'attractor <- CST_ProxiesAttractor(data = lonlat_data$obs, quanti = 0.6) #' -CST_ProxiesAttractor <- function(data,quanti,ncores=NULL){ - if (is.null(data)) { - stop("Parameter 'data' is mandatory") - } +#'@export +CST_ProxiesAttractor <- function(data, quanti, ncores = NULL){ if (!inherits(data, 's2dv_cube')) { stop("Parameter 'data' must be of the class 's2dv_cube', ", "as output by CSTools::CST_Load.") } if (is.null(quanti)) { - stop("Parameter 'quanti' is mandatory") + stop("Parameter 'quanti' cannot be NULL.") } - if (any(names(dim(data$data)) %in% 'sdate')) { - if (any(names(dim(data$data)) %in% 'ftime')) { - data$data <- MergeDims(data$data, merge_dims = c('ftime', 'sdate'), + + data$data <- ProxiesAttractor(data = data$data, quanti = quanti, ncores = ncores) + + return(data) +} +#'@rdname ProxiesAttractor +#' +#'@title Computing two dinamical proxies of the attractor. +#'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it} +#'@author Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it} +#'@author Veronica Torralba, \email{veronica.torralba@cmcc.it} +#'@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr} +#' +#'@description This function computes two dinamical proxies of the attractor: +#'The local dimension (d) and the inverse of the persistence (theta). +#'These two parameters will be used as a condition for the computation of dynamical +#'scores to measure predictability and to compute bias correction conditioned by +#'the dynamics with the function DynBiasCorrection. +#'Funtion based on the matlab code (davide.faranda@lsce.ipsl.fr) used in: +#'@references Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., and Yiou, P. (2019). +#' The hammam effect or how a warm ocean enhances large scale atmospheric predictability. +#' Nature Communications, 10(1), 1316. DOI = https://doi.org/10.1038/s41467-019-09305-8 " +#'@references Faranda, D., Gabriele Messori and Pascal Yiou. (2017). +#' Dynamical proxies of North Atlantic predictability and extremes. +#' Scientific Reports, 7-41278, 2017. +#' +#'@param data a multidimensional array with named dimensions to create the attractor. It requires a temporal dimension named 'time' and spatial dimensions called 'lat' and 'lon', or 'latitude' and 'longitude' or 'grid'. 
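This dimension requirement follows from the merging steps visible further down in this diff: 'lon'/'lat' (or 'longitude'/'latitude') are collapsed into a single 'grid' dimension before the proxies are computed. A minimal sketch of that collapse on a random toy field, assuming CSTools' MergeDims as called in the function body:

library(CSTools)
dat <- array(rnorm(36 * 4 * 5), c(time = 36, lat = 4, lon = 5))
# Collapse the spatial dimensions into 'grid', as ProxiesAttractor does
dat <- MergeDims(dat, merge_dims = c('lon', 'lat'), rename_dim = 'grid')
dim(dat)  # time = 36, grid = 20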
+#'@param quanti list of arbitrary length of secondary grids. Each secondary grid is to +#' be provided as a list of length 2 with longitudes and latitudes. +#'@param iplot a logical indicating if to plot results. +#'@param ncores The number of cores to use in parallel computation. +#' +#'@param iplot FALSE by default. If TRUE the function returns a simple plot +#'with three pannels on the top left a plot for local dimension 'd', on the top +#'right a plot for the inverse of the persistence 'theta', on the bottom a plot +#'for the 'attractor' plotted with the two properties 'd' (x axis) and 'theta' +#'(y axis) +#' +#'@return dim and theta +#'@import multiApply +#' +#'@examples +#'# Example 1: Computing the attractor using simple data +#'# Creating an example of matrix data(time,grids): +#'mat <- array(rnorm(36 * 40), c(time = 36, grid = 40)) +#'qm <- 0.90 # imposing a threshold +#'Attractor <- ProxiesAttractor(data = mat, quanti = qm) +#' +#'@export + +ProxiesAttractor <- function(data, quanti, iplot = FALSE, ncores = NULL){ + if (is.null(data)) { + stop("Parameter 'data' cannot be NULL.") + } + if (is.null(quanti)) { + stop("Parameter 'quanti' is mandatory") + } + if (!is.logical(iplot) || is.null(iplot)) { + stop("Parameter 'iplot' is required and needs to be TRUE or FALSE.") + } + if (any(names(dim(data)) %in% 'sdate')) { + if (any(names(dim(data)) %in% 'ftime')) { + data <- MergeDims(data, merge_dims = c('ftime', 'sdate'), rename_dim = 'time') } } - if (!(any(names(dim(data$data)) %in% 'time'))){ - stop('dimension time is mandatory') + if (!(any(names(dim(data)) %in% 'time'))){ + stop("Parameter 'data' must have a temporal dimension named 'time'.") } - if (any(names(dim(data$data)) %in% 'lat')) { - if (any(names(dim(data$data)) %in% 'lon')) { - data$data <- MergeDims(data$data, merge_dims = c('lon', 'lat'), + if (any(names(dim(data)) %in% 'lat')) { + if (any(names(dim(data)) %in% 'lon')) { + data <- MergeDims(data, merge_dims = c('lon', 'lat'), rename_dim = 'grid') } } - if(!(any(names(dim(data$data)) %in% 'grid'))){ - stop('dimension grid is mandatory') - } - - attractor <-Apply(data$data,target_dims=c('time','grid'), fun=ProxiesAttractor, - quanti=quanti,iplot=FALSE, ncores=ncores) + if (any(names(dim(data)) %in% 'latitude')) { + if (any(names(dim(data)) %in% 'longitude')) { + data <- MergeDims(data, merge_dims = c('longitude', 'latitude'), + rename_dim = 'grid') + } + } + if(!(any(names(dim(data)) %in% 'grid'))){ + stop("Parameter 'data' must have a spatial dimension named 'grid'.") + } + attractor <- Apply(data, target_dims = c('time', 'grid'), + fun = .proxiesattractor, + quanti = quanti , iplot = FALSE, ncores = ncores) # rename dimensions - attractor <- lapply(attractor, FUN=function(x,dimname){names(dim(x))[dimname] <- 'time' - return(x)}, dimname = which(names(dim(attractor[[1]]))=='dim2')) + attractor <- lapply(attractor, + FUN = function(x, dimname){ + names(dim(x))[dimname] <- 'time' + return(x)}, + dimname = which(names(dim(attractor[[1]])) == 'dim2')) + return(list(dim = attractor$dim, theta = attractor$theta)) +} + +.proxiesattractor <- function(data, quanti, iplot = FALSE) { + # expected dimensions data: time and grid + logdista <- Apply(data, target_dims = 'grid', + fun = function(x, y){ + -log(colMeans((y - as.vector(x))^2))}, + y = t(data))[[1]] + + #Computation of theta + Theta <- function(logdista, quanti){ + #Compute the thheshold corresponding to the quantile + thresh <- quantile(logdista, quanti, na.rm = TRUE) + logdista[which(logdista == 'Inf')] <- NaN + Li <- 
which(as.vector(logdista) > as.numeric(thresh))
+    #Length of each cluster
+    Ti <- diff(Li)
+    N <- length(Ti)
+    q <- 1 - quanti
+    Si <- Ti - 1
+    Nc <- length(which(Si > 0))
+    N <- length(Ti)
+    theta <- (sum(q * Si) + N + Nc - sqrt(((sum(q * Si) + N + Nc)^2) -
+                                            8 * Nc * sum(q * Si))) / (2 * sum(q * Si))
+    #Sort the exceedances
+    logdista <- sort(logdista)
+    #Find all the Peaks over Thresholds.
+    findidx <- which(as.vector(logdista) > as.numeric(thresh))
+    if(length(findidx) < 1) {
+      stop("Parameter 'quanti' is too high for the length of the data provided.")
+    }
+    logextr <- logdista[findidx[[1]]:(length(logdista) - 1)]
+    #The inverse of the dimension is just the average of the exceedances
+    dim <- 1 / mean(as.numeric(logextr) - as.numeric(thresh))
+    return(list(dim = dim, theta = theta))
+  }
+  names(dim(logdista)) <- c('dim1', 'dim2')
+  proxies <- Apply(data = list(logdista = logdista),
+                   target_dims = list('dim1'), fun = Theta, quanti = quanti)
+  if(iplot == TRUE) {
+    time = c(1:length(proxies$theta))
+    layout(matrix(c(1, 3, 2, 3), 2, 2))
+    plot(time, proxies$dim, xlab = 'time', ylab = 'd',
+         main = 'local dimension', type = 'l')
+    plot(time, proxies$theta, xlab = 'time', ylab = 'theta', main = 'theta')
+    plot(proxies$dim, proxies$theta, col = 'blue',
+         main = "Proxies of the Attractor",
+         xlab = "local dimension", ylab = "theta", lwd = 8, type = 'p')
+  }
 
+  return(list(dim = proxies$dim, theta = proxies$theta))
+}
+
diff --git a/R/DynBiasCorrection.R b/R/DynBiasCorrection.R
index 36f7864f..ccab645f 100644
--- a/R/DynBiasCorrection.R
+++ b/R/DynBiasCorrection.R
@@ -48,32 +48,29 @@
 #'@return theta.bias an s2dvcube object with a bias correction performed
 #'conditioned by the inverse of the persistence 'theta'
 #'
-#'@import s2dverification
-#'@import multiApply
-#'@import CSTools
 #'
 #'@examples
 #'# example 1: simple data s2dvcube style
-# expL <- rnorm(1:200)
-# dim(expL) <- c(member=10,lat = 4, lon = 5)
-# obsL <- c(rnorm(1:180),expL[1,,]*1.2)
-# dim(obsL) <- c(time = 10,lat = 4, lon = 5)
-# time_obsL <- paste(rep("01", 50), rep("01", 50), 1953 : 2003, sep = "-")
-# time_expL <- time_obsL[1]
-# lon <- seq(-1,5,1.5)
-# lat <- seq(30,35,1.5)
-# qm=0.98
-# expL <- s2dv_cube(data = expL, lat = lat, lon = lon,
-# Dates = list(start = time_expL, end = time_expL))
-# obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon,
-# Dates = list(start = time_obsL, end = time_obsL))
-# attractor<- CST_ProxiesAttractor(data=obsL,quanti=qm)
-# predynObs = CST_Predictability(dim=attractor$dim,theta=attractor$theta)
-# attractorMod<- CST_ProxiesAttractor(data=expL,quanti=qm)
-# predynMod = CST_Predictability(dim=attractorMod$dim,theta=attractorMod$theta)
-# dynbias= DynBiasCorrection(pred.dim.obs=predynObs$pred.dim,pred.theta.obs=predynMod$pred.theta,
# pred.dim.mod=predynObs$pred.dim,pred.theta.mod=predynMod$pred.theta)
-
+#' expL <- rnorm(1:200)
+#' dim(expL) <- c(member=10,lat = 4, lon = 5)
+#' obsL <- c(rnorm(1:180),expL[1,,]*1.2)
+#' dim(obsL) <- c(time = 10,lat = 4, lon = 5)
+#' time_obsL <- paste(rep("01", 50), rep("01", 50), 1953 : 2003, sep = "-")
+#' time_expL <- time_obsL[1]
+#' lon <- seq(-1,5,1.5)
+#' lat <- seq(30,35,1.5)
+#' qm=0.98
+#' expL <- s2dv_cube(data = expL, lat = lat, lon = lon,
+#'                  Dates = list(start = time_expL, end = time_expL))
+#' obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon,
+#'                  Dates = list(start = time_obsL, end = time_obsL))
+#' attractor <- CST_ProxiesAttractor(data=obsL,quanti=qm)
+#' predynObs = Predictability(dim=attractor$dim,theta=attractor$theta)
+#' attractorMod<- 
CST_ProxiesAttractor(data=expL,quanti=qm) +#' predynMod = CST_Predictability(dim=attractorMod$dim,theta=attractorMod$theta) +#' dynbias= DynBiasCorrection(pred.dim.obs=predynObs$pred.dim,pred.theta.obs=predynMod$pred.theta, # pred.dim.mod=predynObs$pred.dim,pred.theta.mod=predynMod$pred.theta) - +#'@export DynBiasCorrection<- function(pred.dim.obs,pred.theta.obs,pred.dim.mod, pred.theta.mod,ncores=NULL){ if (is.null(pred.dim.mod)) { diff --git a/R/CST_Predictability.R b/R/Predictability.R similarity index 72% rename from R/CST_Predictability.R rename to R/Predictability.R index fffa1c64..1989ef3d 100644 --- a/R/CST_Predictability.R +++ b/R/Predictability.R @@ -1,4 +1,4 @@ -#'@rdname CST_Predictability +#'@rdname Predictability #'@title Computing scores of predictability using two dinamical proxies #'based on dynamical systems theory. #' @@ -26,68 +26,64 @@ # #'@param theta list of arbitrary length of secondary grids. Each secondary grid is to be provided as a list of length 2 with longitudes and latitudes #' -#'@param ncores The number of cores to use in parallel computation -#' -#'@return pred.dim a list of two lists 'qdim' and 'pos.d'. The 'qdim' list +#'@return A list of length 2: +#' \itemize{ +#' \item\code{pred.dim} {a list of two lists 'qdim' and 'pos.d'. The 'qdim' list #'contains values of local dimension 'dim' divided by terciles: #'d1: lower tercile and more predictability, #'d2: middle tercile, #'d3: higher tercile and less predictability -#'The 'pos.d' list contains the position of each tercile in parameter 'dim' +#'The 'pos.d' list contains the position of each tercile in parameter 'dim'} #' -#'@return pred.theta a list of two lists 'qtheta' and 'pos.t'. +#' \item\code{pred.theta} {a list of two lists 'qtheta' and 'pos.t'. #'The 'qtheta' list contains values of the inverse of persistence 'theta' #'divided by terciles: #'th1: lower tercile and more predictability, #'th2: middle tercile, #'th3: higher tercile and less predictability -#'The 'pos.t' list contains the position of each tercile in parameter 'theta' -#' +#'The 'pos.t' list contains the position of each tercile in parameter 'theta'} +#'} #'@return dyn_scores values from 0 to 1. A dyn_score of 1 indicates higher #'predictability. 
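The dyn_scores values can be read as a 3x3 score matrix over the terciles of 'dim' and 'theta'. A small sketch of that grid; the 2/9 and 1/9 assignments are visible in the function body below, and the remaining entries are assumed here to follow the same nine-step ladder from 9/9 down to 1/9:

# Rows: terciles of 'dim' (d1 = most predictable); columns: terciles of
# 'theta' (th1 = most persistent). Scores run from 9/9 down to 1/9.
score_grid <- matrix(c(9:1) / 9, nrow = 3, byrow = TRUE,
                     dimnames = list(c("d1", "d2", "d3"),
                                     c("th1", "th2", "th3")))
score_grid["d1", "th1"]  # 1, the most predictable combination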
 #'
-#'@import s2dverification
-#'@import multiApply
-#'@import CSTools
-#'
 #'@examples
 #'# Creating an example of matrix dat(time,grids):
-#' m=matrix(rand(1,2000)*10,nrow=50,ncol=40)
+#'m <- matrix(rnorm(2000) * 10, nrow = 50, ncol = 40)
 #'# imposing a threshold
-#' quanti=0.90
+#' quanti <- 0.90
 #'# computing dyn_scores from parameters dim and theta of the attractor
-#' attractor = ProxiesAttractor(dat=m,quanti=0.60,iplot=FALSE)
-#' predyn = CST_Predictability(dim=attractor$dim,theta=attractor$theta)
-#'
-CST_Predictability <- function(dim,theta,ncores=NULL){
+#' attractor <- ProxiesAttractor(dat = m, quanti = 0.60, iplot = FALSE)
+#' predyn <- Predictability(dim = attractor$dim, theta = attractor$theta)
+#'@export
+Predictability <- function(dim, theta) {
   if (is.null(dim)) {
-    stop("Parameter 'dim' is mandatory")
+    stop("Parameter 'dim' cannot be NULL.")
   }
   if (is.null(theta)) {
-    stop("Parameter 'theta' is mandatory")
+    stop("Parameter 'theta' cannot be NULL.")
   }
-  if (length(dim)!=length(theta)) {
+  if (length(dim) != length(theta)) {
     stop("Parameters 'dim' and 'theta' must have the same length")
   }
   pos <- c(1:length(dim))
   
   # dim
-  qd1 <- quantile(dim,0.33,na.rm=TRUE)
-  qd3 <- quantile(dim,0.66,na.rm=TRUE)
-  d3 <- which(dim>=qd3)
-  d1 <- which(dim<=qd1)
-  d2 <- which(dim>qd1 & dim<qd3)
-  qdim <- list(d1=dim[d1],d2=dim[d2],d3=dim[d3])
+  qd1 <- quantile(dim, 0.33, na.rm = TRUE)
+  qd3 <- quantile(dim, 0.66, na.rm = TRUE)
+  d3 <- which(dim >= qd3)
+  d1 <- which(dim <= qd1)
+  d2 <- which(dim > qd1 & dim < qd3)
+  qdim <- list(d1 = dim[d1], d2 = dim[d2], d3 = dim[d3])
   pos.d <- list(d1=d1,d2=d2,d3=d3)
  
   #theta
-  qt1 <- quantile(theta,0.33,na.rm=TRUE)
-  qt3 <- quantile(theta,0.66,na.rm=TRUE)
-  th3 <- which(theta>=qt3)
-  th1 <- which(theta<=qt1)
-  th2 <- which(theta>qt1 & theta<qt3)
-  qtheta <- list(th1=theta[th1],th2=theta[th2],th3=theta[th3])
-  pos.t <- list(th1=th1,th2=th2,th3=th3)
+  qt1 <- quantile(theta, 0.33, na.rm = TRUE)
+  qt3 <- quantile(theta, 0.66, na.rm = TRUE)
+  th3 <- which(theta >= qt3)
+  th1 <- which(theta <= qt1)
+  th2 <- which(theta > qt1 & theta < qt3)
+  qtheta <- list(th1 = theta[th1], th2 = theta[th2], th3 = theta[th3])
+  pos.t <- list(th1 = th1, th2 = th2, th3 = th3)
   
   #scores position
   d1th1 <- pos.d$d1[which(pos.d$d1 %in% pos.t$th1)]
@@ -111,9 +107,9 @@ CST_Predictability <- function(dim,theta,ncores=NULL){
   dyn_scores[which(pos %in% d3th2)]<- 2/9
   dyn_scores[which(pos %in% d3th3)]<- 1/9
   
-  return(list(pred.dim=list(qdim=qdim,pos.d=pos.d),
-              pred.theta=list(qtheta=qtheta,pos.t=pos.t),
-              dyn_scores=dyn_scores))
+  return(list(pred.dim = list(qdim = qdim, pos.d = pos.d),
+              pred.theta = list(qtheta = qtheta, pos.t = pos.t),
+              dyn_scores = dyn_scores))
 }
 
 
diff --git a/man/AnoMultiMetric.Rd b/man/AnoMultiMetric.Rd
index 0071a721..28869b2c 100644
--- a/man/AnoMultiMetric.Rd
+++ b/man/AnoMultiMetric.Rd
@@ -4,8 +4,7 @@
 \alias{AnoMultiMetric}
 \title{Multiple Metrics applied in Multiple Model Anomalies}
 \usage{
-AnoMultiMetric(data, metric = "correlation", multimodel = TRUE,
-  names = NULL)
+AnoMultiMetric(data, metric = "correlation", multimodel = TRUE, names = NULL)
 }
 \arguments{
 \item{data}{an s2dverification object (list) giving as output of \code{Load} function from S2dverification package.}
diff --git a/man/CST_ProxiesAttractor.Rd b/man/CST_ProxiesAttractor.Rd
new file mode 100644
index 00000000..9757ff10
--- /dev/null
+++ b/man/CST_ProxiesAttractor.Rd
@@ -0,0 +1,52 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/CST_ProxiesAttractor.R
+\name{CST_ProxiesAttractor}
+\alias{CST_ProxiesAttractor}
+\title{Computing two dinamical proxies of the attractor in s2dv_cube.}
+\usage{
+CST_ProxiesAttractor(data, quanti, ncores = NULL)
+}
+\arguments{
+\item{data}{a s2dv_cube object with the data to create the attractor. 
Must be a matrix with the timesteps in nrow +and the grids in ncol(dat(time,grids)} + +\item{quanti}{list of arbitrary length of secondary grids. Each secondary grid is to +be provided as a list of length 2 with longitudes and latitudes} + +\item{ncores}{The number of cores to use in parallel computation} +} +\value{ +dim and theta +} +\description{ +This function computes two dinamical proxies of the attractor: +The local dimension (d) and the inverse of the persistence (theta) for an +'s2dv_cube' object. +These two parameters will be used as a condition for the computation of dynamical +scores to measure predictability and to compute bias correction conditioned by +the dynamics with the function DynBiasCorrection +Funtion based on the matlab code (davide.faranda@lsce.ipsl.fr) used in +} +\examples{ +# Example 1: Computing the attractor using simple s2dv data +attractor <- CST_ProxiesAttractor(data = lonlat_data$obs, quanti = 0.6) + +} +\references{ +Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., and Yiou, P. (2019). +The hammam effect or how a warm ocean enhances large scale atmospheric predictability. +Nature Communications, 10(1), 1316. DOI = https://doi.org/10.1038/s41467-019-09305-8 " + +Faranda, D., Gabriele Messori and Pascal Yiou. (2017). +Dynamical proxies of North Atlantic predictability and extremes. +Scientific Reports, 7-41278, 2017. +} +\author{ +Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it} + +Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it} + +Veronica Torralba, \email{veronica.torralba@cmcc.it} + +Davide Faranda, \email{davide.faranda@lsce.ipsl.fr} +} diff --git a/man/DynBiasCorrection.Rd b/man/DynBiasCorrection.Rd new file mode 100644 index 00000000..7acfd1f0 --- /dev/null +++ b/man/DynBiasCorrection.Rd @@ -0,0 +1,89 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/DynBiasCorrection.R +\name{DynBiasCorrection} +\alias{DynBiasCorrection} +\title{Performing a Bias Correction conditioned by the dynamical +properties of the data.} +\usage{ +DynBiasCorrection( + pred.dim.obs, + pred.theta.obs, + pred.dim.mod, + pred.theta.mod, + ncores = NULL +) +} +\arguments{ +\item{pred.dim.obs}{same than pred.dim.obs but for the observations} + +\item{pred.theta.obs}{same than pred.theta.obs but for the observations} + +\item{pred.dim.mod}{output of CST_Predictability or a list of two lists 'qdim' and 'pos.d'. The 'qdim' list +contains values of local dimension 'dim' divided by terciles: +d1: lower tercile and more predictability, +d2: middle tercile, +d3: higher tercile and less predictability +The 'pos.d' list contains the position of each tercile in parameter 'dim'} + +\item{pred.theta.mod}{output of CST_Predictability or a list of two lists 'qtheta' and 'pos.t'. +The 'qtheta' list contains values of the inverse of persistence 'theta' +divided by terciles: +th1: lower tercile and more predictability, +th2: middle tercile, +th3: higher tercile and less predictability +The 'pos.t' list contains the position of each tercile in parameter 'theta'} + +\item{ncores}{The number of cores to use in parallel computation} +} +\value{ +dim.bias an s2dvcube object with a bias correction performed +conditioned by local dimension 'dim' + +theta.bias an s2dvcube object with a bias correction performed +conditioned by the inverse of the persistence 'theta' +} +\description{ +This function perform a bias correction conditioned by the +dynamical properties of the dataset. 
This function used the functions +'CST_Predictability' to divide in terciles the two dynamical proxies +computed with CST_ProxiesAttractor or ProxiesAttractor. A bias correction +between the model and the observations is performed using the division into +terciles of the local dimension 'dim' and inverse of the persistence 'theta'. +For instance, model values with lower 'dim' will be corrected with observed +values with lower 'dim', and the same for theta. The function gives two options +of bias correction: one for 'dim' and one for 'theta' +} +\examples{ +# example 1: simple data s2dvcube style +expL <- rnorm(1:200) +dim(expL) <- c(member=10,lat = 4, lon = 5) +obsL <- c(rnorm(1:180),expL[1,,]*1.2) +dim(obsL) <- c(time = 10,lat = 4, lon = 5) +time_obsL <- paste(rep("01", 50), rep("01", 50), 1953 : 2003, sep = "-") +time_expL <- time_obsL[1] +lon <- seq(-1,5,1.5) +lat <- seq(30,35,1.5) +qm=0.98 +expL <- s2dv_cube(data = expL, lat = lat, lon = lon, + Dates = list(start = time_expL, end = time_expL)) +obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon, + Dates = list(start = time_obsL, end = time_obsL)) +attractor <- CST_ProxiesAttractor(data=obsL,quanti=qm) +predynObs = Predictability(dim=attractor$dim,theta=attractor$theta) +attractorMod<- CST_ProxiesAttractor(data=expL,quanti=qm) +predynMod = CST_Predictability(dim=attractorMod$dim,theta=attractorMod$theta) +dynbias= DynBiasCorrection(pred.dim.obs=predynObs$pred.dim,pred.theta.obs=predynMod$pred.theta, +} +\references{ +Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., +and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large +scale atmospheric predictability.Nature Communications, 10(1), 1316. +DOI = https://doi.org/10.1038/s41467-019-09305-8 " + +Faranda, D., Gabriele Messori and Pascal Yiou. (2017). +Dynamical proxies of North Atlantic predictability and extremes. +Scientific Reports, 7-41278, 2017. +} +\author{ +Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it} +} diff --git a/man/Predictability.Rd b/man/Predictability.Rd new file mode 100644 index 00000000..2093a196 --- /dev/null +++ b/man/Predictability.Rd @@ -0,0 +1,72 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/Predictability.R +\name{Predictability} +\alias{Predictability} +\title{Computing scores of predictability using two dinamical proxies +based on dynamical systems theory.} +\usage{ +Predictability(dim, theta) +} +\arguments{ +\item{dim}{data to create the attractor. Must be a matrix with the timesteps in nrow and the grids in ncol +for instance: dat(time,grids)=(1:24418,1:1060), where time=24418 timesteps and grids=1060 gridpoints} + +\item{theta}{list of arbitrary length of secondary grids. Each secondary grid is to be provided as a list of length 2 with longitudes and latitudes} +} +\value{ +A list of length 2: +\itemize{ +\item\code{pred.dim} {a list of two lists 'qdim' and 'pos.d'. The 'qdim' list +contains values of local dimension 'dim' divided by terciles: +d1: lower tercile and more predictability, +d2: middle tercile, +d3: higher tercile and less predictability +The 'pos.d' list contains the position of each tercile in parameter 'dim'} + +\item\code{pred.theta} {a list of two lists 'qtheta' and 'pos.t'. 
+The 'qtheta' list contains values of the inverse of persistence 'theta' +divided by terciles: +th1: lower tercile and more predictability, +th2: middle tercile, +th3: higher tercile and less predictability +The 'pos.t' list contains the position of each tercile in parameter 'theta'} +} + +dyn_scores values from 0 to 1. A dyn_score of 1 indicates higher +predictability. +} +\description{ +This function divides in terciles the two dynamical proxies +computed with CST_ProxiesAttractor or ProxiesAttractor. These terciles will +be used to measure the predictability of the system in dyn_scores. When the +local dimension 'dim' is small and the inverse of persistence 'theta' is +small the predictability is higher, and viceversa. +} +\examples{ +# Creating an example of matrix dat(time,grids): +m <- matrix(rnorm(2000) * 10, nrow = 50, ncol = 40) +# imposing a threshold + quanti <- 0.90 +# computing dyn_scores from parameters dim and theta of the attractor +attractor <- ProxiesAttractor(dat = m, quanti = 0.60, iplot = FALSE) +predyn <- Predictability(dim = attractor$dim, theta = attractor$theta) +} +\references{ +Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., +and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large +scale atmospheric predictability.Nature Communications, 10(1), 1316. +DOI = https://doi.org/10.1038/s41467-019-09305-8 " + +Faranda, D., Gabriele Messori and Pascal Yiou. (2017). +Dynamical proxies of North Atlantic predictability and extremes. +Scientific Reports, 7-41278, 2017. +} +\author{ +Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it} + +Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it} + +Veronica Torralba, \email{veronica.torralba@cmcc.it} + +Davide Faranda, \email{davide.faranda@lsce.ipsl.fr} +} diff --git a/man/ProxiesAttractor.Rd b/man/ProxiesAttractor.Rd new file mode 100644 index 00000000..497b9728 --- /dev/null +++ b/man/ProxiesAttractor.Rd @@ -0,0 +1,95 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/CST_ProxiesAttractor.R, R/ProxiesAttractor.R +\name{ProxiesAttractor} +\alias{ProxiesAttractor} +\title{Computing two dinamical proxies of the attractor.} +\usage{ +ProxiesAttractor(data, quanti, iplot = FALSE, ncores = NULL) + +ProxiesAttractor(data, quanti, iplot = FALSE, ncores = NULL) +} +\arguments{ +\item{data}{data to create the attractor. Must be a matrix with the timesteps in nrow +and the grids in ncol(dat(time,grids)} + +\item{quanti}{list of arbitrary length of secondary grids. Each secondary grid is to +be provided as a list of length 2 with longitudes and latitudes} + +\item{iplot}{FALSE by default. If TRUE the function returns a simple plot +with three pannels on the top left a plot for local dimension 'd', on the top +right a plot for the inverse of the persistence 'theta', on the bottom a plot +for the 'attractor' plotted with the two properties 'd' (x axis) and 'theta' +(y axis)} + +\item{ncores}{The number of cores to use in parallel computation} +} +\value{ +dim and theta + +dim and theta +} +\description{ +This function computes two dinamical proxies of the attractor: +The local dimension (d) and the inverse of the persistence (theta). +These two parameters will be used as a condition for the computation of dynamical +scores to measure predictability and to compute bias correction conditioned by +the dynamics with the function DynBiasCorrection. 
+Funtion based on the matlab code (davide.faranda@lsce.ipsl.fr) used in: + +This function computes two dinamical proxies of the attractor: +The local dimension (d) and the inverse of the persistence (theta). +These two parameters will be used as a condition for the computation of dynamical +scores to measure predictability and to compute bias correction conditioned by +the dynamics with the function DynBiasCorrection. +Funtion based on the matlab code (davide.faranda@lsce.ipsl.fr) used in: +} +\examples{ +# Example 1: Computing the attractor using simple data +# Creating an example of matrix data(time,grids): +mat <- array(rnorm(36 * 40), c(time = 36, grid = 40)) +qm <- 0.90 # imposing a threshold +Attractor <- ProxiesAttractor(data = mat, quanti = qm) + +# Example 1: Computing the attractor using simple data +# Creating an example of matrix data(time,grids): +tm=2*6*3 # time +gm=4*10 # grids +m=matrix(rand(1,tm*gm),nrow=tm,ncol=gm) +qm=0.90 # imposing a threshold +Attractor=ProxiesAttractor(data=m,quanti=qm,iplot=FALSE) + +} +\references{ +Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., and Yiou, P. (2019). +The hammam effect or how a warm ocean enhances large scale atmospheric predictability. +Nature Communications, 10(1), 1316. DOI = https://doi.org/10.1038/s41467-019-09305-8 " + +Faranda, D., Gabriele Messori and Pascal Yiou. (2017). +Dynamical proxies of North Atlantic predictability and extremes. +Scientific Reports, 7-41278, 2017. + +Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., and Yiou, P. (2019). +The hammam effect or how a warm ocean enhances large scale atmospheric predictability. +Nature Communications, 10(1), 1316. DOI = https://doi.org/10.1038/s41467-019-09305-8 " + +Faranda, D., Gabriele Messori and Pascal Yiou. (2017). +Dynamical proxies of North Atlantic predictability and extremes. +Scientific Reports, 7-41278, 2017. +} +\author{ +Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it} + +Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it} + +Veronica Torralba, \email{veronica.torralba@cmcc.it} + +Davide Faranda, \email{davide.faranda@lsce.ipsl.fr} + +Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it} + +Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it} + +Veronica Torralba, \email{veronica.torralba@cmcc.it} + +Davide Faranda, \email{davide.faranda@lsce.ipsl.fr} +} -- GitLab From c9260a6b2a35a61e27466ddd78af3bc8e8c74ef7 Mon Sep 17 00:00:00 2001 From: Carmen Alvarez-Castro Date: Tue, 1 Jun 2021 13:14:16 +0000 Subject: [PATCH 24/48] Update DynBiasCorrection.R --- R/DynBiasCorrection.R | 184 +++++++++++++++++++++++++++++------------- 1 file changed, 127 insertions(+), 57 deletions(-) diff --git a/R/DynBiasCorrection.R b/R/DynBiasCorrection.R index f9bbd06c..8eda658c 100644 --- a/R/DynBiasCorrection.R +++ b/R/DynBiasCorrection.R @@ -3,16 +3,19 @@ #'properties of the data. #' #'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it} +#'@author Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it} +#'@author Veronica Torralba, \email{veronica.torralba@cmcc.it} +#'@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr} #' #'@description This function perform a bias correction conditioned by the #'dynamical properties of the dataset. This function used the functions #''CST_Predictability' to divide in terciles the two dynamical proxies -#'computed with CST_ProxiesAttractor or ProxiesAttractor. A bias correction +#'computed with 'CST_ProxiesAttractor'. 
A bias correction #'between the model and the observations is performed using the division into #'terciles of the local dimension 'dim' and inverse of the persistence 'theta'. #'For instance, model values with lower 'dim' will be corrected with observed #'values with lower 'dim', and the same for theta. The function gives two options -#'of bias correction: one for 'dim' and one for 'theta' +#'of bias correction: one for 'dim' and/or one for 'theta' #' #'@references Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., #'and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large @@ -22,69 +25,136 @@ #' Dynamical proxies of North Atlantic predictability and extremes. #' Scientific Reports, 7-41278, 2017. #' -#'@param pred.dim.mod output of CST_Predictability or a list of two lists 'qdim' and 'pos.d'. The 'qdim' list -#'contains values of local dimension 'dim' divided by terciles: -#'d1: lower tercile and more predictability, -#'d2: middle tercile, -#'d3: higher tercile and less predictability -#'The 'pos.d' list contains the position of each tercile in parameter 'dim' -#'@param pred.theta.mod output of CST_Predictability or a list of two lists 'qtheta' and 'pos.t'. -#'The 'qtheta' list contains values of the inverse of persistence 'theta' -#'divided by terciles: -#'th1: lower tercile and more predictability, -#'th2: middle tercile, -#'th3: higher tercile and less predictability -#'The 'pos.t' list contains the position of each tercile in parameter 'theta' -#' -#'@param pred.dim.obs same than pred.dim.obs but for the observations -#' -#'@param pred.theta.obs same than pred.theta.obs but for the observations -#' +#'@param obsL an s2vcube object with the observation data +#'@param expL an s2dvcube object with the experiment data +#'@param method the method to apply bias correction among these ones: +#'"PTF","RQUANT","QUANT","SSPLIN" +#'@param proxy the proxy (local dimension 'dim' or inverse of persistence +#''theta') to apply the dynamical conditioned bias correction method. 
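The four method strings named above are passed through to the 'qmap' package; a minimal standalone sketch of that backend with synthetic series, mirroring the fitQmap/doQmap calls that appear further down in this diff:

library(qmap)
set.seed(42)
obs <- rnorm(100, mean = 10, sd = 2)  # synthetic reference series
mod <- rnorm(100, mean = 12, sd = 3)  # synthetic, biased model series
qm.fit <- fitQmap(obs, mod, method = "QUANT", qstep = 0.01)
mod.adj <- doQmap(mod, qm.fit, type = "tricub")  # bias-adjusted series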
+#'@param quanti quantile to perform the computation of local dimension and theta #'@param ncores The number of cores to use in parallel computation #' -#'@return dim.bias an s2dvcube object with a bias correction performed -#'conditioned by local dimension 'dim' -#' -#'@return theta.bias an s2dvcube object with a bias correction performed -#'conditioned by the inverse of the persistence 'theta' +#'@return dynbias an s2dvcube object with a bias correction performed +#'conditioned by local dimension 'dim' or inverse of persistence 'theta' #' +#'@import s2dverification +#'@import multiApply +#'@import qmap +#'@import CSTools #' #'@examples #'# example 1: simple data s2dvcube style -#' expL <- rnorm(1:200) -#' dim(expL) <- c(member=10,lat = 4, lon = 5) -#' obsL <- c(rnorm(1:180),expL[1,,]*1.2) -#' dim(obsL) <- c(time = 10,lat = 4, lon = 5) -#' time_obsL <- paste(rep("01", 50), rep("01", 50), 1953 : 2003, sep = "-") -#' time_expL <- time_obsL[1] -#' lon <- seq(-1,5,1.5) -#' lat <- seq(30,35,1.5) -#' qm=0.60 -#' expL <- s2dv_cube(data = expL, lat = lat, lon = lon, -#' Dates = list(start = time_expL, end = time_expL)) -#' obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon, -#' Dates = list(start = time_obsL[1:10], end = time_obsL[1:10])) -#' attractor <- CST_ProxiesAttractor(data=obsL,quanti=qm) -#' predynObs = Predictability(dim=attractor$dim,theta=attractor$theta) -#' attractorMod<- CST_ProxiesAttractor(data=expL,quanti=qm) -#' predynMod = CST_Predictability(dim=attractorMod$dim,theta=attractorMod$theta) -#' dynbias= DynBiasCorrection(pred.dim.obs=predynObs$pred.dim,pred.theta.obs=predynMod$pred.theta, -# pred.dim.mod=predynObs$pred.dim,pred.theta.mod=predynMod$pred.theta) -#'@export -DynBiasCorrection<- function(pred.dim.obs,pred.theta.obs,pred.dim.mod, - pred.theta.mod,ncores=NULL){ - if (is.null(pred.dim.mod)) { - stop("Parameter 'pred.dim.mod' is mandatory") +#'expL <- rnorm(1:2000) +#'dim (expL) <- c(time =100,lat = 4, lon = 5) +#'obsL <- c(rnorm(1:1980),expL[1,,]*1.2) +#'dim (obsL) <- c(time = 100,lat = 4, lon = 5) +#'time_obsL <- paste(rep("01", 100), rep("01", 100), 1920 : 2019, sep = "-") +#'time_expL <- paste(rep("01", 100), rep("01", 100), 1929 : 2019, sep = "-") +#'lon <- seq(-1,5,1.5) +#'lat <- seq(30,35,1.5) +#'# qm=0.98 # too high for this short dataset, it is possible that doesn't +#'# get the requirement, in that case it would be necessary select a lower qm +#'# for instance qm=0.60 +#'qm=0.60 +#'method="QUANT" +#'expL <- s2dv_cube(data = expL, lat = lat, lon = lon, +#' Dates = list(start = time_expL, end = time_expL)) +#'obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon, +#' Dates = list(start = time_obsL, end = time_obsL)) +#' +#'dynbias<- DynBiasCorrection(obsL = obsL, expL = expL, method = method, +#' proxy= "dim", quanti=qm, ncores=NULL) +#' + +DynBiasCorrection<- function(obsL = obsL, expL = expL, + method = method, + proxy= "dim", quanti=qm, ncores=NULL){ + if (is.null(obsL)) { + stop("Parameter 'obsL' is mandatory") } - if (is.null(pred.dim.obs)) { - stop("Parameter 'pred.dim.obs' is mandatory") + if (is.null(expL)) { + stop("Parameter 'expL' is mandatory") } - if (is.null(pred.theta.mod)) { - stop("Parameter 'pred.dim.mod' is mandatory") + if (is.null(method)) { + stop("Parameter 'method' is mandatory") } - if (is.null(pred.theta.obs)) { - stop("Parameter 'pred.dim.obs' is mandatory") + if (is.null(quanti)) { + stop("Parameter 'quanti' is mandatory") + } + if (is.null(proxy)) { + stop("Parameter 'proxy' is mandatory") + } + if (!inherits(obsL, 's2dv_cube')) { + 
stop("Parameter 'obsL' must be of the class 's2dv_cube', ", + "as output by CSTools::CST_Load.") + } + if (!inherits(expL, 's2dv_cube')) { + stop("Parameter 'expL' must be of the class 's2dv_cube', ", + "as output by CSTools::CST_Load.") + } + + attractor.obs <- CST_ProxiesAttractor(data=obsL,quanti=qm) + predyn.obs <- CST_Predictability(dim=attractor.obs$dim, + theta=attractor.obs$theta) + attractor.exp <- CST_ProxiesAttractor(data=expL,quanti=qm) + predyn.exp <- CST_Predictability(dim=attractor.exp$dim, + theta=attractor.exp$theta) + + fun1 <- function(data,list1,tar_dim){ + lapply(list1, function(x){ + Subset(data,indices=x,along=tar_dim)})} + + if(proxy=="dim"){ + obs= fun1(data=obsL$data,list1=predyn.obs$pred.dim$pos.d,tar_dim='time') + exp= fun1(data=expL$data,list1=predyn.exp$pred.dim$pos.d,tar_dim='time') + } + if(proxy=="theta"){ + obs= fun1(data=obsL$data,list1=predyn.obs$pred.theta$pos.t,tar_dim='time') + exp= fun1(data=expL$data,list1=predyn.exp$pred.theta$pos.t,tar_dim='time') + } + + biascorrection <- function(obs,exp,method){ + ## functions fitQmap and doQmap + if(method=="PTF"){ + qm.fit <- fitQmap(obs,exp, + method="PTF", + transfun="expasympt", + cost="RSS",wett.day=TRUE) + qmap <- doQmap(exp,qm.fit)} + if(method=="QUANT"){ + qm.fit <- fitQmap(obs,exp, + method="QUANT",qstep=0.01) + qmap <- doQmap(exp,qm.fit,type="tricub")} + if(method=="RQUANT"){ + qm.fit <- fitQmap(obs,exp, + method="RQUANT",qstep=0.01) + qmap <- doQmap(exp,qm.fit,type="linear")} + if(method=="SSPLIN"){ + qm.fit <- fitQmap(obs,exp,qstep=0.01, + method="SSPLIN") + qmap <- doQmap(exp,qm.fit)} + + return(qmap) + } + ## functions bias correction CSTools + # ? + latlonApply <- function(obs,exp,method){ + Apply(list(exp=exp,obs=obs),target_dims='time', + fun=biascorrection,method=method)} + + qmap <- mapply(latlonApply,obs,exp,method) + expAdjust = array(dim=dim(expL$data)) + + if(proxy=="dim"){ + expAdjust[predyn.exp$pred.dim$pos.d$d1,,]=qmap$d1.output1 + expAdjust[predyn.exp$pred.dim$pos.d$d2,,]=qmap$d2.output1 + expAdjust[predyn.exp$pred.dim$pos.d$d3,,]=qmap$d3.output1 + } + if(proxy=="theta"){ + expAdjust[predyn.exp$pred.theta$pos.t$th1,,]=qmap$th1.output1 + expAdjust[predyn.exp$pred.theta$pos.t$th2,,]=qmap$th2.output1 + expAdjust[predyn.exp$pred.theta$pos.t$th3,,]=qmap$th3.output1 } - dim.bias <- theta.bis <- NULL - return(list(dim.bias=dim.bias,theta.bias=theta.bias)) + + return(list(expAdjust=expAdjust)) } -- GitLab From 3c7e14914f343b2a100758689e2dbd1750a47bf2 Mon Sep 17 00:00:00 2001 From: nperez Date: Wed, 2 Jun 2021 11:42:02 +0200 Subject: [PATCH 25/48] DynBiasCorrection using atomic function --- NAMESPACE | 1 + R/CST_DynBiasCorrection.R | 202 +++++++++++++++++++++++++++++++++++ R/DynBiasCorrection.R | 160 --------------------------- R/Predictability.R | 1 + man/CST_DynBiasCorrection.Rd | 88 +++++++++++++++ man/DynBiasCorrection.Rd | 77 ++++++------- man/Predictability.Rd | 1 + 7 files changed, 324 insertions(+), 206 deletions(-) create mode 100644 R/CST_DynBiasCorrection.R delete mode 100644 R/DynBiasCorrection.R create mode 100644 man/CST_DynBiasCorrection.Rd diff --git a/NAMESPACE b/NAMESPACE index f2da859a..26042662 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -12,6 +12,7 @@ export(CST_BEI_Weighting) export(CST_BiasCorrection) export(CST_Calibration) export(CST_CategoricalEnsCombination) +export(CST_DynBiasCorrection) export(CST_EnsClustering) export(CST_Load) export(CST_MergeDims) diff --git a/R/CST_DynBiasCorrection.R b/R/CST_DynBiasCorrection.R new file mode 100644 index 00000000..7149e49f --- 
/dev/null
+++ b/R/CST_DynBiasCorrection.R
@@ -0,0 +1,202 @@
+#'@rdname CST_DynBiasCorrection
+#'@title Performing a Bias Correction conditioned by the dynamical
+#'properties of the data.
+#'
+#'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
+#'@author Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
+#'@author Veronica Torralba, \email{veronica.torralba@cmcc.it}
+#'@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
+#'
+#'@description This function performs a bias correction conditioned by the
+#'dynamical properties of the dataset. This function internally uses the function
+#''Predictability' to divide in terciles the two dynamical proxies
+#'computed with 'CST_ProxiesAttractor'. A bias correction
+#'between the model and the observations is performed using the division into
+#'terciles of the local dimension 'dim' and inverse of the persistence 'theta'.
+#'For instance, model values with lower 'dim' will be corrected with observed
+#'values with lower 'dim', and the same for theta. The function gives two options
+#'of bias correction: one for 'dim' and/or one for 'theta'
+#'
+#'@references Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D.,
+#'and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large
+#'scale atmospheric predictability.Nature Communications, 10(1), 1316.
+#'DOI = https://doi.org/10.1038/s41467-019-09305-8 "
+#'@references Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
+#' Dynamical proxies of North Atlantic predictability and extremes.
+#' Scientific Reports, 7-41278, 2017.
+#'
+#'@param exp an s2dv_cube object with the experiment data
+#'@param obs an s2dv_cube object with the reference data
+#'@param method a character string indicating the method to apply bias correction among these ones:
+#'"PTF","RQUANT","QUANT","SSPLIN"
+#'@param proxy a character string indicating the proxy for local dimension 'dim' or inverse of persistence 'theta' to apply the dynamical conditioned bias correction method. 
+#'@param quanti a number lower than 1 indicating the quantile to perform
+#'the computation of local dimension and theta
+#'@param time_dim a character string indicating the name of the temporal dimension
+#'@param ncores the number of cores to use in parallel computation
+#'
+#'@return dynbias: an s2dv_cube object with a bias correction performed
+#'conditioned by local dimension 'dim' or inverse of persistence 'theta'
+#'
+#'@examples
+#'# example 1: simple data in s2dv_cube style
+#'set.seed(1)
+#'expL <- rnorm(1:2000)
+#'dim (expL) <- c(time =100,lat = 4, lon = 5)
+#'obsL <- c(rnorm(1:1980),expL[1,,]*1.2)
+#'dim (obsL) <- c(time = 100,lat = 4, lon = 5)
+#'time_obsL <- paste(rep("01", 100), rep("01", 100), 1920 : 2019, sep = "-")
+#'time_expL <- paste(rep("01", 100), rep("01", 100), 1929 : 2019, sep = "-")
+#'lon <- seq(-1,5,1.5)
+#'lat <- seq(30,35,1.5)
+#'# qm=0.98 # too high for this short dataset; it is possible that it doesn't
+#'# meet the requirement, in that case it would be necessary to select a lower
+#'# qm, for instance qm=0.60
+#'expL <- s2dv_cube(data = expL, lat = lat, lon = lon,
+#'                  Dates = list(start = time_expL, end = time_expL))
+#'obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon,
+#'                  Dates = list(start = time_obsL, end = time_obsL))
+#'dynbias <- CST_DynBiasCorrection(exp = expL, obs = obsL, proxy= "dim",
+#'                                 quanti = 0.6, time_dim = 'time')
+#'
+#'@export
+CST_DynBiasCorrection <- function(exp, obs, method = 'QUANT',
+                                  proxy = "dim", quanti, time_dim = 'ftime',
+                                  ncores = NULL) {
+  if (!inherits(obs, 's2dv_cube')) {
+    stop("Parameter 'obs' must be of the class 's2dv_cube', ",
+         "as output by CSTools::CST_Load.")
+  }
+  if (!inherits(exp, 's2dv_cube')) {
+    stop("Parameter 'exp' must be of the class 's2dv_cube', ",
+         "as output by CSTools::CST_Load.")
+  }
+  exp$data <- DynBiasCorrection(exp = exp$data, obs = obs$data, method = method,
+                                proxy = proxy, quanti = quanti,
+                                time_dim = time_dim, ncores = ncores)
+  return(exp)
+}
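+# The helper .dynbias defined below applies the correction tercile by
+# tercile. As a minimal commented sketch of that idea (illustrative only:
+# 'toy_exp', 'toy_obs', 'dim_exp' and 'dim_obs' are made-up names, and
+# base-R quantile() stands in for the qmap package), time steps are grouped
+# by the tercile of the dynamical proxy, and each group of experiment values
+# is quantile-mapped onto the observed values of the same proxy tercile:
+# toy_exp <- rnorm(90); toy_obs <- rnorm(90, mean = 1)
+# dim_exp <- runif(90); dim_obs <- runif(90) # stand-ins for the proxy 'dim'
+# terc_exp <- cut(rank(dim_exp), 3, labels = FALSE)
+# terc_obs <- cut(rank(dim_obs), 3, labels = FALSE)
+# corrected <- toy_exp
+# for (k in 1:3) {
+#   se <- which(terc_exp == k); so <- which(terc_obs == k)
+#   # empirical quantile mapping within the k-th tercile
+#   corrected[se] <- quantile(toy_obs[so], rank(toy_exp[se]) / length(se),
+#                             na.rm = TRUE)
+# }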
+#'@rdname DynBiasCorrection
+#'@title Performing a Bias Correction conditioned by the dynamical
+#'properties of the data.
+#'
+#'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
+#'@author Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it}
+#'@author Veronica Torralba, \email{veronica.torralba@cmcc.it}
+#'@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr}
+#'
+#'@description This function performs a bias correction conditioned by the
+#'dynamical properties of the dataset. It uses the function
+#''Predictability' to divide into terciles the two dynamical proxies
+#'computed with 'CST_ProxiesAttractor'. A bias correction
+#'between the model and the observations is performed using the division into
+#'terciles of the local dimension 'dim' and the inverse of the persistence
+#''theta'. For instance, model values with lower 'dim' will be corrected with
+#'observed values with lower 'dim', and the same for 'theta'. The function
+#'gives two options of bias correction: one conditioned by 'dim' and one
+#'conditioned by 'theta'.
+#'
+#'@references Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D.,
+#'and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large
+#'scale atmospheric predictability. Nature Communications, 10(1), 1316.
+#'DOI = https://doi.org/10.1038/s41467-019-09305-8
+#'@references Faranda, D., Gabriele Messori and Pascal Yiou. (2017).
+#' Dynamical proxies of North Atlantic predictability and extremes.
+#' Scientific Reports, 7-41278, 2017.
+#'
+#'@param exp a multidimensional array with named dimensions with the experiment data
+#'@param obs a multidimensional array with named dimensions with the observation data
+#'@param method a character string indicating the method to apply bias
+#'correction among these ones: "PTF", "RQUANT", "QUANT", "SSPLIN"
+#'@param proxy a character string indicating the proxy for local dimension
+#''dim' or inverse of persistence 'theta' to apply the dynamically
+#'conditioned bias correction method.
+#'@param quanti a number lower than 1 indicating the quantile to perform
+#'the computation of local dimension and theta
+#'@param time_dim a character string indicating the name of the temporal dimension
+#'@param ncores the number of cores to use in parallel computation
+#'
+#'@return a multidimensional array with named dimensions with a bias
+#'correction performed conditioned by local dimension 'dim' or inverse of
+#'persistence 'theta'
+#'
+#'@import multiApply
+#'@importFrom s2dverification Subset
+#'@import qmap
+#'@examples
+#'expL <- rnorm(1:2000)
+#'dim (expL) <- c(time =100,lat = 4, lon = 5)
+#'obsL <- c(rnorm(1:1980),expL[1,,]*1.2)
+#'dim (obsL) <- c(time = 100,lat = 4, lon = 5)
+#'dynbias <- DynBiasCorrection(exp = expL, obs = obsL,
+#'                             proxy= "dim", quanti = 0.6)
+#'@export
+DynBiasCorrection <- function(exp, obs, method = 'QUANT',
+                              proxy = "dim", quanti,
+                              time_dim = 'time', ncores = NULL){
+  if (is.null(obs)) {
+    stop("Parameter 'obs' cannot be NULL.")
+  }
+  if (is.null(exp)) {
+    stop("Parameter 'exp' cannot be NULL.")
+  }
+  if (is.null(method)) {
+    stop("Parameter 'method' cannot be NULL.")
+  }
+  if (is.null(quanti)) {
+    stop("Parameter 'quanti' cannot be NULL.")
+  }
+  if (is.null(proxy)) {
+    stop("Parameter 'proxy' cannot be NULL.")
+  }
+  dims <- dim(exp)
+
+  attractor.obs <- ProxiesAttractor(data = obs, quanti = quanti)
+  predyn.obs <- Predictability(dim = attractor.obs$dim,
+                               theta = attractor.obs$theta)
+  attractor.exp <- ProxiesAttractor(data = exp, quanti = quanti)
+  predyn.exp <- Predictability(dim = attractor.exp$dim,
+                               theta = attractor.exp$theta)
+
+  if (proxy == "dim") {
+    adjusted <- Apply(list(exp, obs), target_dims = time_dim,
+                      fun = .dynbias, method,
+                      predyn.exp = predyn.exp$pred.dim$pos.d,
+                      predyn.obs = predyn.obs$pred.dim$pos.d,
+                      ncores = ncores, output_dims = time_dim)$output1
+  } else if (proxy == "theta") {
+    adjusted <- Apply(list(exp, obs), target_dims = time_dim,
+                      fun = .dynbias, method,
+                      predyn.exp = predyn.exp$pred.theta$pos.t,
+                      predyn.obs = predyn.obs$pred.theta$pos.t,
+                      ncores = ncores, output_dims = time_dim)$output1
+  } else {
+    stop ("Parameter 'proxy' must be set as 'dim' or 'theta'.")
+  }
+  return(adjusted)
+}
+
+.dynbias <- function(exp, obs, method, predyn.exp, predyn.obs) {
+  # correct each proxy tercile of the experiment against the matching
+  # tercile of the observations
+  result <- array(rep(NA, length(exp)))
+  res <- lapply(1:3, function(x) {
+    exp_sub <- exp[predyn.exp[[x]]]
+    obs_sub <- obs[predyn.obs[[x]]]
+    adjust <- .qbiascorrection(exp_sub, obs_sub, method)
+    result[predyn.exp[[x]]] <<- adjust
+    return(NULL)
+  })
+  return(result)
+}
+.qbiascorrection <- function(expX, obsX, method) {
+  ## functions fitQmap and doQmap from package 'qmap'
+  if (method == "PTF") {
+    qm.fit <- fitQmap(obsX, expX, method = "PTF", transfun = "expasympt",
+                      cost = "RSS", wet.day = TRUE)
+    qmap <- doQmap(expX, qm.fit)
+  } else if (method == "QUANT") {
+    qm.fit <- fitQmap(obsX, expX, method = "QUANT", qstep = 0.01)
+    qmap <- doQmap(expX, qm.fit, type = "tricub")
+  } else if (method == "RQUANT") {
+    qm.fit <- fitQmap(obsX, expX, method = "RQUANT", qstep = 0.01)
+    qmap <- doQmap(expX, qm.fit,
type = "linear") + } else if (method == "SSPLIN") { + qm.fit <- fitQmap(obsX, expX, qstep = 0.01, method = "SSPLIN") + qmap <- doQmap(expX, qm.fit) + } else { + stop ("Parameter 'method' doesn't match any of the available methods.") + } + return(qmap) +} diff --git a/R/DynBiasCorrection.R b/R/DynBiasCorrection.R deleted file mode 100644 index 8eda658c..00000000 --- a/R/DynBiasCorrection.R +++ /dev/null @@ -1,160 +0,0 @@ -#'@rdname DynBiasCorrection -#'@title Performing a Bias Correction conditioned by the dynamical -#'properties of the data. -#' -#'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it} -#'@author Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it} -#'@author Veronica Torralba, \email{veronica.torralba@cmcc.it} -#'@author Davide Faranda, \email{davide.faranda@lsce.ipsl.fr} -#' -#'@description This function perform a bias correction conditioned by the -#'dynamical properties of the dataset. This function used the functions -#''CST_Predictability' to divide in terciles the two dynamical proxies -#'computed with 'CST_ProxiesAttractor'. A bias correction -#'between the model and the observations is performed using the division into -#'terciles of the local dimension 'dim' and inverse of the persistence 'theta'. -#'For instance, model values with lower 'dim' will be corrected with observed -#'values with lower 'dim', and the same for theta. The function gives two options -#'of bias correction: one for 'dim' and/or one for 'theta' -#' -#'@references Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., -#'and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large -#'scale atmospheric predictability.Nature Communications, 10(1), 1316. -#'DOI = https://doi.org/10.1038/s41467-019-09305-8 " -#'@references Faranda, D., Gabriele Messori and Pascal Yiou. (2017). -#' Dynamical proxies of North Atlantic predictability and extremes. -#' Scientific Reports, 7-41278, 2017. -#' -#'@param obsL an s2vcube object with the observation data -#'@param expL an s2dvcube object with the experiment data -#'@param method the method to apply bias correction among these ones: -#'"PTF","RQUANT","QUANT","SSPLIN" -#'@param proxy the proxy (local dimension 'dim' or inverse of persistence -#''theta') to apply the dynamical conditioned bias correction method. 
-#'@param quanti quantile to perform the computation of local dimension and theta -#'@param ncores The number of cores to use in parallel computation -#' -#'@return dynbias an s2dvcube object with a bias correction performed -#'conditioned by local dimension 'dim' or inverse of persistence 'theta' -#' -#'@import s2dverification -#'@import multiApply -#'@import qmap -#'@import CSTools -#' -#'@examples -#'# example 1: simple data s2dvcube style -#'expL <- rnorm(1:2000) -#'dim (expL) <- c(time =100,lat = 4, lon = 5) -#'obsL <- c(rnorm(1:1980),expL[1,,]*1.2) -#'dim (obsL) <- c(time = 100,lat = 4, lon = 5) -#'time_obsL <- paste(rep("01", 100), rep("01", 100), 1920 : 2019, sep = "-") -#'time_expL <- paste(rep("01", 100), rep("01", 100), 1929 : 2019, sep = "-") -#'lon <- seq(-1,5,1.5) -#'lat <- seq(30,35,1.5) -#'# qm=0.98 # too high for this short dataset, it is possible that doesn't -#'# get the requirement, in that case it would be necessary select a lower qm -#'# for instance qm=0.60 -#'qm=0.60 -#'method="QUANT" -#'expL <- s2dv_cube(data = expL, lat = lat, lon = lon, -#' Dates = list(start = time_expL, end = time_expL)) -#'obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon, -#' Dates = list(start = time_obsL, end = time_obsL)) -#' -#'dynbias<- DynBiasCorrection(obsL = obsL, expL = expL, method = method, -#' proxy= "dim", quanti=qm, ncores=NULL) -#' - -DynBiasCorrection<- function(obsL = obsL, expL = expL, - method = method, - proxy= "dim", quanti=qm, ncores=NULL){ - if (is.null(obsL)) { - stop("Parameter 'obsL' is mandatory") - } - if (is.null(expL)) { - stop("Parameter 'expL' is mandatory") - } - if (is.null(method)) { - stop("Parameter 'method' is mandatory") - } - if (is.null(quanti)) { - stop("Parameter 'quanti' is mandatory") - } - if (is.null(proxy)) { - stop("Parameter 'proxy' is mandatory") - } - if (!inherits(obsL, 's2dv_cube')) { - stop("Parameter 'obsL' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") - } - if (!inherits(expL, 's2dv_cube')) { - stop("Parameter 'expL' must be of the class 's2dv_cube', ", - "as output by CSTools::CST_Load.") - } - - attractor.obs <- CST_ProxiesAttractor(data=obsL,quanti=qm) - predyn.obs <- CST_Predictability(dim=attractor.obs$dim, - theta=attractor.obs$theta) - attractor.exp <- CST_ProxiesAttractor(data=expL,quanti=qm) - predyn.exp <- CST_Predictability(dim=attractor.exp$dim, - theta=attractor.exp$theta) - - fun1 <- function(data,list1,tar_dim){ - lapply(list1, function(x){ - Subset(data,indices=x,along=tar_dim)})} - - if(proxy=="dim"){ - obs= fun1(data=obsL$data,list1=predyn.obs$pred.dim$pos.d,tar_dim='time') - exp= fun1(data=expL$data,list1=predyn.exp$pred.dim$pos.d,tar_dim='time') - } - if(proxy=="theta"){ - obs= fun1(data=obsL$data,list1=predyn.obs$pred.theta$pos.t,tar_dim='time') - exp= fun1(data=expL$data,list1=predyn.exp$pred.theta$pos.t,tar_dim='time') - } - - biascorrection <- function(obs,exp,method){ - ## functions fitQmap and doQmap - if(method=="PTF"){ - qm.fit <- fitQmap(obs,exp, - method="PTF", - transfun="expasympt", - cost="RSS",wett.day=TRUE) - qmap <- doQmap(exp,qm.fit)} - if(method=="QUANT"){ - qm.fit <- fitQmap(obs,exp, - method="QUANT",qstep=0.01) - qmap <- doQmap(exp,qm.fit,type="tricub")} - if(method=="RQUANT"){ - qm.fit <- fitQmap(obs,exp, - method="RQUANT",qstep=0.01) - qmap <- doQmap(exp,qm.fit,type="linear")} - if(method=="SSPLIN"){ - qm.fit <- fitQmap(obs,exp,qstep=0.01, - method="SSPLIN") - qmap <- doQmap(exp,qm.fit)} - - return(qmap) - } - ## functions bias correction CSTools - # ? 
- latlonApply <- function(obs,exp,method){ - Apply(list(exp=exp,obs=obs),target_dims='time', - fun=biascorrection,method=method)} - - qmap <- mapply(latlonApply,obs,exp,method) - expAdjust = array(dim=dim(expL$data)) - - if(proxy=="dim"){ - expAdjust[predyn.exp$pred.dim$pos.d$d1,,]=qmap$d1.output1 - expAdjust[predyn.exp$pred.dim$pos.d$d2,,]=qmap$d2.output1 - expAdjust[predyn.exp$pred.dim$pos.d$d3,,]=qmap$d3.output1 - } - if(proxy=="theta"){ - expAdjust[predyn.exp$pred.theta$pos.t$th1,,]=qmap$th1.output1 - expAdjust[predyn.exp$pred.theta$pos.t$th2,,]=qmap$th2.output1 - expAdjust[predyn.exp$pred.theta$pos.t$th3,,]=qmap$th3.output1 - } - - return(list(expAdjust=expAdjust)) -} diff --git a/R/Predictability.R b/R/Predictability.R index 1989ef3d..701b812e 100644 --- a/R/Predictability.R +++ b/R/Predictability.R @@ -49,6 +49,7 @@ #'@examples #'# Creating an example of matrix dat(time,grids): #'m <- matrix(rnorm(2000) * 10, nrow = 50, ncol = 40) +#'names(dim(m)) <- c('time', 'grid') #'# imposing a threshold #' quanti <- 0.90 #'# computing dyn_scores from parameters dim and theta of the attractor diff --git a/man/CST_DynBiasCorrection.Rd b/man/CST_DynBiasCorrection.Rd new file mode 100644 index 00000000..facf6f51 --- /dev/null +++ b/man/CST_DynBiasCorrection.Rd @@ -0,0 +1,88 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/CST_DynBiasCorrection.R +\name{CST_DynBiasCorrection} +\alias{CST_DynBiasCorrection} +\title{Performing a Bias Correction conditioned by the dynamical +properties of the data.} +\usage{ +CST_DynBiasCorrection( + exp, + obs, + method = "QUANT", + proxy = "dim", + quanti, + time_dim = "ftime", + ncores = NULL +) +} +\arguments{ +\item{exp}{an s2v_cube object with the experiment data} + +\item{obs}{an s2dv_cube object with the reference data} + +\item{method}{a character string indicating the method to apply bias correction among these ones: +"PTF","RQUANT","QUANT","SSPLIN"} + +\item{proxy}{a character string indicating the proxy for local dimension 'dim' or inverse of persistence 'theta' to apply the dynamical conditioned bias correction method.} + +\item{quanti}{a number lower than 1 indicating the quantile to perform the computation of local dimension and theta} + +\item{time_dim}{a character string indicating the name of the temporal dimension} + +\item{ncores}{The number of cores to use in parallel computation} +} +\value{ +dynbias an s2dvcube object with a bias correction performed +conditioned by local dimension 'dim' or inverse of persistence 'theta' +} +\description{ +This function perform a bias correction conditioned by the +dynamical properties of the dataset. This function internally uses the functions +'Predictability' to divide in terciles the two dynamical proxies +computed with 'CST_ProxiesAttractor'. A bias correction +between the model and the observations is performed using the division into +terciles of the local dimension 'dim' and inverse of the persistence 'theta'. +For instance, model values with lower 'dim' will be corrected with observed +values with lower 'dim', and the same for theta. 
The function gives two options +of bias correction: one for 'dim' and/or one for 'theta' +} +\examples{ +# example 1: simple data s2dvcube style +set.seed(1) +expL <- rnorm(1:2000) +dim (expL) <- c(time =100,lat = 4, lon = 5) +obsL <- c(rnorm(1:1980),expL[1,,]*1.2) +dim (obsL) <- c(time = 100,lat = 4, lon = 5) +time_obsL <- paste(rep("01", 100), rep("01", 100), 1920 : 2019, sep = "-") +time_expL <- paste(rep("01", 100), rep("01", 100), 1929 : 2019, sep = "-") +lon <- seq(-1,5,1.5) +lat <- seq(30,35,1.5) +# get the requirement, in that case it would be necessary select a lower qm +# for instance qm=0.60 +expL <- s2dv_cube(data = expL, lat = lat, lon = lon, + Dates = list(start = time_expL, end = time_expL)) +obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon, + Dates = list(start = time_obsL, end = time_obsL)) +dynbias <- CST_DynBiasCorrection(exp = expL, obs = obsL, proxy= "dim", + quanti = 0.6, time_dim = 'time') + +} +\references{ +Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., +and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large +scale atmospheric predictability.Nature Communications, 10(1), 1316. +DOI = https://doi.org/10.1038/s41467-019-09305-8 " + +Faranda, D., Gabriele Messori and Pascal Yiou. (2017). +Dynamical proxies of North Atlantic predictability and extremes. +Scientific Reports, 7-41278, 2017. +} +\author{ +Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it} + +Maria M. Chaves-Montero, \email{mdm.chaves-montero@cmcc.it} + +Veronica Torralba, \email{veronica.torralba@cmcc.it} + +Davide Faranda, \email{davide.faranda@lsce.ipsl.fr} +} diff --git a/man/DynBiasCorrection.Rd b/man/DynBiasCorrection.Rd index 878fc764..bd60e0f3 100644 --- a/man/DynBiasCorrection.Rd +++ b/man/DynBiasCorrection.Rd @@ -1,78 +1,57 @@ % Generated by roxygen2: do not edit by hand -% Please edit documentation in R/DynBiasCorrection.R +% Please edit documentation in R/CST_DynBiasCorrection.R \name{DynBiasCorrection} \alias{DynBiasCorrection} \title{Performing a Bias Correction conditioned by the dynamical properties of the data.} \usage{ DynBiasCorrection( - pred.dim.obs, - pred.theta.obs, - pred.dim.mod, - pred.theta.mod, + exp, + obs, + method = "QUANT", + proxy = "dim", + quanti, + time_dim = "time", ncores = NULL ) } \arguments{ -\item{pred.dim.obs}{same than pred.dim.obs but for the observations} +\item{exp}{a multidimensional array with named dimensions with the experiment data} -\item{pred.theta.obs}{same than pred.theta.obs but for the observations} +\item{obs}{a multidimensional array with named dimensions with the observation data} -\item{pred.dim.mod}{output of CST_Predictability or a list of two lists 'qdim' and 'pos.d'. The 'qdim' list -contains values of local dimension 'dim' divided by terciles: -d1: lower tercile and more predictability, -d2: middle tercile, -d3: higher tercile and less predictability -The 'pos.d' list contains the position of each tercile in parameter 'dim'} +\item{method}{a character string indicating the method to apply bias correction among these ones: +"PTF","RQUANT","QUANT","SSPLIN"} -\item{pred.theta.mod}{output of CST_Predictability or a list of two lists 'qtheta' and 'pos.t'. 
-The 'qtheta' list contains values of the inverse of persistence 'theta' -divided by terciles: -th1: lower tercile and more predictability, -th2: middle tercile, -th3: higher tercile and less predictability -The 'pos.t' list contains the position of each tercile in parameter 'theta'} +\item{proxy}{a character string indicating the proxy for local dimension 'dim' or inverse of persistence 'theta' to apply the dynamical conditioned bias correction method.} + +\item{quanti}{a number lower than 1 indicating the quantile to perform the computation of local dimension and theta} + +\item{time_dim}{a character string indicating the name of the temporal dimension} \item{ncores}{The number of cores to use in parallel computation} } \value{ -dim.bias an s2dvcube object with a bias correction performed -conditioned by local dimension 'dim' - -theta.bias an s2dvcube object with a bias correction performed -conditioned by the inverse of the persistence 'theta' +a multidimensional array with named dimensions with a bias correction performed conditioned by local dimension 'dim' or inverse of persistence 'theta' } \description{ This function perform a bias correction conditioned by the dynamical properties of the dataset. This function used the functions 'CST_Predictability' to divide in terciles the two dynamical proxies -computed with CST_ProxiesAttractor or ProxiesAttractor. A bias correction +computed with 'CST_ProxiesAttractor'. A bias correction between the model and the observations is performed using the division into terciles of the local dimension 'dim' and inverse of the persistence 'theta'. For instance, model values with lower 'dim' will be corrected with observed values with lower 'dim', and the same for theta. The function gives two options -of bias correction: one for 'dim' and one for 'theta' +of bias correction: one for 'dim' and/or one for 'theta' } \examples{ -# example 1: simple data s2dvcube style -expL <- rnorm(1:200) -dim(expL) <- c(member=10,lat = 4, lon = 5) -obsL <- c(rnorm(1:180),expL[1,,]*1.2) -dim(obsL) <- c(time = 10,lat = 4, lon = 5) -time_obsL <- paste(rep("01", 50), rep("01", 50), 1953 : 2003, sep = "-") -time_expL <- time_obsL[1] -lon <- seq(-1,5,1.5) -lat <- seq(30,35,1.5) -qm=0.60 -expL <- s2dv_cube(data = expL, lat = lat, lon = lon, - Dates = list(start = time_expL, end = time_expL)) -obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon, - Dates = list(start = time_obsL[1:10], end = time_obsL[1:10])) -attractor <- CST_ProxiesAttractor(data=obsL,quanti=qm) -predynObs = Predictability(dim=attractor$dim,theta=attractor$theta) -attractorMod<- CST_ProxiesAttractor(data=expL,quanti=qm) -predynMod = CST_Predictability(dim=attractorMod$dim,theta=attractorMod$theta) -dynbias= DynBiasCorrection(pred.dim.obs=predynObs$pred.dim,pred.theta.obs=predynMod$pred.theta, +expL <- rnorm(1:2000) +dim (expL) <- c(time =100,lat = 4, lon = 5) +obsL <- c(rnorm(1:1980),expL[1,,]*1.2) +dim (obsL) <- c(time = 100,lat = 4, lon = 5) +dynbias <- DynBiasCorrection(exp = expL, obs = obsL, + proxy= "dim", quanti = 0.6) } \references{ Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D., @@ -86,4 +65,10 @@ Scientific Reports, 7-41278, 2017. } \author{ Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it} + +Maria M. 
Chaves-Montero, \email{mdm.chaves-montero@cmcc.it} + +Veronica Torralba, \email{veronica.torralba@cmcc.it} + +Davide Faranda, \email{davide.faranda@lsce.ipsl.fr} } diff --git a/man/Predictability.Rd b/man/Predictability.Rd index 2093a196..be0fb623 100644 --- a/man/Predictability.Rd +++ b/man/Predictability.Rd @@ -45,6 +45,7 @@ small the predictability is higher, and viceversa. \examples{ # Creating an example of matrix dat(time,grids): m <- matrix(rnorm(2000) * 10, nrow = 50, ncol = 40) +names(dim(m)) <- c('time', 'grid') # imposing a threshold quanti <- 0.90 # computing dyn_scores from parameters dim and theta of the attractor -- GitLab From ef71e9d376c886909827b78182d45edd8dbc164c Mon Sep 17 00:00:00 2001 From: nperez Date: Thu, 3 Jun 2021 11:02:44 +0200 Subject: [PATCH 26/48] Adding Carlos as author --- DESCRIPTION | 1 + 1 file changed, 1 insertion(+) diff --git a/DESCRIPTION b/DESCRIPTION index aecd89b3..52999f2a 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -7,6 +7,7 @@ Authors@R: c( person("Louis-Philippe", "Caron", , "louis-philippe.caron@bsc.es", role = "aut", comment = c(ORCID = "0000-0001-5221-0147")), person("Carmen", "Alvarez-Castro", , "carmen.alvarez-castro@cmcc.it", role = "aut", comment = c(ORCID = "0000-0002-9958-010X")), person("Lauriane", "Batte", , "lauriane.batte@meteo.fr", role = "aut"), + person("Carlos", "Delgado", , "carlos.delgado@bsc.es", role = "aut"), person("Jost", "von Hardenberg", , email = c("j.vonhardenberg@isac.cnr.it", "jost.hardenberg@polito.it"), role = "aut", comment = c(ORCID = "0000-0002-5312-8070")), person("Llorenç", "LLedo", , "llledo@bsc.es", role = "aut"), person("Nicolau", "Manubens", , "nicolau.manubens@bsc.es", role = "aut"), -- GitLab From cc171a331d4f184f113703de9d5dbfd0014d8420 Mon Sep 17 00:00:00 2001 From: nperez Date: Fri, 4 Jun 2021 16:35:35 +0200 Subject: [PATCH 27/48] Fix Analogs with new lonL latL --- R/CST_Analogs.R | 62 +++++++++++++++++++++++++++++++------------------ man/Analogs.Rd | 29 +++++++++++++++-------- 2 files changed, 59 insertions(+), 32 deletions(-) diff --git a/R/CST_Analogs.R b/R/CST_Analogs.R index 24475de1..1b463f4b 100644 --- a/R/CST_Analogs.R +++ b/R/CST_Analogs.R @@ -170,7 +170,8 @@ CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = NULL, region = NULL, time_obsL <- obsL$Dates$start } res <- Analogs(expL$data, obsL$data, time_obsL = time_obsL, - time_expL = time_expL, expVar = expVar$data, + time_expL = time_expL, lonL = expL$lon, + latL = expL$lat, expVar = expVar$data, obsVar = obsVar$data, criteria = criteria, excludeTime = excludeTime, region = region, lonVar = as.vector(obsVar$lon), latVar = as.vector(obsVar$lat), @@ -253,6 +254,8 @@ CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = NULL, region = NULL, #'@param time_expL an array of N named dimensions (coinciding with time #'dimensions in expL) of character string(s) indicating the date(s) of the #'experiment in the format "dd/mm/yyyy". Time(s) to find the analogs. +#'@param lonL a vector containing the longitude of parameter 'expL'. +#'@param latL a vector containing the latitude of parameter 'expL'. 
#'@param excludeTime an array of N named dimensions (coinciding with time #'dimensions in expL) of character string(s) indicating the date(s) of the #'observations in the format "dd/mm/yyyy" to be excluded during the search of @@ -343,21 +346,21 @@ CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = NULL, region = NULL, #'region=c(lonmin = -1 ,lonmax = 2, latmin = 30, latmax = 33) #'Local_scale <- Analogs(expL = expSLP, obsL = obsSLP, time_obsL = time_obsSLP, #' obsVar = obs.pr, criteria = "Local_dist", -#' lonVar = seq(-1, 5, 1.5),latVar = seq(30, 35, 1.5), +#' lonL = seq(-1, 5, 1.5),latL = seq(30, 35, 1.5), #' region = region,time_expL = "01-10-2000", #' nAnalogs = 10, AnalogsInfo = TRUE) #' #'# Example 6: list of best analogs using criteria 'Local_dist' and 2 #'Local_scale <- Analogs(expL = expSLP, obsL = obsSLP, time_obsL = time_obsSLP, -#' criteria = "Local_dist", lonVar = seq(-1, 5, 1.5), -#' latVar = seq(30, 35, 1.5), region = region, +#' criteria = "Local_dist", lonL = seq(-1, 5, 1.5), +#' latL = seq(30, 35, 1.5), region = region, #' time_expL = "01-10-2000", nAnalogs = 5, #' AnalogsInfo = TRUE) #' #'# Example 7: Downscaling using Local_dist criteria #'Local_scale <- Analogs(expL = expSLP, obsL = obsSLP, time_obsL = time_obsSLP, -#' criteria = "Local_dist", lonVar = seq(-1, 5, 1.5), -#' latVar = seq(30, 35, 1.5), region = region, +#' criteria = "Local_dist", lonL = seq(-1, 5, 1.5), +#' latL = seq(30, 35, 1.5), region = region, #' time_expL = "01-10-2000", #' nAnalogs = 10, AnalogsInfo = FALSE) #' @@ -366,14 +369,16 @@ CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = NULL, region = NULL, #'dim(exp.pr) <- dim(expSLP) #'Local_scalecor <- Analogs(expL = expSLP, obsL = obsSLP,time_obsL = time_obsSLP, #' obsVar = obs.pr, expVar = exp.pr, -#' criteria = "Local_cor", lonVar = seq(-1, 5, 1.5), -#' time_expL = "01-10-2000", latVar = seq(30, 35, 1.5), +#' criteria = "Local_cor", lonL = seq(-1, 5, 1.5), +#' time_expL = "01-10-2000", latL = seq(30, 35, 1.5), +#' lonVar = seq(-1, 5, 1.5), latVar = seq(30, 35, 1.5), #' nAnalogs = 8, region = region, AnalogsInfo = FALSE) #'# same but without imposing nAnalogs,so nAnalogs will be set by default as 10 #'Local_scalecor <- Analogs(expL = expSLP, obsL = obsSLP,time_obsL = time_obsSLP, #' obsVar = obs.pr, expVar = exp.pr, -#' criteria = "Local_cor", lonVar = seq(-1,5,1.5), -#' time_expL = "01-10-2000", latVar=seq(30, 35, 1.5), +#' lonVar = seq(-1, 5, 1.5), latVar = seq(30, 35, 1.5), +#' criteria = "Local_cor", lonL = seq(-1,5,1.5), +#' time_expL = "01-10-2000", latL =seq(30, 35, 1.5), #' region = region, AnalogsInfo = TRUE) #' #'#'Example 9: List of best analogs in the three criterias Large_dist, @@ -382,11 +387,12 @@ CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = NULL, region = NULL, #' nAnalogs = 7, AnalogsInfo = TRUE) #'Local_scale <- Analogs(expL = expSLP, obsL = obsSLP, time_obsL = time_obsSLP, #' time_expL = "01-10-2000", criteria = "Local_dist", -#' lonVar = seq(-1, 5, 1.5), latVar = seq(30, 35, 1.5), +#' lonL = seq(-1, 5, 1.5), latL = seq(30, 35, 1.5), #' nAnalogs = 7,region = region, AnalogsInfo = TRUE) #'Local_scalecor <- Analogs(expL = expSLP, obsL = obsSLP,time_obsL = time_obsSLP, #' obsVar = obsSLP, expVar = expSLP, #' time_expL = "01-10-2000",criteria = "Local_cor", +#' lonL = seq(-1, 5, 1.5), latL = seq(30, 35, 1.5), #' lonVar = seq(-1, 5, 1.5), latVar = seq(30, 35, 1.5), #' nAnalogs = 7,region = region, #' AnalogsInfo = TRUE) @@ -404,7 +410,8 @@ CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = 
NULL, region = NULL, #' time_obsL = time_obsSLP, time_expL = time_expSLP, #' excludeTime = excludeTime, AnalogsInfo = TRUE) #'@export -Analogs <- function(expL, obsL, time_obsL, time_expL = NULL, expVar = NULL, +Analogs <- function(expL, obsL, time_obsL, time_expL = NULL, + lonL = NULL, latL = NULL, expVar = NULL, obsVar = NULL, criteria = "Large_dist",excludeTime = NULL, lonVar = NULL, latVar = NULL, region = NULL, @@ -640,6 +647,7 @@ Analogs <- function(expL, obsL, time_obsL, time_expL = NULL, expVar = NULL, fun = .analogs, time_obsL, expVar = expVar, time_expL=time_expL, excludeTime=excludeTime, obsVar = obsVar, criteria = criteria, + lonL = lonL, latL = latL, lonVar = lonVar, latVar = latVar, region = region, nAnalogs = nAnalogs, AnalogsInfo = AnalogsInfo, output_dims = c('nAnalogs', 'lat', 'lon'), @@ -652,6 +660,7 @@ Analogs <- function(expL, obsL, time_obsL, time_expL = NULL, expVar = NULL, fun = .analogs,time_obsL, time_expL=time_expL, excludeTime=excludeTime, expVar = expVar, criteria = criteria, + lonL = lonL, latL = latL, lonVar = lonVar, latVar = latVar, region = region, nAnalogs = nAnalogs, AnalogsInfo = AnalogsInfo, output_dims = c('nAnalogs', 'lat', 'lon'), @@ -664,6 +673,7 @@ Analogs <- function(expL, obsL, time_obsL, time_expL = NULL, expVar = NULL, fun = .analogs, criteria = criteria,time_obsL, time_expL=time_expL, excludeTime=excludeTime, + lonL = lonL, latL = latL, lonVar = lonVar, latVar = latVar, region = region, nAnalogs = nAnalogs, AnalogsInfo = AnalogsInfo, output_dims = c('nAnalogs', 'lat', 'lon'), @@ -676,6 +686,7 @@ Analogs <- function(expL, obsL, time_obsL, time_expL = NULL, expVar = NULL, fun = .analogs, time_obsL, expVar = expVar, time_expL=time_expL, excludeTime=excludeTime, obsVar = obsVar, criteria = criteria, + lonL = lonL, latL = latL, lonVar = lonVar, latVar = latVar, region = region, nAnalogs = nAnalogs, AnalogsInfo = AnalogsInfo, output_dims = list(fields = c('nAnalogs', 'lat', 'lon'), @@ -690,6 +701,7 @@ Analogs <- function(expL, obsL, time_obsL, time_expL = NULL, expVar = NULL, fun = .analogs,time_obsL, time_expL=time_expL, excludeTime=excludeTime, expVar = expVar, criteria = criteria, + lonL = lonL, latL = latL, lonVar = lonVar, latVar = latVar, region = region, nAnalogs = nAnalogs, AnalogsInfo = AnalogsInfo, output_dims = list(fields = c('nAnalogs', 'lat', 'lon'), @@ -705,6 +717,7 @@ Analogs <- function(expL, obsL, time_obsL, time_expL = NULL, expVar = NULL, fun = .analogs,time_obsL, criteria = criteria, time_expL=time_expL, excludeTime=excludeTime, + lonL = lonL, latL = latL, lonVar = lonVar, latVar = latVar, region = region, nAnalogs = nAnalogs, AnalogsInfo = AnalogsInfo, output_dims = list(fields = c('nAnalogs', 'lat', 'lon'), @@ -719,6 +732,7 @@ Analogs <- function(expL, obsL, time_obsL, time_expL = NULL, expVar = NULL, .analogs <- function(expL, obsL, time_expL, excludeTime = NULL, obsVar = NULL, expVar = NULL, time_obsL, criteria = "Large_dist", + lonL = NULL, latL = NULL, lonVar = NULL, latVar = NULL, region = NULL, nAnalogs = NULL, AnalogsInfo = FALSE) { @@ -796,7 +810,8 @@ Analogs <- function(expL, obsL, time_obsL, time_expL = NULL, expVar = NULL, expVar = expVar, obsVar = obsVar, criteria = criteria, AnalogsInfo = AnalogsInfo, - nAnalogs = nAnalogs,lonVar = lonVar, + nAnalogs = nAnalogs, + lonL = lonL, latL = latL, lonVar = lonVar, latVar = latVar, region = region) if (AnalogsInfo == TRUE) { return(list(AnalogsFields = Analog_result$AnalogsFields, @@ -807,14 +822,17 @@ Analogs <- function(expL, obsL, time_obsL, time_expL = NULL, expVar = 
NULL, return(AnalogsFields = Analog_result$AnalogsFields) } } -FindAnalog <- function(expL, obsL, time_obsL, expVar, obsVar, criteria, lonVar, +FindAnalog <- function(expL, obsL, time_obsL, expVar, obsVar, criteria, + lonL, latL, lonVar, latVar, region, nAnalogs = nAnalogs, AnalogsInfo = AnalogsInfo) { position <- Select(expL = expL, obsL = obsL, expVar = expVar, - obsVar = obsVar, criteria = criteria, lonVar = lonVar, + obsVar = obsVar, criteria = criteria, + lonL = lonL, latL = latL, lonVar = lonVar, latVar = latVar, region = region)$position metrics<- Select(expL = expL, obsL = obsL, expVar = expVar, - obsVar = obsVar, criteria = criteria, lonVar = lonVar, + obsVar = obsVar, criteria = criteria, lonL = lonL, + latL = latL, lonVar = lonVar, latVar = latVar, region = region)$metric.original best <- Apply(list(position), target_dims = c('time', 'pos'), fun = BestAnalog, criteria = criteria, @@ -824,8 +842,8 @@ FindAnalog <- function(expL, obsL, time_obsL, expVar, obsVar, criteria, lonVar, dim(Analogs_dates) <- dim(best) if (all(!is.null(region), !is.null(lonVar), !is.null(latVar))) { if (is.null(obsVar)) { - obsVar <- SelBox(obsL, lon = lonVar, lat = latVar, region = region)$data - expVar <- SelBox(expL, lon = lonVar, lat = latVar, region=region)$data + obsVar <- SelBox(obsL, lon = lonL, lat = latL, region = region)$data + expVar <- SelBox(expL, lon = lonL, lat = latL, region=region)$data Analogs_fields <- Subset(obsVar, along = which(names(dim(obsVar)) == 'time'), indices = best) @@ -955,7 +973,7 @@ BestAnalog <- function(position, nAnalogs = nAnalogs, AnalogsInfo = FALSE, } } Select <- function(expL, obsL, expVar = NULL, obsVar = NULL, - criteria = "Large_dist", + criteria = "Large_dist", lonL = NULL, latL = NULL, lonVar = NULL, latVar = NULL, region = NULL) { names(dim(expL)) <- replace_repeat_dimnames(names(dim(expL)), names(dim(obsL))) @@ -989,11 +1007,11 @@ Select <- function(expL, obsL, expVar = NULL, obsVar = NULL, position = pos1)) } if (criteria == "Local_dist" | criteria == "Local_cor") { - obs <- SelBox(obsL, lon = lonVar, lat = latVar, region = region)$data - exp <- SelBox(expL, lon = lonVar, lat = latVar, region = region)$data + obs <- SelBox(obsL, lon = lonL, lat = latL, region = region)$data + exp <- SelBox(expL, lon = lonL, lat = latL, region = region)$data metric2 <- Apply(list(obs), target_dims = list(c('lat', 'lon')), fun = .select, exp, metric = "dist")$output1 - metric2.original=metric2 + metric2.original = metric2 dim(metric2) <- c(dim(metric2), metric=1) margins <- c(1 : (length(dim(metric2))))[-dim_time_obs] pos2 <- apply(metric2, margins, order) diff --git a/man/Analogs.Rd b/man/Analogs.Rd index 746ebdd1..fc26a552 100644 --- a/man/Analogs.Rd +++ b/man/Analogs.Rd @@ -9,6 +9,8 @@ Analogs( obsL, time_obsL, time_expL = NULL, + lonL = NULL, + latL = NULL, expVar = NULL, obsVar = NULL, criteria = "Large_dist", @@ -41,6 +43,10 @@ in the format "dd/mm/yyyy". Reference time to search for analogs.} dimensions in expL) of character string(s) indicating the date(s) of the experiment in the format "dd/mm/yyyy". Time(s) to find the analogs.} +\item{lonL}{a vector containing the longitude of parameter 'expL'.} + +\item{latL}{a vector containing the latitude of parameter 'expL'.} + \item{expVar}{an array of N named dimensions containing the experimental field on the local scale, usually a different variable to the parameter 'expL'. 
If it is not NULL (by default, NULL), the returned field by this @@ -167,21 +173,21 @@ downscale_field <- Analogs(expL = expSLP, obsL = obsSLP, obsVar = obs.pr, region=c(lonmin = -1 ,lonmax = 2, latmin = 30, latmax = 33) Local_scale <- Analogs(expL = expSLP, obsL = obsSLP, time_obsL = time_obsSLP, obsVar = obs.pr, criteria = "Local_dist", - lonVar = seq(-1, 5, 1.5),latVar = seq(30, 35, 1.5), + lonL = seq(-1, 5, 1.5),latL = seq(30, 35, 1.5), region = region,time_expL = "01-10-2000", nAnalogs = 10, AnalogsInfo = TRUE) # Example 6: list of best analogs using criteria 'Local_dist' and 2 Local_scale <- Analogs(expL = expSLP, obsL = obsSLP, time_obsL = time_obsSLP, - criteria = "Local_dist", lonVar = seq(-1, 5, 1.5), - latVar = seq(30, 35, 1.5), region = region, + criteria = "Local_dist", lonL = seq(-1, 5, 1.5), + latL = seq(30, 35, 1.5), region = region, time_expL = "01-10-2000", nAnalogs = 5, AnalogsInfo = TRUE) # Example 7: Downscaling using Local_dist criteria Local_scale <- Analogs(expL = expSLP, obsL = obsSLP, time_obsL = time_obsSLP, - criteria = "Local_dist", lonVar = seq(-1, 5, 1.5), - latVar = seq(30, 35, 1.5), region = region, + criteria = "Local_dist", lonL = seq(-1, 5, 1.5), + latL = seq(30, 35, 1.5), region = region, time_expL = "01-10-2000", nAnalogs = 10, AnalogsInfo = FALSE) @@ -190,14 +196,16 @@ exp.pr <- c(rnorm(1:20) * 0.001) dim(exp.pr) <- dim(expSLP) Local_scalecor <- Analogs(expL = expSLP, obsL = obsSLP,time_obsL = time_obsSLP, obsVar = obs.pr, expVar = exp.pr, - criteria = "Local_cor", lonVar = seq(-1, 5, 1.5), - time_expL = "01-10-2000", latVar = seq(30, 35, 1.5), + criteria = "Local_cor", lonL = seq(-1, 5, 1.5), + time_expL = "01-10-2000", latL = seq(30, 35, 1.5), + lonVar = seq(-1, 5, 1.5), latVar = seq(30, 35, 1.5), nAnalogs = 8, region = region, AnalogsInfo = FALSE) # same but without imposing nAnalogs,so nAnalogs will be set by default as 10 Local_scalecor <- Analogs(expL = expSLP, obsL = obsSLP,time_obsL = time_obsSLP, obsVar = obs.pr, expVar = exp.pr, - criteria = "Local_cor", lonVar = seq(-1,5,1.5), - time_expL = "01-10-2000", latVar=seq(30, 35, 1.5), + lonVar = seq(-1, 5, 1.5), latVar = seq(30, 35, 1.5), + criteria = "Local_cor", lonL = seq(-1,5,1.5), + time_expL = "01-10-2000", latL =seq(30, 35, 1.5), region = region, AnalogsInfo = TRUE) #'Example 9: List of best analogs in the three criterias Large_dist, @@ -206,11 +214,12 @@ Large_scale <- Analogs(expL = expSLP, obsL = obsSLP, time_obsL = time_obsSLP, nAnalogs = 7, AnalogsInfo = TRUE) Local_scale <- Analogs(expL = expSLP, obsL = obsSLP, time_obsL = time_obsSLP, time_expL = "01-10-2000", criteria = "Local_dist", - lonVar = seq(-1, 5, 1.5), latVar = seq(30, 35, 1.5), + lonL = seq(-1, 5, 1.5), latL = seq(30, 35, 1.5), nAnalogs = 7,region = region, AnalogsInfo = TRUE) Local_scalecor <- Analogs(expL = expSLP, obsL = obsSLP,time_obsL = time_obsSLP, obsVar = obsSLP, expVar = expSLP, time_expL = "01-10-2000",criteria = "Local_cor", + lonL = seq(-1, 5, 1.5), latL = seq(30, 35, 1.5), lonVar = seq(-1, 5, 1.5), latVar = seq(30, 35, 1.5), nAnalogs = 7,region = region, AnalogsInfo = TRUE) -- GitLab From 38eef77e377230c25e587dd42b55b2b2aa9328ab Mon Sep 17 00:00:00 2001 From: nperez Date: Fri, 4 Jun 2021 19:35:28 +0200 Subject: [PATCH 28/48] UseCase2 and add Observed Terciles plot wind --- inst/doc/UseCase1_WindEvent_March2018.R | 38 +++- ...e2_PrecipitationDownscaling_RainFARM_RF4.R | 172 ++++++++++++++++++ ...aunch_UseCase2_PrecipitationDownscaling.sh | 10 + 3 files changed, 216 insertions(+), 4 deletions(-) create mode 
100644 inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R create mode 100644 inst/doc/launch_UseCase2_PrecipitationDownscaling.sh diff --git a/inst/doc/UseCase1_WindEvent_March2018.R b/inst/doc/UseCase1_WindEvent_March2018.R index 2baa597b..657d1d06 100644 --- a/inst/doc/UseCase1_WindEvent_March2018.R +++ b/inst/doc/UseCase1_WindEvent_March2018.R @@ -63,11 +63,12 @@ for (mm in 1:3) { grid = 'r360x181') str(wind_ref$Dates) dim(wind_ref$data) + print(wind_ref$Dates$start) wind_ref_terciles <- rbind(wind_ref_terciles, quantile(MeanDims(wind_ref$data, c('lat', 'lon')), c(0.3, 0.6))) wind_ref_extremes <- rbind(wind_ref_extremes, quantile(MeanDims(wind_ref$data, c('lat', 'lon')), c(0.1, 0.9))) - #source("/esarchive/scratch/nperez/git/cstools/R/CST_BiasCorrection.R") + source("/esarchive/scratch/nperez/git/cstools/R/CST_BiasCorrection.R") library(multiApply) wind_fsct <- CST_BiasCorrection(exp = wind_hcst, obs = wind_ref, @@ -125,7 +126,8 @@ for (mm in 1:3) { width = 1000, height = 1000, units = 'px', res = 144) PlotMostLikelyQuantileMap(probs = Mean_PB, lon = wind_fsct$lon, lat = wind_fsct$lat, intylat = 2, intxlon = 2, - coast_width = 1.5, legend_scale = 0.8, cat_dim = 'bin', + coast_width = 1.5, legend_scale = 0.8, + cat_dim = 'bin', dot_size = 2, dots = filtered_obs_terciles[,,1,1,1,1], toptitle = c(paste0("Initialized on ", month.name[as.numeric(months_in_advance[mm])]))) @@ -136,7 +138,7 @@ visual <- data.frame(dec = as.vector(MeanDims(wind_fsct_BC[[3]]$data, c('lat', ' jan = as.vector(MeanDims(wind_fsct_BC[[2]]$data, c('lat', 'lon'))), feb = as.vector(MeanDims(wind_fsct_BC[[1]]$data, c('lat', 'lon')))) - wind_obs <- CST_Load(var = 'windagl100', obs = list(obs_path), + wind_obs_areave <- CST_Load(var = 'windagl100', obs = list(obs_path), sdates = '20180301', nmember = 1, leadtimemin = 1, leadtimemax = 1, storefreq = "monthly", sampleperiod = 1, @@ -152,8 +154,36 @@ agg_png("/esarchive/scratch/nperez/CSTools_manuscript/Wind/PlotForecast_IP.png", PlotForecastPDF(visual, tercile.limits = wind_ref_terciles, extreme.limits = wind_ref_extremes, var.name = "Wind Speed 100 m (m/s)", title = "Bias Corrected forecasts at IP", - fcst.names = c("December", "January", "February"), obs = as.vector(wind_obs$data)) + fcst.names = c("December", "January", "February"), obs = as.vector(wind_obs_areave$data)) dev.off() +# Plotting observed terciles: + +wind_obs_obstercile <- Apply(list(wind_obs$data), target_dims = NULL, + fun = function(x, tercile) { + if (x <= tercile[1]) { + res <- 1 + } else if (x > tercile[2]) { + res <- 3 + } else { + res <- 2 + } + return(res) + }, tercile = wind_ref_terciles[1,])$output1 + +agg_png("/esarchive/scratch/nperez/CSTools_manuscript/Wind/MostLikely_Observed_obstercile.png", + width = 1000, height = 1000, units = 'px', res = 144) + +s2dv::PlotEquiMap(wind_obs_obstercile, + lon = wind_obs$lon, lat = wind_obs$lat, + brks = c(0,1,2,3), + cols = c("#6BAED6FF", "#FFEDA0FF", "#FC4E2AFF"), + intylat = 2, intxlon = 2, + coast_width = 1.5, filled.continents = FALSE, + toptitle = c("Observed terciles March 2018")) +dev.off() + + + print("DONE") diff --git a/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R new file mode 100644 index 00000000..19f59fa5 --- /dev/null +++ b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R @@ -0,0 +1,172 @@ +#!/usr/bin/env Rscript +rm(list=ls()); gc(); + +### Creation date: 3rd June 2021 +# Author: N. Pérez-Zanón +# Refer CSTools package and manuscript when using it. 
+# ----------------------------------------
+# This code is divided into 4 steps plus the visualization of the results
+# STEP 1: Compute Slope values for RainFARM downscaling method
+# STEP 2: Apply Quantile Mapping Correction to Seasonal Precipitation Forecast
+# STEP 3: Compute Weights for RainFARM downscaling method
+# STEP 4: Apply RainFARM downscaling method
+# ----------------------------------------
+# Note: STEP 3 requires a machine with internet connection.
+# In this file, the lines are commented since they have been run and the
+# result saved on disk, then the result is loaded.
+# ----------------------------------------
+#
+# Load required libraries and setup output directory:
+library(CSTools)
+library(ClimProjDiags)
+library(zeallot)
+library(ragg)
+library(s2dv) # PlotEquiMap and MeanDims are used below
+library(abind) # used to bind the arrays in the visualization step
+dir_output <- '/esarchive/scratch/nperez/CSTools_manuscript/v20210603/' #slash end
+
+# --------------------------------------------
+# STEP 1:
+# --------------------------------------------
+era5 <- list(name = 'era5',
+             path = '/esarchive/recon/ecmwf/era5/$STORE_FREQ$_mean/$VAR_NAME$_f1h-r1440x721cds/$VAR_NAME$_$YEAR$$MONTH$.nc')
+years <- unlist(lapply(1993:2018, function(x){
+                 paste0(x, sprintf("%02d", 1:12), '01')}))
+era5 <- CST_Load(var = 'prlr',
+                 exp = list(era5),
+                 sdates = years, nmember = 1,
+                 storefreq = "daily", sampleperiod = 1,
+                 latmin = 37.5, latmax = 53.25, lonmin = 2.5, lonmax = 18.25,
+                 output = 'lonlat', nprocs = 1)
+era5$data <- era5$data * 24 * 3600 * 1000 # with or without this line -> same result
+era5 <- CST_SplitDim(era5, split_dim = 'sdate', indices = rep(1:12, 26))
+slope <- CST_RFSlope(era5, time_dim = c('sdate', 'ftime'), kmin = 5)
+save(slope, file = paste0(dir_output, 'Slope.RDS'), version = 2)
+
+
+# --------------------------------------------
+# STEP 2:
+# --------------------------------------------
+StartDates <- paste0(1993:2018, '1101')
+exp <- list(name = 'ecmwfS5',
+            path = "/esarchive/exp/ecmwf/system5c3s/$STORE_FREQ$_mean/$VAR_NAME$_s0-24h/$VAR_NAME$_$START_DATE$.nc")
+obs <- list(name = 'era5',
+            path = '/esarchive/recon/ecmwf/era5/$STORE_FREQ$_mean/$VAR_NAME$_f1h-r1440x721cds/$VAR_NAME$_$YEAR$$MONTH$.nc')
+#obs <- list(name = 'era5', path = '/esarchive/scratch/nperez/ERA5/daily_total_prlr_f1h-r1440x721cds/$VAR_NAME$_$YEAR$$MONTH$.nc')
+
+c(exp, obs) %<-% CST_Load(var = 'prlr', exp = list(exp), obs = list(obs),
+                          sdates = StartDates, nmember = 25,
+                          storefreq = "daily", sampleperiod = 1,
+                          latmin = 42, latmax = 49, lonmin = 4, lonmax = 11,
+                          output = 'lonlat', nprocs = 1)
+exp <- CST_SplitDim(exp, split_dim = c('ftime'))
+obs <- CST_SplitDim(obs, split_dim = c('ftime'))
+exp$data <- exp$data * 24 * 3600 * 1000
+obs$data <- obs$data * 24 * 3600 * 1000
+exp$data[which(exp$data < 0)] <- 0
+exp.qm <- CST_QuantileMapping(exp, obs, method = "QUANT",
+                              wet.day = FALSE,
+                              sample_dims = c('member', 'sdate', 'ftime'),
+                              ncores = 4)
+save(exp.qm, file = paste0(dir_output, 'ExpQM.RDS'), version = 2)
+
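+# Optional check of the adjustment (an illustrative sketch, not part of the
+# original workflow): after the quantile mapping, the quantiles of the
+# corrected forecast should be much closer to the observed ones than the raw
+# forecast quantiles are. It only uses objects created in STEP 2 above;
+# 'qq' is a made-up name for the probabilities to compare.
+#qq <- c(0.5, 0.9, 0.99)
+#print(rbind(raw = quantile(exp$data, qq, na.rm = TRUE),
+#            adjusted = quantile(exp.qm$data, qq, na.rm = TRUE),
+#            observed = quantile(obs$data, qq, na.rm = TRUE)))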
+# --------------------------------------------
+# STEP 3:
+# --------------------------------------------
+#library(raster); library(s2dv); library(CSTools)
+#worldclim <- getData("worldclim", var = "prec", res = 0.5, lon = 5, lat = 45)
+#wc_month <- lapply(1:12, FUN = function(x) {
+#              res <- crop(worldclim[[x]],
+#                          extent(3.5, 11.5, 41.5, 49.5))
+#              res <- as.array(res)
+#              names(dim(res)) <- c('lat', 'lon', 'month')
+#              return(res)
+#              })
+#xy <- xyFromCell(crop(worldclim[[1]], extent(3.5, 11.5, 41.5, 49.5)),
+#                 1:length(crop(worldclim[[1]], extent(3.5, 11.5, 41.5, 49.5))))
+#lons <- unique(xy[,1])
+#lats <- unique(xy[,2])
+#wc_month <- unlist(wc_month)
+#dim(wc_month) <- c(lat = length(lats), lon = length(lons), monthly = 12)
+#wc_month <- Reorder(wc_month, c('lon', 'lat', 'monthly'))
+#wc_month <- s2dv_cube(data = wc_month, lon = lons, lat = lats,
+#                      Datasets = 'WorldClim')
+#weight <- CST_RFWeights(wc_month, lon = 4:11, lat = 49:42, nf = 4)
+#save(weight, file = paste0(dir_output, 'weights.RDS'))
+load(paste0(dir_output, 'weights.RDS'))
+
+# --------------------------------------------
+# STEP 4:
+# --------------------------------------------
+
+weights <- Subset(weight$data, along = 'monthly', indices = c(11, 12, 1:6))
+slope <- Subset(slope, along = 'monthly', indices = c(11, 12, 1:6),
+                drop = 'non-selected')
+fs <- CST_RainFARM(exp.qm, nf = 4,
+                   weights = weights, slope = slope,
+                   kmin = 1, nens = 10, verbose = TRUE,
+                   time_dim = c("member", "sdate", "ftime"), nprocs = 8,
+                   drop_realization = TRUE)
+newfs <- CST_MergeDims(fs, merge_dims = c("ftime", "monthly"),
+                       na.rm = TRUE)
+
+newfs$Dates[[1]] <- exp$Dates[[1]]
+CST_SaveExp(newfs, destination = dir_output)
+
+# --------------------------------------------
+# Visualization
+# --------------------------------------------
+agg_png(paste0(dir_output, "EXP_11dec.png"),
+        width = 500, height = 600, units = 'px', res = 144)
+PlotEquiMap(exp$data[1,1,1,11,,,2], lon = exp$lon, lat = exp$lat,
+            filled.continents = FALSE, bar_limits = c(0,40),
+            intylat = 2, intxlon = 2, title_scale = 0.7,
+            toptitle = 'ECMWF-S5C3S', units = 'precipitation (mm)')
+dev.off()
+agg_png(paste0(dir_output, "EXPQM_11dec.png"),
+        width = 500, height = 600, units = 'px', res = 144)
+PlotEquiMap(exp.qm$data[1,1,1,11,,,2], lon = exp$lon, lat = exp$lat,
+            filled.continents = FALSE, bar_limits = c(0,40),
+            intylat = 2, intxlon = 2, title_scale = 0.7,
+            toptitle = 'Bias Corrected', units = 'precipitation (mm)')
+dev.off()
+agg_png(paste0(dir_output, "Down_11dec.png"),
+        width = 500, height = 600, units = 'px', res = 144)
+PlotEquiMap(fs$data[1,1,1,11,,,2], lon = fs$lon, lat = fs$lat,
+            filled.continents = FALSE, bar_limits = c(0,40),
+            intylat = 2, intxlon = 2, title_scale = 0.7,
+            toptitle = 'Downscaled', units = 'precipitation (mm)')
+dev.off()
+agg_png(paste0(dir_output, "WeightsNov.png"),
+        width = 500, height = 600, units = 'px', res = 144)
+PlotEquiMap(weight$data[,,11], lon = weight$lon, lat = weight$lat,
+            filled.continents = FALSE,
+            intylat = 2, intxlon = 2,
+            toptitle = 'November Weights WorldClim2')
+dev.off()
+agg_png(paste0(dir_output, "Slope.png"),
+        width = 500, height = 600, units = 'px', res = 144)
+# after the Subset above, 'slope' holds one value per selected month
+plot(1:8, slope, type = 'b', main = 'slope', pch = 3)
+dev.off()
+
+# Plot ForecastPDF
+obsts <- MeanDims(obs$data, c('lat', 'lon'), na.rm = TRUE)
+print(quantile(obsts, c(0.1, 0.3, 0.6, 0.9), na.rm = TRUE))
+expts <- MeanDims(exp$data, c('lat', 'lon'), na.rm = TRUE)
+exp.qmts <- MeanDims(exp.qm$data, c('lat', 'lon'), na.rm = TRUE)
+empty <- array(NA, c(dataset = 2, member = 225, sdate = 26, ftime = 31, monthly = 8))
+data <- abind(expts, exp.qmts, along = 1)
+names(dim(data)) <- names(dim(expts))
+data <- abind(data, empty, along = 2)
+names(dim(data)) <- names(dim(expts))
+fsts <- MeanDims(fs$data, c('lat', 'lon'), na.rm = TRUE)
+data <- abind(data, fsts, along = 1)
+names(dim(data)) <- c('dataset', 'members', 'sdate', 'ftime', 'monthly')
+agg_png("/esarchive/scratch/nperez/CSTools_manuscript/v20201201/FiguresPDF_11DecemberAll2.png",
+        width = 750, height = 650, units = 'px', res = 144)
+PlotForecastPDF(data[,,1,11,2], tercile.limits = c(0.58, 2.4), obs = obsts[,,1,11,2],
extreme.limits = c(0.06, 7.26), color.set = 'hydro', add.ensmemb = 'no', + var.name = "Precipitation (mm)", title = "Forecasts issued on Nov 1993 for 11th December 1993", + fcst.names = c("ECMWFS5C3S", "Bias Corrected", "Downscaled")) +dev.off() + + + + diff --git a/inst/doc/launch_UseCase2_PrecipitationDownscaling.sh b/inst/doc/launch_UseCase2_PrecipitationDownscaling.sh new file mode 100644 index 00000000..55c3e848 --- /dev/null +++ b/inst/doc/launch_UseCase2_PrecipitationDownscaling.sh @@ -0,0 +1,10 @@ +#!/bin/bash +#BSUB -W 2:00 +#BSUB -n 16 +#BSUB -M 7000 +#BSUB -J RainFARM_Downsc +#BSUB -o /esarchive/scratch/nperez/CSTools_manuscript/v20210603/lsf-%J.out +#BSUB -e /esarchive/scratch/nperez/CSTools_manuscript/v20210603/lsf-%J.err + +module load R +Rscript /esarchive/scratch/nperez/git/cstools/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R -- GitLab From 8cb1ef9b8c982e63cf742a109e9cb49ece043e6c Mon Sep 17 00:00:00 2001 From: Carmen Alvarez-Castro Date: Mon, 7 Jun 2021 19:46:35 +0000 Subject: [PATCH 29/48] corrections review CST_ProxiesAttractor.R --- R/CST_ProxiesAttractor.R | 42 ++++++++++++++-------------------------- 1 file changed, 15 insertions(+), 27 deletions(-) diff --git a/R/CST_ProxiesAttractor.R b/R/CST_ProxiesAttractor.R index 7df33fee..518968fa 100644 --- a/R/CST_ProxiesAttractor.R +++ b/R/CST_ProxiesAttractor.R @@ -23,8 +23,7 @@ #'@param data a s2dv_cube object with the data to create the attractor. Must be a matrix with the timesteps in nrow #'and the grids in ncol(dat(time,grids) # -#'@param quanti list of arbitrary length of secondary grids. Each secondary grid is to -#' be provided as a list of length 2 with longitudes and latitudes +#'@param quanti a number lower than 1 indicating the quantile to perform the computation of local dimension and theta #' #'@param ncores The number of cores to use in parallel computation #' @@ -70,18 +69,11 @@ CST_ProxiesAttractor <- function(data, quanti, ncores = NULL){ #' Scientific Reports, 7-41278, 2017. #' #'@param data a multidimensional array with named dimensions to create the attractor. It requires a temporal dimension named 'time' and spatial dimensions called 'lat' and 'lon', or 'latitude' and 'longitude' or 'grid'. -#'@param quanti list of arbitrary length of secondary grids. Each secondary grid is to -#' be provided as a list of length 2 with longitudes and latitudes. -#'@param iplot a logical indicating if to plot results. +#'@param quanti a number lower than 1 indicating the quantile to perform the computation of local dimension and theta #'@param ncores The number of cores to use in parallel computation. -#' -#'@param iplot FALSE by default. 
If TRUE the function returns a simple plot -#'with three pannels on the top left a plot for local dimension 'd', on the top -#'right a plot for the inverse of the persistence 'theta', on the bottom a plot -#'for the 'attractor' plotted with the two properties 'd' (x axis) and 'theta' -#'(y axis) #' #'@return dim and theta +#' #'@import multiApply #' #'@examples @@ -90,18 +82,24 @@ CST_ProxiesAttractor <- function(data, quanti, ncores = NULL){ #'mat <- array(rnorm(36 * 40), c(time = 36, grid = 40)) #'qm <- 0.90 # imposing a threshold #'Attractor <- ProxiesAttractor(data = mat, quanti = qm) +#'# to plot the result +#'time = c(1:length(Attractor$theta)) +#'layout(matrix(c(1, 3, 2, 3), 2, 2)) +#'plot(time, Attractor$dim, xlab = 'time', ylab = 'd', +#' main = 'local dimension', type = 'l') +#'plot(time, Attractor$theta, xlab = 'time', ylab = 'theta', main = 'theta') +#'plot(Attractor$dim, Attractor$theta, col = 'blue', +#' main = "Proxies of the Attractor", +#' xlab = "local dimension", ylab = "theta", lwd = 8, 'p') #' #'@export -ProxiesAttractor <- function(data, quanti, iplot = FALSE, ncores = NULL){ +ProxiesAttractor <- function(data, quanti, ncores = NULL){ if (is.null(data)) { stop("Parameter 'data' cannot be NULL.") } if (is.null(quanti)) { stop("Parameter 'quanti' is mandatory") - } - if (!is.logical(iplot) || is.null(iplot)) { - stop("Parameter 'iplot' is required and needs to be TRUE or FALSE.") } if (any(names(dim(data)) %in% 'sdate')) { if (any(names(dim(data)) %in% 'ftime')) { @@ -129,7 +127,7 @@ ProxiesAttractor <- function(data, quanti, iplot = FALSE, ncores = NULL){ } attractor <- Apply(data, target_dims = c('time', 'grid'), fun = .proxiesattractor, - quanti = quanti , iplot = FALSE, ncores = ncores) + quanti = quanti , ncores = ncores) # rename dimensions attractor <- lapply(attractor, FUN = function(x, dimname){ @@ -139,7 +137,7 @@ ProxiesAttractor <- function(data, quanti, iplot = FALSE, ncores = NULL){ return(list(dim = attractor$dim, theta = attractor$theta)) } -.proxiesattractor <- function(data, quanti, iplot = FALSE) { +.proxiesattractor <- function(data, quanti) { # expected dimensions data: time and grid logdista <- Apply(data, target_dims = 'grid', fun = function(x, y){ @@ -176,16 +174,6 @@ ProxiesAttractor <- function(data, quanti, iplot = FALSE, ncores = NULL){ names(dim(logdista)) <- c('dim1', 'dim2') proxies <- Apply(data = list(logdista = logdista), target_dims = list('dim1'), fun = Theta, quanti = quanti) - if(iplot == TRUE) { - time = c(1:length(proxies$theta)) - layout(matrix(c(1, 3, 2, 3), 2, 2)) - plot(time, proxies$dim, xlab = 'time', ylab = 'd', - main = 'local dimension', type = 'l') - plot(time, proxies$theta, xlab = 'time', ylab = 'theta', main = 'theta') - plot(proxies$dim, proxies$theta, col = 'blue', - main = "Proxies of the Attractor", - xlab = "local dimension", ylab = "theta", lwd = 8, 'p') - } return(list(dim = proxies$dim, theta = proxies$theta)) } -- GitLab From bdd294a0a68f5ede446f620afd8658af7e869e95 Mon Sep 17 00:00:00 2001 From: Carmen Alvarez-Castro Date: Mon, 7 Jun 2021 19:48:17 +0000 Subject: [PATCH 30/48] Corrections review Predictability.R --- R/Predictability.R | 31 ++++++++++++++++++++++++++++--- 1 file changed, 28 insertions(+), 3 deletions(-) diff --git a/R/Predictability.R b/R/Predictability.R index 701b812e..d17ea24c 100644 --- a/R/Predictability.R +++ b/R/Predictability.R @@ -56,7 +56,28 @@ #' attractor <- ProxiesAttractor(dat = m, quanti = 0.60, iplot = FALSE) #' predyn <- Predictability(dim = attractor$dim, theta = 
attractor$theta) #'@export -Predictability <- function(dim, theta) { +#' +Predictability<- function(dim, theta, ncores = NULL) { + # if (!inherits(dim, 's2dv_cube')) { + # stop("Parameter 'dim' must be of the class 's2dv_cube', ", + # "as output by CSTools::CST_Load.") + # } + # if (!inherits(theta, 's2dv_cube')) { + # stop("Parameter 'theta' must be of the class 's2dv_cube', ", + # "as output by CSTools::CST_Load.") + # } + + pred <- Apply(list(dim, theta), target_dims = 'time', + fun = .predictability, + ncores = ncores) + dim(pred$dyn_scores) <- dim(theta) + return(list(pred.dim = list(qdim = list(pred$qdim.d1,pred$qdim.d2,pred$qdim.d3), + pos.d = list(pred$pos.d1,pred$pos.d2,pred$pos.d3)), + pred.theta = list(qtheta = list(pred$qtheta.th1,pred$qtheta.th2,pred$qtheta.th3), + pos.t = list(pred$pos.th1,pred$pos.th2,pred$pos.th3)), + dyn_scores = pred$dyn_scores)) +} +.predictability <- function(dim, theta) { if (is.null(dim)) { stop("Parameter 'dim' cannot be NULL.") } @@ -108,9 +129,13 @@ Predictability <- function(dim, theta) { dyn_scores[which(pos %in% d3th2)]<- 2/9 dyn_scores[which(pos %in% d3th3)]<- 1/9 -return(list(pred.dimi =list(qdim = qdim, pos.d = pos.d), - pred.theta = list(qtheta = qtheta, pos.t = pos.t), +return(list(qdim.d1 = dim[d1], qdim.d2 = dim[d2], qdim.d3 = dim[d3], + pos.d1 = d1, pos.d2 = d2, pos.d3 =d3, + qtheta.th1 = theta[th1], qtheta.th2 = theta[th2], qtheta.th3 = theta[th3], + pos.th1 = th1, pos.th2 = th2, pos.th3 = th3, dyn_scores = dyn_scores)) } + + -- GitLab From 4efac0fbb14ff551f4800e2c1a615cde94468120 Mon Sep 17 00:00:00 2001 From: Carmen Alvarez-Castro Date: Mon, 7 Jun 2021 19:49:50 +0000 Subject: [PATCH 31/48] Corrections review in CST_DynBiasCorrection.R --- R/CST_DynBiasCorrection.R | 61 ++++++++++++++++++++++++++++++++------- 1 file changed, 50 insertions(+), 11 deletions(-) diff --git a/R/CST_DynBiasCorrection.R b/R/CST_DynBiasCorrection.R index 7149e49f..00d8dc68 100644 --- a/R/CST_DynBiasCorrection.R +++ b/R/CST_DynBiasCorrection.R @@ -60,7 +60,7 @@ #' #'@export CST_DynBiasCorrection<- function(exp, obs, method = 'QUANT', - proxy = "dim", quanti, time_dim = 'ftime', + proxy = "dim", quanti, ncores = NULL) { if (!inherits(obs, 's2dv_cube')) { stop("Parameter 'obs' must be of the class 's2dv_cube', ", @@ -71,8 +71,7 @@ CST_DynBiasCorrection<- function(exp, obs, method = 'QUANT', "as output by CSTools::CST_Load.") } exp$data <- DynBiasCorrection(exp = exp$data, obs = obs$data, method = method, - proxy = proxy, quanti = quanti, - time_dim = time_dim, ncores = ncores) + proxy = proxy, quanti = quanti, ncores = ncores) return(exp) } #'@rdname DynBiasCorrection @@ -121,12 +120,11 @@ CST_DynBiasCorrection<- function(exp, obs, method = 'QUANT', #'dim (expL) <- c(time =100,lat = 4, lon = 5) #'obsL <- c(rnorm(1:1980),expL[1,,]*1.2) #'dim (obsL) <- c(time = 100,lat = 4, lon = 5) -#'dynbias <- DynBiasCorrection(exp = expL, obs = obsL, -#' proxy= "dim", quanti = 0.6) +#'dynbias <- DynBiasCorrection(exp = expL, obs = obsL, method='QUANT', +#' proxy= "dim", quanti = 0.6,time_dim='time') #'@export DynBiasCorrection<- function(exp, obs, method = 'QUANT', - proxy = "dim", quanti, - time_dim = 'time', ncores = NULL){ + proxy = "dim", quanti, ncores = NULL){ if (is.null(obs)) { stop("Parameter 'obs' cannot be NULL.") } @@ -151,21 +149,62 @@ DynBiasCorrection<- function(exp, obs, method = 'QUANT', predyn.exp <- Predictability(dim = attractor.exp$dim, theta = attractor.exp$theta) + if (!(any(names(dim(exp)) %in% 'time'))){ + if (any(names(dim(exp)) %in% 'sdate')) { + if 
(any(names(dim(exp)) %in% 'ftime')) { + exp <- MergeDims(exp, merge_dims = c('ftime', 'sdate'), + rename_dim = 'time') + } + } + } + if (!(any(names(dim(obs)) %in% 'time'))){ + if (any(names(dim(obs)) %in% 'sdate')) { + if (any(names(dim(obs)) %in% 'ftime')) { + obs <- MergeDims(obs, merge_dims = c('ftime', 'sdate'), + rename_dim = 'time') + } + } + } + + + if(dim(obs)['member']!=dim(exp)['member']){ + names(dim(obs))[names(dim(obs))=='member'] <- 'memberObs' + } + + if(dim(obs)['dataset']!=dim(exp)['dataset']){ + names(dim(obs))[names(dim(obs))=='dataset'] <- 'datasetObs' + } if (proxy == "dim") { - adjusted <- Apply(list(exp, obs), target_dims = time_dim, + adjusted <- Apply(list(exp, obs), target_dims = 'time', fun = .dynbias, method, predyn.exp = predyn.exp$pred.dim$pos.d, predyn.obs = predyn.obs$pred.dim$pos.d, - ncores = ncores, output_dims = time_dim)$output1 + ncores = ncores, output_dims = 'time')$output1 } else if (proxy == "theta") { - adjusted <- Apply(list(exp, obs), target_dims = time_dim, + adjusted <- Apply(list(exp, obs), target_dims = 'time', fun = .dynbias, method, predyn.exp = predyn.exp$pred.theta$pos.t, predyn.obs = predyn.obs$pred.theta$pos.t, - ncores = ncores, output_dims = time_dim)$output1 + ncores = ncores, output_dims = 'time')$output1 } else { stop ("Parameter 'proxy' must be set as 'dim' or 'theta'.") } + + if(any(names(dim(adjusted)) %in% 'memberObs')){ + if(dim(adjusted)['memberObs'] == 1){ + adjusted <- Subset(adjusted,along='memberObs',indices=1,drop = 'selected') + }else{ + print('Dimension member in obs changed to memberObs') + } + } + + if(any(names(dim(adjusted)) %in% 'datasetObs')){ + if(dim(adjusted)['datasetObs'] == 1){ + adjusted <- Subset(adjusted,along='datasetObs',indices=1,drop = 'selected') + }else{ + print('Dimension dataset in obs changed to datasetObs') + } + } return(adjusted) } -- GitLab From 42fb98ad109305d50b53e82309f84aad223fac53 Mon Sep 17 00:00:00 2001 From: nperez Date: Tue, 8 Jun 2021 18:03:22 +0200 Subject: [PATCH 32/48] fixing plots --- inst/doc/UseCase1_WindEvent_March2018.R | 36 ++++++++++++---- ...e2_PrecipitationDownscaling_RainFARM_RF4.R | 41 +++++++++++-------- 2 files changed, 50 insertions(+), 27 deletions(-) diff --git a/inst/doc/UseCase1_WindEvent_March2018.R b/inst/doc/UseCase1_WindEvent_March2018.R index 657d1d06..719f5bf3 100644 --- a/inst/doc/UseCase1_WindEvent_March2018.R +++ b/inst/doc/UseCase1_WindEvent_March2018.R @@ -1,7 +1,22 @@ +#!/usr/bin/env Rscript +rm(list=ls()); gc(); + +### Creation date: 3rd June 2021 +# Author: N. Pérez-Zanón +# Refer CSTools package and manuscript when using it. 
+# ---------------------------------------- +# Wind speed bias adjustment for assessment of an extreme event +# The System5-ECMWF downloaded from C3S seasonal forecasts initialized +# in December 2017, January 2018 and February 2018 +# This code includes the bias adjustent and the results visualization +# ---------------------------------------- library(CSTools) library(s2dv) library(ragg) +library(multiApply) +output_dir <- "/esarchive/scratch/nperez/CSTools_manuscript/Wind/" + exp_path <- list(name = "ECMWFS5", path = "/esarchive/exp/ecmwf/system5c3s/$STORE_FREQ$_mean/$VAR_NAME$_f6h/$VAR_NAME$_$START_DATE$.nc") @@ -15,6 +30,7 @@ months_in_advance <- c('02', '01', '12') wind_fsct_BC <- list() wind_ref_terciles <- NULL wind_ref_extremes <- NULL +wind_thres_latlon <- NULL # Observations March 2018 wind_obs <- CSTools::CST_Load(var = 'windagl100', obs = list(obs_path), sdates = '20180301', nmember = 1, @@ -64,12 +80,12 @@ for (mm in 1:3) { str(wind_ref$Dates) dim(wind_ref$data) print(wind_ref$Dates$start) + wind_ref_terciles <- rbind(wind_ref_terciles, quantile(MeanDims(wind_ref$data, c('lat', 'lon')), c(0.3, 0.6))) wind_ref_extremes <- rbind(wind_ref_extremes, quantile(MeanDims(wind_ref$data, c('lat', 'lon')), c(0.1, 0.9))) source("/esarchive/scratch/nperez/git/cstools/R/CST_BiasCorrection.R") - library(multiApply) wind_fsct <- CST_BiasCorrection(exp = wind_hcst, obs = wind_ref, exp_cor = wind_fcst) @@ -119,10 +135,10 @@ for (mm in 1:3) { } return(y) })$output1 - + wind_thres_latlon <- abind::abind(wind_thres_latlon, thres, along = 4) source("/esarchive/scratch/nperez/git/cstools/R/PlotCombinedMap.R") source("/esarchive/scratch/nperez/git/cstools/R/PlotMostLikelyQuantileMap.R") - agg_png(paste0("/esarchive/scratch/nperez/CSTools_manuscript/Wind/MostLikely_", mm, "_obstercile.png"), + agg_png(paste0(output_dir, "MostLikely_", mm, "_obstercile.png"), width = 1000, height = 1000, units = 'px', res = 144) PlotMostLikelyQuantileMap(probs = Mean_PB, lon = wind_fsct$lon, lat = wind_fsct$lat, intylat = 2, intxlon = 2, @@ -149,7 +165,7 @@ visual <- data.frame(dec = as.vector(MeanDims(wind_fsct_BC[[3]]$data, c('lat', ' print("IS DATA LOADED") print("Wait") -agg_png("/esarchive/scratch/nperez/CSTools_manuscript/Wind/PlotForecast_IP.png", +agg_png(paste0(output_dir, "PlotForecast_IP.png"), width = 1000, height = 1000, units = 'px',res = 144) PlotForecastPDF(visual, tercile.limits = wind_ref_terciles, extreme.limits = wind_ref_extremes, @@ -158,8 +174,10 @@ PlotForecastPDF(visual, tercile.limits = wind_ref_terciles, dev.off() # Plotting observed terciles: - -wind_obs_obstercile <- Apply(list(wind_obs$data), target_dims = NULL, +names(dim(wind_thres_latlon)) <- c('thres', 'lat', 'lon', 'sdate') +wind_thres_latlon <- ClimProjDiags::Subset(wind_thres_latlon, indices = 1, along = 'sdate') +wind_obs_obstercile <- Apply(list(wind_obs$data, wind_thres_latlon), + target_dims = list(NULL, 'thres'), fun = function(x, tercile) { if (x <= tercile[1]) { res <- 1 @@ -169,9 +187,9 @@ wind_obs_obstercile <- Apply(list(wind_obs$data), target_dims = NULL, res <- 2 } return(res) - }, tercile = wind_ref_terciles[1,])$output1 + })$output1 -agg_png("/esarchive/scratch/nperez/CSTools_manuscript/Wind/MostLikely_Observed_obstercile.png", +agg_png(paste0(output_dir, "MostLikely_Observed_obstercile.png"), width = 1000, height = 1000, units = 'px', res = 144) s2dv::PlotEquiMap(wind_obs_obstercile, @@ -183,7 +201,7 @@ s2dv::PlotEquiMap(wind_obs_obstercile, toptitle = c("Observed terciles March 2018")) dev.off() - +# All gridpoints 
are above normal observed tercile. print("DONE") diff --git a/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R index 19f59fa5..6917ae6d 100644 --- a/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R +++ b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R @@ -3,6 +3,8 @@ rm(list=ls()); gc(); ### Creation date: 3rd June 2021 # Author: N. Pérez-Zanón +# Adapted from the original version Terzago et al. 2020 +# https://drive.google.com/file/d/1qp2gbtKdBl4XmsyOeaEhFENwpeUuJwkf/view # Refer CSTools package and manuscript when using it. # ---------------------------------------- # This code is divided in 4 steps plus visualization of the results @@ -40,7 +42,7 @@ era5$data <- era5$data * 24 * 3600 * 1000 # with or without this line -> same re era5 <- CST_SplitDim(era5, split_dim = 'sdate', indices = rep(1:12, 26)) slope <- CST_RFSlope(era5, time_dim = c('sdate', 'ftime'), kmin = 5) save(slope, file = paste0(dir_output, 'Slope.RDS'), version = 2) - +slope_plot <- slope # -------------------------------------------- # STEP 2: @@ -66,7 +68,7 @@ exp.qm <- CST_QuantileMapping(exp, obs, method = "QUANT", wet.day = FALSE, sample_dims = c('member', 'sdate', 'ftime'), ncores = 4) -save(exp.qm, file = paste0(dir_output, 'ExpQM.RDS', version = 2) +save(exp.qm, file = paste0(dir_output, 'ExpQM.RDS'), version = 2) # -------------------------------------------- # STEP 3: @@ -91,6 +93,7 @@ save(exp.qm, file = paste0(dir_output, 'ExpQM.RDS', version = 2) # Datasets = 'WorldClim') #weight <- CST_RFWeights(wc_month, lon = 4:11, lat = 49:42, nf = 4) #save(weight, file = paste0(dir_output, 'weights.RDS')) + load(paste0(dir_output, 'weights.RDS')) # -------------------------------------------- @@ -108,42 +111,44 @@ newfs <- CST_MergeDims(fs, merge_dims = c("ftime", "monthly"), na.rm = TRUE) newfs$Dates[[1]] <- exp$Dates[[1]] -CST_SaveExp(newfs, destination = output_dir) +CST_SaveExp(newfs, destination = dir_output) # -------------------------------------------- # Visualization # -------------------------------------------- -agg_png(paste0(output_dir, "EXP_11dec.png"), - width = 500, height = 600, units = 'px',res = 144) +library(s2dv) +agg_png(paste0(dir_output, "EXP_11dec.png"), + width = 1000, height = 1000, units = 'px',res = 144) PlotEquiMap(exp$data[1,1,1,11,,,2],lon = exp$lon, lat = exp$lat, filled.continents = FALSE, bar_limits = c(0,40), intylat = 2, intxlon = 2, title_scale = 0.7, toptitle = 'ECMWF-S5C3S', units = 'precipitation (mm)') dev.off() -agg_png(paste0(output_dir, "EXPQM_11dec.png"), - width = 500, height = 600, units = 'px',res = 144) +agg_png(paste0(dir_output, "EXPQM_11dec.png"), + width = 1000, height = 1000, units = 'px',res = 144) PlotEquiMap(exp.qm$data[1,1,1,11,,,2],lon = exp$lon, lat = exp$lat, filled.continents = FALSE, bar_limits = c(0,40), intylat = 2, intxlon = 2, title_scale = 0.7, - toptitle = 'Bias Corrected', units = 'precipitation (mm)') + toptitle = 'Bias Adjusted', units = 'precipitation (mm)') dev.off() -agg_png(paste0(output_dir, "Down_11dec.png"), - width = 500, height = 600, units = 'px',res = 144) +agg_png(paste0(dir_output, "Down_11dec.png"), + width = 1000, height = 1000, units = 'px',res = 144) PlotEquiMap(fs$data[1,1,1,11,,,2],lon = fs$lon, lat = fs$lat, filled.continents = FALSE, bar_limits = c(0,40), intylat = 2, intxlon = 2, title_scale = 0.7, toptitle = 'Downsacaled', units = 'precipitation (mm)') dev.off() -agg_png(paste0(output_dir, "WeightsNov.png"), - width = 500, height = 600, 
units = 'px',res = 144)
-PlotEquiMap(weight$data[,,11], lon = weight$lon, lat = weight$lat,
+agg_png(paste0(dir_output, "WeightsDec.png"),
+        width = 1000, height = 1000, units = 'px',res = 144)
+PlotEquiMap(weight$data[,,12], lon = weight$lon, lat = weight$lat,
             filled.continents = FALSE,
             intylat = 2, intxlon = 2,
             toptitle = 'December Weights WorldClim2')
 dev.off()
-agg_png(paste0(output_dir, "Slope.png"),
-        width = 500, height = 600, units = 'px',res = 144)
-plot(1:12, slope[3:12,1:2] type = 'b', main = 'slope', pch = 3)
+agg_png(paste0(dir_output, "Slope.png"),
+        width = 1000, height = 1000, units = 'px',res = 144)
+plot(1:12, slope_plot, type = 'b', main = 'slope', pch = 3)
+line(12, slope_plot[12], type = 'p', col = 'red', pch = 3)
 dev.off()
 
 # Plot ForecastPDF
@@ -159,12 +164,12 @@ names(dim(data)) <- names(dim(expts))
 fsts <- MeanDims(fs, c('lat', 'lon'), na.rm = T)
 data <- abind(data, fsts, along = 1)
 names(dim(data)) <- c('dataset', 'members', 'sdate', 'ftime', 'monthly')
-agg_png("/esarchive/scratch/nperez/CSTools_manuscript/v20201201/FiguresPDF_11DecemberAll2.png",
+agg_png(paste0(dir_output, "/FiguresPDF_11DecemberAll2.png",
        width = 750, height = 650, units = 'px', res = 144)
 PlotForecastPDF(data[,,1,11,2], tercile.limits = c(0.58, 2.4), obs = obsts[,,1,11,2],
                 extreme.limits = c(0.06, 7.26), color.set = 'hydro', add.ensmemb = 'no',
                 var.name = "Precipitation (mm)", title = "Forecasts issued on Nov 1993 for 11th December 1993",
-                fcst.names = c("ECMWFS5C3S", "Bias Corrected", "Downscaled"))
+                fcst.names = c("ECMWFS5C3S", "Bias Adjusted", "Downscaled"))
 dev.off()
-- 
GitLab


From e0ba23a0b2d1bd91cf20e8a33962c7a264dfba94 Mon Sep 17 00:00:00 2001
From: nperez
Date: Fri, 11 Jun 2021 11:20:18 +0200
Subject: [PATCH 33/48] Use cases and extra_string parameter in SaveExp

---
 R/CST_SaveExp.R                               |  28 ++-
 ..._PrecipitationDownscaling_RainFARM_RF100.R | 195 ++++++++++++++++++
 ...e2_PrecipitationDownscaling_RainFARM_RF4.R |  63 +++---
 man/CST_SaveExp.Rd                            |   4 +-
 man/SaveExp.Rd                                |   7 +-
 5 files changed, 259 insertions(+), 38 deletions(-)
 create mode 100644 inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF100.R

diff --git a/R/CST_SaveExp.R b/R/CST_SaveExp.R
index 9c689ff7..f9290b18 100644
--- a/R/CST_SaveExp.R
+++ b/R/CST_SaveExp.R
@@ -13,6 +13,7 @@
 #'folder tree: destination/experiment/variable/. By default the function
 #'creates and saves the data into the folder "CST_Data" in the working
 #'directory.
+#'@param extra_string a character string to be included as part of the file name, for instance, to identify member or realization. It will be added to the file name between underscore characters.
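To make the new argument concrete, here is a minimal sketch of the file names it produces (the naming rule is the one added to the internal .saveExp further down; the variable name, date and member label are invented for illustration):

var_name <- "prlr"   # hypothetical variable name
sdate <- "19931101"  # hypothetical start date
paste0(var_name, "_", sdate, ".nc")                  # "prlr_19931101.nc" (extra_string = NULL)
paste0(var_name, "_", "member1", "_", sdate, ".nc")  # "prlr_member1_19931101.nc"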
 #'
 #'@seealso \code{\link{CST_Load}}, \code{\link{as.s2dv_cube}} and \code{\link{s2dv_cube}}
 #'
@@ -29,7 +30,7 @@
 #'}
 #'
 #'@export
-CST_SaveExp <- function(data, destination = "./CST_Data") {
+CST_SaveExp <- function(data, destination = "./CST_Data", extra_string = NULL) {
   if (!is.character(destination) & length(destination) > 1) {
     stop("Parameter 'destination' must be a character string of one element ",
          "indicating the name of the file (including the folder if needed) ",
@@ -55,7 +56,8 @@
   SaveExp(data = data$data, lon = data$lon, lat = data$lat,
           Dataset = names(data$Datasets), var_name = var_name, units = units,
           cdo_grid_name = cdo_grid_name, projection = projection,
-          startdates = sdates, Dates = time_values, destination)
+          startdates = sdates, Dates = time_values, destination,
+          extra_string = extra_string)
 }
 #'Save an experiment in a format compatible with CST_Load
 #'@description This function is created for compatibility with CST_Load/Load for saving post-processed datasets such as those calibrated or downscaled with CSTools functions
@@ -73,8 +75,9 @@
 #'@param cdo_grid_name a character string indicating the name of the grid e.g.: 'r360x181'
 #'@param projection a character string indicating the projection name
 #'@param destination a character string indicating the path where to store the NetCDF files
+#'@param extra_string a character string to be included as part of the file name, for instance, to identify member or realization. It will be added to the file name between underscore characters.
 #'
-#'@return the function creates as many files as sdates per dataset. Each file could contain multiple members
+#'@return the function creates as many files as sdates per dataset. Each file could contain multiple members.
 #' The path will be created with the name of the variable and each Datasets.
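And a sketch of where those files land, following the destination/experiment/variable tree described in the documentation above (folder names are illustrative):

# CST_Data/
# └── ECMWFS5/                  <- one folder per Dataset
#     └── prlr/                 <- one folder per variable
#         ├── prlr_19931101.nc  <- one file per start date, all members inside
#         └── prlr_19941101.nc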
 #'
 #'@import ncdf4
@@ -102,7 +105,8 @@ CST_SaveExp <- function(data, destination = "./CST_Data") {
 #'}
 #'@export
 SaveExp <- function(data, lon, lat, Dataset, var_name, units, startdates, Dates,
-                    cdo_grid_name, projection, destination) {
+                    cdo_grid_name, projection, destination,
+                    extra_string = NULL) {
   dimname <- names(dim(data))
   if (any(dimname == "ftime")) {
     dimname[which(dimname == "ftime")] <- "time"
@@ -136,7 +140,11 @@ SaveExp <- function(data, lon, lat, Dataset, var_name, units, startdates, Dates,
     stop("Element 'data' in parameter 'data' has more than one 'sdate'",
          " dimension.")
   }
-  
+  if (!is.null(extra_string)) {
+    if (!is.character(extra_string)) {
+      stop("Parameter 'extra_string' must be a character string.")
+    }
+  }
   dataset_pos <- which(dimname == "dataset" | dimname == "dat")
   dims <- dim(data)
   if (length(dataset_pos) == 0) {
@@ -227,7 +235,7 @@ SaveExp <- function(data, lon, lat, Dataset, var_name, units, startdates, Dates,
                              NULL, 'time'), fun = .saveExp,
                var_name = var_name, units = units, dims_var = dims_var,
                cdo_grid_name = cdo_grid_name, projection = projection,
-               destination = path)
+               destination = path, extra_string = extra_string)
   }
 }
 
@@ -252,7 +260,7 @@ SaveExp <- function(data, lon, lat, Dataset, var_name, units, startdates, Dates,
 #Dates <- as.Date(c("1900-11-01", "1900-12-01", "1901-01-01", "1901-02-01", "1901-03-01"))
 #.saveExp(data, sdate, Dates, var_name, units, dims_var, cdo_grid_name = 'r360x181', projection = 'none', destination)
 .saveExp <- function(data, sdate, Dates, var_name, units, dims_var,
-                     cdo_grid_name, projection, destination) {
+                     cdo_grid_name, projection, destination, extra_string) {
   dim_names <- names(dim(data))
   if (any(dim_names != c('longitude', 'latitude', 'member', 'time'))) {
     data <- Reorder(data, c('longitude', 'latitude', 'member', 'time'))
@@ -266,7 +274,11 @@ SaveExp <- function(data, lon, lat, Dataset, var_name, units, startdates, Dates,
   datanc  <- ncvar_def(name = var_name, units = units,
                        dim = dims_var, missval = -99999)
-  file_name <- paste0(var_name, "_", sdate, ".nc")
+  if (is.null(extra_string)) {
+    file_name <- paste0(var_name, "_", sdate, ".nc")
+  } else {
+    file_name <- paste0(var_name, "_", extra_string, "_", sdate, ".nc")
+  }
   full_filename <- file.path(destination, file_name)
   file_nc <- nc_create(full_filename, datanc)
   ncvar_put(file_nc, datanc, data)
diff --git a/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF100.R b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF100.R
new file mode 100644
index 00000000..7b057180
--- /dev/null
+++ b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF100.R
@@ -0,0 +1,195 @@
+#!/usr/bin/env Rscript
+rm(list=ls()); gc();
+
+### Creation date: 9th June 2021
+# Author: N. Pérez-Zanón
+# Adapted from the original version Terzago et al. 2020
+# https://drive.google.com/file/d/1qp2gbtKdBl4XmsyOeaEhFENwpeUuJwkf/view
+# Refer CSTools package and manuscript when using it.
+# ----------------------------------------
+# This code is divided in 4 steps plus visualization of the results
+# STEP 1: Compute Slope values for RainFARM downscaling method
+# STEP 2: Apply Quantile Mapping Correction to Seasonal Precipitation Forecast
+# STEP 3: Compute Weights for RainFARM downscaling method
+# STEP 4: Apply RainFARM downscaling method
+# ----------------------------------------
+# Note: STEP 3 requires a machine with internet connection.
+# In this file, the lines are commented since they have been run and the
+# result saved on disk, then the result is loaded.
+# ---------------------------------------- +# +# Load required libraries and setup output directory: +library(CSTools) +library(ClimProjDiags) +library(zeallot) +library(ragg) +dir_output <- '/esarchive/scratch/nperez/CSTools_manuscript/v20210603/' #slash end + +# -------------------------------------------- +# STEP 1: +# -------------------------------------------- +era5 <- list(name = 'era5', + path = '/esarchive/recon/ecmwf/era5/$STORE_FREQ$_mean/$VAR_NAME$_f1h-r1440x721cds/$VAR_NAME$_$YEAR$$MONTH$.nc') +years <- unlist(lapply(1993:2018, function(x){ + paste0(x, sprintf("%02d",1:12), '01')})) +era5 <- CST_Load(var = 'prlr', + exp = list(era5), + sdates = years, nmember = 1, + storefreq = "daily", sampleperiod = 1, + latmin = 37.5, latmax = 53.25, lonmin = 2.5, lonmax = 18.25, + output = 'lonlat', nprocs = 1) +era5$data <- era5$data * 24 * 3600 * 1000 # with or without this line -> same result +era5 <- CST_SplitDim(era5, split_dim = 'sdate', indices = rep(1:12, 26)) +slope <- CST_RFSlope(era5, time_dim = c('sdate', 'ftime'), kmin = 5) +save(slope, file = paste0(dir_output, 'Slope.RDS'), version = 2) +slope_plot <- slope + +# -------------------------------------------- +# STEP 2: +# -------------------------------------------- +StartDates <- paste0(1993:2018, '1101') +exp <- list(name = 'ecmwfS5', + path = "/esarchive/exp/ecmwf/system5c3s/$STORE_FREQ$_mean/$VAR_NAME$_s0-24h/$VAR_NAME$_$START_DATE$.nc") +obs <- list(name = 'era5', + path = '/esarchive/recon/ecmwf/era5/$STORE_FREQ$_mean/$VAR_NAME$_f1h-r1440x721cds/$VAR_NAME$_$YEAR$$MONTH$.nc') +#obs <- list(name = 'era5', path = '/esarchive/scratch/nperez/ERA5/daily_total_prlr_f1h-r1440x721cds/$VAR_NAME$_$YEAR$$MONTH$.nc') + +c(exp, obs) %<-% CST_Load(var = 'prlr', exp = list(exp), obs = list(obs), + sdates = StartDates, nmember = 25, + storefreq = "daily", sampleperiod = 1, + latmin = 42, latmax = 49, lonmin = 4, lonmax = 11, + output = 'lonlat', nprocs = 1) +exp <- CST_SplitDim(exp, split_dim = c('ftime')) +obs <- CST_SplitDim(obs, split_dim = c('ftime')) +exp$data <- exp$data * 24 * 3600 * 1000 +obs$data <- obs$data * 24 * 3600 * 1000 +exp$data[which(exp$data < 0)] <- 0 +exp.qm <- CST_QuantileMapping(exp, obs, method = "QUANT", + wet.day = FALSE, + sample_dims = c('member', 'sdate', 'ftime'), + ncores = 4) +save(exp.qm, file = paste0(dir_output, 'ExpQM.RDS'), version = 2) + +# -------------------------------------------- +# STEP 3: +# -------------------------------------------- +#library(raster); library(s2dv); library(CSTools) +#worldclim <- getData("worldclim", var = "prec", res = 0.5, lon = 5, lat = 45) +#wc_month <- lapply(1:12, FUN = function(x) { +# res <- crop(worldclim[[x]], +# extent(3.5, 11.5, 41.5, 49.5)) +# res <- as.array(res) +# names(dim(res)) <- c('lat', 'lon', 'month') +# return(res) +# }) +#xy <- xyFromCell(crop(worldclim[[1]], extent(3.5, 11.5, 41.5, 49.5)), +# 1:length(crop(worldclim[[1]], extent(3.5, 11.5, 41.5, 49.5)))) +#lons <- unique(xy[,1]) +#lats <- unique(xy[,2]) +#wc_month <- unlist(wc_month) +#dim(wc_month) <- c(lat = length(lats), lon = length(lons), monthly = 12) +#wc_month <- Reorder(wc_month, c('lon', 'lat', 'monthly')) +#wc_month <- s2dv_cube(data = wc_month, lon = lons, lat = lats, +# Datasets = 'WorldClim') +#weight <- CST_RFWeights(wc_month, lon = 4:11, lat = 49:42, nf = 100) +#save(weight, file = paste0(dir_output, 'weightsRF100.RDS')) + +load(paste0(dir_output, 'weightsRF100.RDS')) + +# -------------------------------------------- +# Visualization +# -------------------------------------------- 
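One detail worth spelling out before the plots and the STEP 4 loop below: the weights and slopes are stored per calendar month (1-12), while these forecasts start on 1 November and cover eight lead months, which is why they are reindexed with indices = c(11, 12, 1:6). A minimal base-R check of that mapping:

start_month <- 11                          # forecasts initialized on 1 November
lead_months <- ((start_month + 0:7 - 1) %% 12) + 1
print(lead_months)                         # 11 12  1  2  3  4  5  6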
+agg_png(paste0(dir_output, "RF100_WeightsDec.png"),
+        width = 1000, height = 1100, units = 'px',res = 144)
+PlotEquiMap(weight$data[,,12], lon = weight$lon, lat = weight$lat,
+            filled.continents = FALSE, title_scale = 1,
+            intylat = 2, intxlon = 2,
+            toptitle = 'December Weights RF 100')
+dev.off()
+
+# --------------------------------------------
+# STEP 4:
+# --------------------------------------------
+
+weights <- Subset(weight$data, along = 'monthly', indices = c(11, 12, 1:6))
+slope <- Subset(slope, along = 'monthly', indices = c(11, 12, 1:6),
+                drop = 'non-selected')
+k = 1 # To create the member ID
+#-----
+# To be removed when CSTools 4.0.1 is published:
+source("/gpfs/scratch/bsc32/bsc32339/CSTools_manuscript/CST_SaveExp.R")
+library(multiApply)
+library(ncdf4)
+library(s2dv)
+#-----
+for (realizations in 1:10) {
+  for (member in 1:25) {
+    result <- exp.qm # template cube to store the data
+    result$data <- NULL
+    for (month in 1:8) {
+      data <- exp.qm # to take the correct piece of data
+      data$data <- data$data[1, member, , , , , month]
+      fs <- CST_RainFARM(data, nf = 100,
+                         weights = weights, slope = slope[month],
+                         kmin = 1, nens = 1, verbose = TRUE,
+                         nprocs = 8,
+                         drop_realization = TRUE)
+      print(dim(fs$data))
+      result$data <- abind::abind(result$data, fs$data, along = 5)
+      if (month == 2 & member == 1 & realizations == 1) {
+        # ----------------------------
+        # Visualization:
+        # ----------------------------
+        agg_png(paste0(dir_output, "RF100_Down_11dec.png"),
+                width = 1000, height = 1100, units = 'px',res = 144)
+        PlotEquiMap(fs$data[1,11,,],lon = fs$lon, lat = fs$lat,
+                    filled.continents = FALSE, bar_limits = c(0,40),
+                    intylat = 2, intxlon = 2, title_scale = 1,
+                    triangle_ends = c(TRUE, FALSE),
+                    toptitle = 'Downscaled RF 100', units = 'precipitation (mm)')
+        dev.off()
+        # PlotForecastPDF
+        library(abind)
+        obsts <- MeanDims(obs$data, c('lat', 'lon'), na.rm = T)
+        print(quantile(obsts, c(0.1, 0.3, 0.6, 0.9), na.rm = T))
+        expts <- MeanDims(exp$data, c('lat', 'lon'), na.rm = T)
+        exp.qmts <- MeanDims(exp.qm$data, c('lat', 'lon'), na.rm = T)
+        empty <- array(NA, c(dataset = 2, member = 225, sdate = 26,
+                             ftime = 31, monthly = 8))
+        data <- abind(expts, exp.qmts, along = 1)
+        names(dim(data)) <- names(dim(expts))
+        data <- abind(data, empty, along = 2)
+        names(dim(data)) <- names(dim(expts))
+        fsts <- MeanDims(fs$data, c('lat', 'lon'), na.rm = T)
+        print(dim(fsts))
+        print(dim(data))
+        data <- abind(data, fsts, along = 1)
+        names(dim(data)) <- c('dataset', 'members', 'sdate', 'ftime', 'monthly')
+        agg_png(paste0(dir_output, "/FiguresPDF_RF100_11DecemberAll2.png"),
+                width = 750, height = 650, units = 'px', res = 144)
+        PlotForecastPDF(data[,,1,11,2], tercile.limits = c(0.58, 2.4),
+                        obs = obsts[,,1,11,2],
+                        extreme.limits = c(0.06, 7.26),
+                        color.set = 'hydro', add.ensmemb = 'no',
+                        var.name = "Precipitation (mm)",
+                        title = "Forecasts issued on Nov 1993 for 11th December 1993",
+                        fcst.names = c("ECMWFS5C3S", "Bias Adjusted", "Downscaled"))
+        dev.off()
+      }
+    }
+    result$lon <- fs$lon
+    result$lat <- fs$lat
+    result <- CST_MergeDims(result, merge_dims = c("ftime", "monthly"),
+                            na.rm = TRUE)
+    result$Dataset <- paste0('RF100_ECMWFC3S_QM_member_', member, '_real_',
+                             realizations)
+    result$Dates[[1]] <- exp$Dates[[1]]
+    CST_SaveExp(result, destination = dir_output,
+                extra_string = paste0('member', k))
+    gc()
+    k = k + 1
+    rm(list = c('fs', 'result', 'data'))
+  }
+}
+
+
diff --git a/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R 
b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R index 6917ae6d..44b1a94e 100644 --- a/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R +++ b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R @@ -92,16 +92,17 @@ save(exp.qm, file = paste0(dir_output, 'ExpQM.RDS'), version = 2) #wc_month <- s2dv_cube(data = wc_month, lon = lons, lat = lats, # Datasets = 'WorldClim') #weight <- CST_RFWeights(wc_month, lon = 4:11, lat = 49:42, nf = 4) -#save(weight, file = paste0(dir_output, 'weights.RDS')) +#save(weight, file = paste0(dir_output, 'weightsRF4.RDS')) -load(paste0(dir_output, 'weights.RDS')) +load(paste0(dir_output, 'weightsRF4.RDS')) # -------------------------------------------- # STEP 4: # -------------------------------------------- -weights <- Subset(weight$data, along = 'monthly', indices = c(11,12,1:1:6)) -slope <- Subset(slope, along = 'monthly', indices = c(11,12,1:1:6), drop = 'non-selected') +weights <- Subset(weight$data, along = 'monthly', indices = c(11, 12, 1:6)) +slope <- Subset(slope, along = 'monthly', indices = c(11, 12, 1:6), + drop = 'non-selected') fs <- CST_RainFARM(exp.qm, nf = 4, weights = weights, slope = slope, kmin = 1, nens = 10, verbose = TRUE, @@ -118,42 +119,47 @@ CST_SaveExp(newfs, destination = dir_output) # -------------------------------------------- library(s2dv) agg_png(paste0(dir_output, "EXP_11dec.png"), - width = 1000, height = 1000, units = 'px',res = 144) + width = 1000, height = 1100, units = 'px',res = 144) PlotEquiMap(exp$data[1,1,1,11,,,2],lon = exp$lon, lat = exp$lat, filled.continents = FALSE, bar_limits = c(0,40), - intylat = 2, intxlon = 2, title_scale = 0.7, + intylat = 2, intxlon = 2, title_scale = 1, + triangle_ends = c(TRUE, FALSE), toptitle = 'ECMWF-S5C3S', units = 'precipitation (mm)') dev.off() agg_png(paste0(dir_output, "EXPQM_11dec.png"), - width = 1000, height = 1000, units = 'px',res = 144) + width = 1000, height = 1100, units = 'px',res = 144) PlotEquiMap(exp.qm$data[1,1,1,11,,,2],lon = exp$lon, lat = exp$lat, filled.continents = FALSE, bar_limits = c(0,40), - intylat = 2, intxlon = 2, title_scale = 0.7, + intylat = 2, intxlon = 2, title_scale = 1, + triangle_ends = c(TRUE, FALSE), toptitle = 'Bias Adjusted', units = 'precipitation (mm)') dev.off() -agg_png(paste0(dir_output, "Down_11dec.png"), - width = 1000, height = 1000, units = 'px',res = 144) +agg_png(paste0(dir_output, "RF4_Down_11dec.png"), + width = 1000, height = 1100, units = 'px',res = 144) PlotEquiMap(fs$data[1,1,1,11,,,2],lon = fs$lon, lat = fs$lat, filled.continents = FALSE, bar_limits = c(0,40), - intylat = 2, intxlon = 2, title_scale = 0.7, - toptitle = 'Downsacaled', units = 'precipitation (mm)') + intylat = 2, intxlon = 2, title_scale = 1, + triangle_ends = c(TRUE, FALSE), + toptitle = 'Downsacaled RF 4', units = 'precipitation (mm)') dev.off() -agg_png(paste0(dir_output, "WeightsDec.png"), - width = 1000, height = 1000, units = 'px',res = 144) +agg_png(paste0(dir_output, "RF4_WeightsDec.png"), + width = 1000, height = 1100, units = 'px',res = 144) PlotEquiMap(weight$data[,,12], lon = weight$lon, lat = weight$lat, - filled.continents = FALSE, + filled.continents = FALSE, title_scale = 1, intylat = 2, intxlon = 2, - toptitle = 'December Weights WorldClim2') + toptitle = 'December Weights RF 4') dev.off() agg_png(paste0(dir_output, "Slope.png"), - width = 1000, height = 1000, units = 'px',res = 144) -plot(1:12, slope_plot, type = 'b', main = 'slope', pch = 3) -line(12, slope_plot[12], type = 'p', col = 'red', pch = 3) + width = 700, 
height = 700, units = 'px',res = 144) +plot(1:12, slope_plot, type = 'b', main = 'Slope', pch = 16, xlab = 'month', + ylab = 'Slope', bty = 'n') +lines(12, slope_plot[12], type = 'p', col = 'red', pch = 16) dev.off() # Plot ForecastPDF +library(abind) obsts <- MeanDims(obs$data, c('lat', 'lon'), na.rm = T) -print(quantile(obsts, c(0.1,0.3,0.6,0.9), na.rm = T)) +print(quantile(obsts, c(0.1, 0.3, 0.6, 0.9), na.rm = T)) expts <- MeanDims(exp$data, c('lat', 'lon'), na.rm = T) exp.qmts <- MeanDims(exp.qm$data, c('lat', 'lon'), na.rm = T) empty <- array(NA, c(dataset = 2, member = 225, sdate = 26, ftime = 31, monthly = 8)) @@ -161,15 +167,18 @@ data <- abind(expts, exp.qmts, along = 1) names(dim(data)) <- names(dim(expts)) data <- abind(data, empty, along = 2) names(dim(data)) <- names(dim(expts)) -fsts <- MeanDims(fs, c('lat', 'lon'), na.rm = T) +fsts <- MeanDims(fs$data, c('lat', 'lon'), na.rm = T) data <- abind(data, fsts, along = 1) names(dim(data)) <- c('dataset', 'members', 'sdate', 'ftime', 'monthly') -agg_png(paste0(dir_output, "/FiguresPDF_11DecemberAll2.png", - width = 750, height = 650, units = 'px', res = 144) -PlotForecastPDF(data[,,1,11,2], tercile.limits = c(0.58, 2.4), obs = obsts[,,1,11,2], - extreme.limits = c(0.06, 7.26), color.set = 'hydro', add.ensmemb = 'no', - var.name = "Precipitation (mm)", title = "Forecasts issued on Nov 1993 for 11th December 1993", - fcst.names = c("ECMWFS5C3S", "Bias Adjusted", "Downscaled")) +agg_png(paste0(dir_output, "/FiguresPDF_RF4_11DecemberAll2.png"), + width = 1000, height = 700, units = 'px', res = 144) +PlotForecastPDF(data[,,1,11,2], tercile.limits = c(0.67, 2.5), + #obs = obsts[,,1,11,2], + extreme.limits = c(0.09, 7.3), color.set = 'hydro', + #add.ensmemb = 'no', + var.name = "Precipitation (mm)", + title = "Forecasts issued on Nov 1993 for 11th December 1993", + fcst.names = c("ECMWFS5C3S", "Bias Adjusted", "Downscaled RF 4")) dev.off() diff --git a/man/CST_SaveExp.Rd b/man/CST_SaveExp.Rd index ddd9164e..92c97282 100644 --- a/man/CST_SaveExp.Rd +++ b/man/CST_SaveExp.Rd @@ -5,7 +5,7 @@ \title{Save CSTools objects of class 's2dv_cube' containing experiments or observed data in NetCDF format} \usage{ -CST_SaveExp(data, destination = "./CST_Data") +CST_SaveExp(data, destination = "./CST_Data", extra_string = NULL) } \arguments{ \item{data}{an object of class \code{s2dv_cube}.} @@ -15,6 +15,8 @@ to save the data. NetCDF file for each starting date are saved into the folder tree: destination/experiment/variable/. By default the function creates and saves the data into the folder "CST_Data" in the working directory.} + +\item{extra_string}{a character string to be include as part of the file name, for instance, to identify member or realization. It would be added to the file name between underscore characters.} } \description{ This function allows to divide and save a object of class diff --git a/man/SaveExp.Rd b/man/SaveExp.Rd index 40ace2db..345974b2 100644 --- a/man/SaveExp.Rd +++ b/man/SaveExp.Rd @@ -15,7 +15,8 @@ SaveExp( Dates, cdo_grid_name, projection, - destination + destination, + extra_string = NULL ) } \arguments{ @@ -40,9 +41,11 @@ SaveExp( \item{projection}{a character string indicating the projection name} \item{destination}{a character string indicating the path where to store the NetCDF files} + +\item{extra_string}{a character string to be include as part of the file name, for instance, to identify member or realization.} } \value{ -the function creates as many files as sdates per dataset. 
Each file could contain multiple members +the function creates as many files as sdates per dataset. Each file could contain multiple members. It would be added to the file name between underscore characters. The path will be created with the name of the variable and each Datasets. } \description{ -- GitLab From 11969d0ea5abd7106ab7ecfda2c21ae2193d76ea Mon Sep 17 00:00:00 2001 From: Bert Van Schaeybroeck Date: Mon, 14 Jun 2021 14:19:21 +0000 Subject: [PATCH 34/48] Use Case 3 code --- .../UseCase3_data_preparation_SCHEME_model.R | 436 ++++++++++++++++++ 1 file changed, 436 insertions(+) create mode 100644 inst/doc/UseCase3_data_preparation_SCHEME_model.R diff --git a/inst/doc/UseCase3_data_preparation_SCHEME_model.R b/inst/doc/UseCase3_data_preparation_SCHEME_model.R new file mode 100644 index 00000000..0ff104db --- /dev/null +++ b/inst/doc/UseCase3_data_preparation_SCHEME_model.R @@ -0,0 +1,436 @@ +rm(list = ls()) +library(CSTools) +library(s2dverification) +library(CSTools) +library(ClimProjDiags) + +#SETUP PARAMETERS (TO FIX BEFORE RUNNING SCRIPT): +#------------------------------------------------ +var.to.use <- "prlr" #which variable to correct (prlr=precipitation, tasmin, tasmax, tas) + +init.yr <- 1993 #initial year (for ECMWF Sys5 = 1993) +end.yr <- 2019 #end year (for ECMWF Sys5 = 2019) +amt.ftime <- 214 +n.cores.to.use <- 20 +use.chirps <- T +eval.method.to.use <- "leave-one-out" +domain.high.res <- "greece_high_res" +domain.low.res <- "greece_low_res" +sdate.mon.to.use <- 5 #Month of startdate for ECMWF Sys5 possibilities are 5 (May) or 11 (November) +sdate.day.to.use <- 1 #Day of startdate for ECMWF Sys5 only possibility is 1 + +#LOCAL PARAMETERS (to be adjusted for each working system) +#--------------------------------------------------------- +dir.rdata <- "/mnt/HDS_URCLIM/URCLIM/bertvs/medscope/data/greece_rdata/" + +#dir.scripts <- "/mnt/netapp/home/bertvs/ARCHIVE_bertvs/R/medscope/D3.1/" +dir.scratch <- "/scratch/bertvs/" +#Base path for C3S forecast (experiment) dataset: +dir.c3s <- "/mnt/HDS_URCLIM/URCLIM/bertvs/medscope/data/" +#Base path for ERA5 reference or obs dataset: +dir.era5 <- "/mnt/HDS_BREGILABEPOC/BREGILABEPOC/era5/europe/daily/per_mon/" +#Base path for CHIRPS (reference or obs) rainfall dataset: +dir.chirps <- "/mnt/HDS_MEDYCLIM/MEDYCLIM/PREDANTAR/climate_data/obs/chirps/" +dir.chirps.low.res <- paste0(dir.chirps, "/", domain.low.res, "/per_mon/") +dir.chirps.high.res <- paste0(dir.chirps, "/", domain.high.res, "/per_mon/") + + +#AUXILIARY FUNCTIONS +#------------------- +set.msk <- function(x, msk, const){ + x[msk] = const + return(x) +} + +#FIXED PARAMETERS: +#----------------- +greece.coor.vec <- c( + lonmin = 18.975, + lonmax = 24.025, + latmin = 37.975, + latmax = 43.025) +greece.coor.lst <- list( + lon.min = 18.975, + lon.max = 24.025, + lat.min = 37.975, + lat.max = 43.025) +coor.to.use <- greece.coor.lst + +europe.coor <- list( + lon.min = -27, + lon.max = 45, + lat.min = 33, + lat.max = 73.5) + +#Large-scale pressure field metadata (necessary for analogs) +var.msl <- "mean_sea_level_pressure" +nc.var.name.msl <- "msl" + +#Depending on the variable loaded, different datasets and metadata are used +if(var.to.use == "prlr"){ #Precipitation + var.era5 <- "total_precipitation" + time.era5 <- "daily" + nc.var.name.era5 <- "tp" + var.chirps <- "precip" + time.chirps <- "daily" + nc.var.name.chirps <- "precip" + cal.meth.to.use <- "bias" #method for bias calibration + chirps.low.res.daily <- list( + name = "chirps_low_res", + path = 
paste0(dir.chirps.low.res, "chirps-v2.0.$YEAR$.days_greece_low_res-$MONTH$.nc"),
+    nc_var_name = nc.var.name.chirps)
+  chirps.high.res.daily <- list(
+    name = "chirps_high_res",
+    path = paste0(dir.chirps.high.res,
+                  "chirps-v2.0.$YEAR$.days_greece_high_res-$MONTH$.nc"),
+    nc_var_name = nc.var.name.chirps)
+  #unit conversions
+  mul.cor.era5 <- 1000 * 24
+  add.cor.era5 <- 0
+  mul.cor.chirps <- 1
+  add.cor.chirps <- 0
+  mul.cor.exp <- 1000 * 3600 * 24
+  add.cor.exp <- 0
+  if(use.chirps){
+    add.cor.obs <- add.cor.chirps
+    mul.cor.obs <- mul.cor.chirps
+  } else {
+    add.cor.obs <- add.cor.era5
+    mul.cor.obs <- mul.cor.era5
+  }
+} else if(var.to.use == "tas"){
+  var.era5 <- "2m_temperature"
+  time.era5 <- "daily"
+  nc.var.name.era5 <- "t2m"
+  cal.meth.to.use <- "mse_min"
+
+  #unit conversions (identity)
+  mul.cor.era5 <- 1
+  add.cor.era5 <- 0
+  mul.cor.exp <- 1
+  add.cor.exp <- 0
+  add.cor.obs <- add.cor.era5
+  mul.cor.obs <- mul.cor.era5
+} else if(var.to.use == "tasmin"){
+  var.era5 <- "2m_temperature"
+  time.era5 <- "daily_min"
+  nc.var.name.era5 <- "t2m"
+  cal.meth.to.use <- "mse_min" #method for bias calibration
+
+  #unit conversions (identity)
+  mul.cor.era5 <- 1
+  add.cor.era5 <- 0
+  mul.cor.exp <- 1
+  add.cor.exp <- 0
+  add.cor.obs <- add.cor.era5
+  mul.cor.obs <- mul.cor.era5
+} else if(var.to.use == "tasmax"){
+  var.era5 <- "2m_temperature"
+  time.era5 <- "daily_max"
+  nc.var.name.era5 <- "t2m"
+  cal.meth.to.use <- "mse_min" #method for bias calibration
+
+  #unit conversions (identity)
+  mul.cor.era5 <- 1
+  add.cor.era5 <- 0
+  mul.cor.exp <- 1
+  add.cor.exp <- 0
+  add.cor.obs <- add.cor.era5
+  mul.cor.obs <- mul.cor.era5
+}
+
+
+#Experiment path specification:
+ecmwf.s5.daily <- list(
+  name = "ecmwfS5",
+  path = paste0(dir.c3s,
+                "C3S/$EXP_NAME$/$STORE_FREQ$/$VAR_NAME$/",
+                "$VAR_NAME$_$START_DATE$.nc"))
+#Reference or obs path specifications (ERA5 data available over Europe):
+era5.daily <- list(name = "era5",
+                   path = paste0(dir.era5, "era5-", time.era5,
+                                 "-europe-", var.era5, "-$YEAR$-$MONTH$.nc"),
+                   nc_var_name = nc.var.name.era5)
+#Reference or obs path specifications for pressure field (ERA5 data available over Europe):
+msl.era5.daily <- list(name = "msl",
+                       path = paste0(dir.era5, "era5-", time.era5,
+                                     "-europe-", var.msl, "-$YEAR$-$MONTH$.nc"),
+                       nc_var_name = nc.var.name.msl)
+
+#UNIVERSAL PARAMETERS:
+#---------------------
+amt.mon.per.yr <- 12
+amt.day.per.mon <- 31
+sdate.day.str <- formatC(sdate.day.to.use, width = 2, flag = "0")
+sdate.mon.str <- formatC(sdate.mon.to.use, width = 2, flag = "0")
+day.lst <- formatC(seq(1, amt.day.per.mon), width = 2, flag = "0")
+yr.lst <- seq(init.yr, end.yr)
+amt.yr <- length(yr.lst)
+sdate.lst <- paste0(yr.lst, sdate.mon.str, sdate.day.str)
+
+
+#START
+#-----
+
+#1. LOAD THE DATA
+#----------------
+
+if(use.chirps){
+  obs.set.to.use <- chirps.low.res.daily
+} else {
+  obs.set.to.use <- era5.daily
+}
+
+#Load mean sea level pressure field from ERA5 (no need to set the units)
+
+file.to.load <- paste0(dir.rdata, "msl_all.RData")
+if(file.exists(file.to.load)){
+  load(file.to.load, verbose = T)
+} else {
+  msl.all <- CST_Load(
+    var = "psl", #nc.var.name.msl,
+    obs = list(msl.era5.daily),
+    exp = list(ecmwf.s5.daily),
+    nmember = NULL,
+    sdates = sdate.lst,
+    lonmin = europe.coor$lon.min,
+    lonmax = europe.coor$lon.max,
+    latmin = europe.coor$lat.min,
+    latmax = europe.coor$lat.max,
+    output = "lonlat",
+    storefreq = "daily",
+    nprocs = n.cores.to.use)
+  save(file = file.to.load, msl.all)
+}
+
+#Data manipulation: first split lead times per month. Then merge all data per month and all sdates.
+#This merged dataset will be used to calibrate (per month) and find analogs (per month).
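The split-then-merge idiom used throughout this script is easier to follow with the dimension sizes written out. A sketch for this setup (27 start years, 214 daily lead times), assuming CST_SplitDim's default monthly splitting and the dimension names used here:

# loaded:  c(dataset, member, sdate = 27, ftime = 214, lat, lon)
# split:   c(dataset, member, sdate = 27, ftime = 31, lat, lon, monthly = 7)
# merged:  c(dataset, member, sdate = 27 * 31 = 837, lat, lon, monthly = 7)
# Each calendar month can then be calibrated with all days of all years
# pooled, which matches the 837 "starting days" noted further down.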
+obs.msl.eur.split <- CST_SplitDim(msl.all$obs, split_dim = c("ftime")) +exp.msl.eur.split <- CST_SplitDim(msl.all$exp, split_dim = c("ftime")) +obs.msl.eur.merge <- CST_MergeDims( + obs.msl.eur.split, + merge_dims = c("sdate", "ftime"), + rename_dim = "sdate") +exp.msl.eur.merge <- CST_MergeDims( + exp.msl.eur.split, + merge_dims = c("sdate", "ftime"), + rename_dim = "sdate") + +#Load observational and forecast set of variable that needs to be calibrated and downscaled: +file.to.load <- paste0(dir.rdata, "data_all.RData") +if(file.exists(file.to.load)){ + load(file.to.load, verbose = T) +} else { + data.all <- CST_Load( + var = var.to.use, + obs = list(obs.set.to.use), + exp = list(ecmwf.s5.daily), + nmember = NULL, + sdates = sdate.lst, + lonmin = coor.to.use$lon.min, + lonmax = coor.to.use$lon.max, + latmin = coor.to.use$lat.min, + latmax = coor.to.use$lat.max, + output = "lonlat", + storefreq = "daily", + nprocs = n.cores.to.use) + save(file = file.to.load, data.all) +} +#Set the units: +data.all$obs$data <- data.all$obs$data * mul.cor.obs + add.cor.obs +data.all$exp$data <- data.all$exp$data * mul.cor.exp + add.cor.exp + +#Data manipulation: first split lead times per month. Then merge all data per month and all sdates. +#This merged dataset will be used to calibrate (per month) and find analogs (per month). +obs.split <- CST_SplitDim(data.all$obs, split_dim = c("ftime")) +exp.split <- CST_SplitDim(data.all$exp, split_dim = c("ftime")) +obs.merge <- CST_MergeDims( + obs.split, + merge_dims = c("sdate", "ftime"), + rename_dim = "sdate") +exp.merge <- CST_MergeDims( + exp.split, + merge_dims = c("sdate", "ftime"), + rename_dim = "sdate") + +#Calibrate the exp data (per month) +cal.merge <- CST_Calibration( + exp = exp.merge, + obs = obs.merge, + cal.method = cal.meth.to.use, + eval.method = eval.method.to.use) +cal.merge$data[cal.merge$data < 0] <- 0 + +#LOAD HIGH RES CHIRPS DATA +file.to.load <- paste0(dir.rdata, "obs_high_res.RData") +if(file.exists(file.to.load)){ + load(file.to.load, verbose = T) +} else { + obs.high.res <- CST_Load(var = var.to.use, + obs = list(chirps.high.res.daily), + exp = NULL, + sdates = sdate.lst, + nmember = 1, + leadtimemax = amt.ftime, + sampleperiod = 1, + lonmin = coor.to.use$lon.min, + lonmax = coor.to.use$lon.max, + latmin = coor.to.use$lat.min, + latmax = coor.to.use$lat.max, + output = "lonlat", + storefreq = "daily", + nprocs = n.cores.to.use) + save(file = file.to.load, obs.high.res) +} +#set the units +obs.high.res$data <- obs.high.res$data * mul.cor.chirps + + add.cor.chirps +#split per month +obs.high.res.split <- CST_SplitDim( + obs.high.res, + split_dim = c("ftime")) +#merge lead times and sdates +obs.high.res.merge <- CST_MergeDims( + obs.high.res.split, + merge_dims = c("sdate", "ftime"), + rename_dim = "sdate") + +#LOAD LOW RES CHIRPS DATA +file.to.load <- paste0(dir.rdata, "obs_low_res.RData") +if(file.exists(file.to.load)){ + load(file.to.load, verbose = T) +} else { + obs.low.res <- CST_Load(var = var.to.use, + obs = list(chirps.low.res.daily), + exp = NULL, + sdates = sdate.lst, + nmember = 1, + leadtimemax = amt.ftime, + sampleperiod = 1, + lonmin = coor.to.use$lon.min, + lonmax = coor.to.use$lon.max, + latmin = coor.to.use$lat.min, + latmax = coor.to.use$lat.max, + output = "lonlat", + storefreq = "daily", + nprocs = n.cores.to.use) + save(file = file.to.load, obs.low.res) +} +#set units +obs.low.res$data <- obs.low.res$data * mul.cor.chirps + + add.cor.chirps +#split per month +obs.low.res.split <- CST_SplitDim( + obs.low.res, + 
split_dim = c("ftime")) +#merge lead times and sdates +obs.low.res.merge <- CST_MergeDims( + obs.low.res.split, + merge_dims = c("sdate", "ftime"), + rename_dim = "sdate") + + +#2. PROCESS THE DATA +#------------------- + +#amount of ensemble members from experiment. For ECMWF Sys5 it is 25: +amt.mbr <- as.numeric(dim(cal.merge$data)["member"]) +lon.low.res <- as.vector(cal.merge$lon) +lat.low.res <- as.vector(cal.merge$lat) +lon.high.res <- as.vector(obs.high.res$lon) +lat.high.res <- as.vector(obs.high.res$lat) +lon.eur <- as.vector(obs.msl.eur.merge$lon) +lat.eur <- as.vector(obs.msl.eur.merge$lat) + +#amount of lead times in months. For ECMWF Sys5 it is 7: +amt.lead.mon <- as.numeric(dim(cal.merge$data)["monthly"]) +mon.seq.tmp <- seq(sdate.mon.to.use, sdate.mon.to.use + amt.lead.mon - 1) +mon.seq.tmp <- ((mon.seq.tmp - 1) %% amt.mon.per.yr) + 1 +lead.mon.lst <- formatC(mon.seq.tmp, width = 2, flag = "0") +#amount of starting days from experiment. For ECMWF Sys5 it is 837: +amt.sdate <- as.numeric(dim(cal.merge$data)["sdate"]) + +sub.time <- outer( + as.vector(t(outer(yr.lst, day.lst, paste, sep="-"))), + lead.mon.lst, + paste, sep = "-") +#This step is necessary to set the non-existent dates to NA +sub.time <- format(as.Date(sub.time, format("%Y-%d-%m")), "%Y-%m-%d") +dim(sub.time) <- c(sdate = amt.yr * amt.day.per.mon, time = amt.lead.mon) + +cal.high.res.merge <- obs.high.res.merge +cal.high.res.merge$data[] <- NA + +#Determine spatial points with all obs.high.res.merge (CHIRPS) data equal to NA. These are the points over sea. +is.na.high.res.obs <- apply( + obs.high.res.merge$data, + MARGIN = c(4, 5), + FUN = function(x){all(is.na(x))}) +#Determine spatial points with all obs.low.res.merge (CHIRPS) data equal to NA. These are the points over sea. +is.na.low.res.obs <- apply( + obs.low.res.merge$data, + MARGIN = c(4, 5), + FUN = function(x){all(is.na(x))}) + +#Set all calibrated exp data (cal.merge) equal to NA at the sea point. +cal.merge.tmp = Apply( + data = list(x = cal.merge$data), + target_dims = list(x = c("lat", "lon")), + fun = set.msk, + msk = is.na.low.res.obs, + const = 0, + output_dims = list(c("lat", "lon")) + )$output1 +dex.match <- match(names(dim(cal.merge$data)), names(dim(cal.merge.tmp))) +cal.merge$data <- aperm(cal.merge.tmp, dex.match) +rm(cal.merge.tmp) + +#2. 
DOWNSCALE THE DATA WITH ANALOGS
+#-------------------
+
+i.dataset <- 1
+i.mbr.obs <- 1
+for(i.mbr in seq(1, amt.mbr)){
+  for(i.mon in seq(1, amt.lead.mon)){
+    for(i.sdate in seq(1, amt.sdate)){
+      date.to.use <- sub.time[ i.sdate, i.mon]
+      date.an.lst <- sub.time[ , i.mon]
+      cat("i.mbr = ", i.mbr, ", i.mon =", i.mon, ", i.sdate = ",
+          i.sdate, "date: ", date.to.use,"\n")
+
+
+      #Extract the (calibrated) forecast that you want to downscale:
+      exp.low.res.tmp <- exp.merge$data[i.dataset, i.mbr, i.sdate, , , i.mon]
+      cal.low.res.tmp <- cal.merge$data[i.dataset, i.mbr, i.sdate, , , i.mon]
+      #Extract the large-scale pressure field of that day
+      exp.msl.eur.tmp <- exp.msl.eur.merge$data[i.dataset, i.mbr, i.sdate, , , i.mon]
+
+      #Extract all observations that will be used to find analogs
+      obs.msl.eur.tmp <- obs.msl.eur.merge$data[i.dataset, i.mbr.obs, , , , i.mon]#-i.sdate
+      obs.low.res.tmp <- obs.low.res.merge$data[i.dataset, i.mbr.obs, , , , i.mon] #-i.sdate
+      obs.high.res.tmp <- obs.high.res.merge$data[i.dataset, i.mbr.obs, , , , i.mon] #-i.sdate
+      names(dim(obs.high.res.tmp)) <- c("time", "lat", "lon")
+      names(dim(obs.low.res.tmp)) <- c("time", "lat", "lon")
+      names(dim(obs.msl.eur.tmp)) <- c("time", "lat", "lon")
+      if(!is.na(date.to.use) & !all(is.na(cal.low.res.tmp))){
+        obs.low.res.tmp[is.na(obs.low.res.tmp)] <- 0
+
+        res <- Analogs(
+          expL = exp.msl.eur.tmp,
+          obsL = obs.msl.eur.tmp,
+          time_obsL = date.an.lst,
+          obsVar = obs.low.res.tmp,
+          expVar = exp.low.res.tmp,
+          lonVar = lon.low.res,
+          latVar = lat.low.res,
+          lonL = lon.eur,
+          latL = lat.eur,
+          region = greece.coor.vec,
+          criteria = "Local_dist",
+          time_expL = date.to.use,
+          excludeTime = date.to.use,
+          AnalogsInfo = T,
+          nAnalogs = 1000)
+      }
+    }
+  }
+}
+
+
-- 
GitLab


From 8824d09546961f4f36ef427013d8248f2955bdf1 Mon Sep 17 00:00:00 2001
From: nperez
Date: Mon, 14 Jun 2021 17:08:23 +0200
Subject: [PATCH 35/48] remove commented lines

---
 ...Case2_PrecipitationDownscaling_RainFARM_RF4.R | 16 +++++++---------
 1 file changed, 7 insertions(+), 9 deletions(-)

diff --git a/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R
index 44b1a94e..5af7770c 100644
--- a/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R
+++ b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R
@@ -69,7 +69,7 @@ exp.qm <- CST_QuantileMapping(exp, obs, method = "QUANT",
                               sample_dims = c('member', 'sdate', 'ftime'),
                               ncores = 4)
 save(exp.qm, file = paste0(dir_output, 'ExpQM.RDS'), version = 2)
-
+save(exp, file = paste0(dir_output, 'Exp.RDS'), version = 2)
 # --------------------------------------------
 # STEP 3:
 # --------------------------------------------
@@ -112,7 +112,7 @@ newfs <- CST_MergeDims(fs, merge_dims = c("ftime", "monthly"),
                        na.rm = TRUE)
 newfs$Dates[[1]] <- exp$Dates[[1]]
-CST_SaveExp(newfs, destination = dir_output)
+CST_SaveExp(newfs, destination = paste0(dir_output, 'RF4/'))
 
 # --------------------------------------------
 # Visualization
 # --------------------------------------------
@@ -124,7 +124,7 @@ PlotEquiMap(exp$data[1,1,1,11,,,2],lon = exp$lon, lat = exp$lat,
             filled.continents = FALSE, bar_limits = c(0,40),
             intylat = 2, intxlon = 2, title_scale = 1,
             triangle_ends = c(TRUE, FALSE),
-            toptitle = 'ECMWF-S5C3S', units = 'precipitation (mm)')
+            toptitle = 'SEAS5', units = 'precipitation (mm)')
 dev.off()
 agg_png(paste0(dir_output, "EXPQM_11dec.png"),
         width = 1000, height = 1100, units = 'px',res = 144)
@@ -140,14 +140,14 @@ PlotEquiMap(fs$data[1,1,1,11,,,2],lon = fs$lon, lat = fs$lat, 
filled.continents = FALSE, bar_limits = c(0,40), intylat = 2, intxlon = 2, title_scale = 1, triangle_ends = c(TRUE, FALSE), - toptitle = 'Downsacaled RF 4', units = 'precipitation (mm)') + toptitle = 'Downscaled nf 4', units = 'precipitation (mm)') dev.off() agg_png(paste0(dir_output, "RF4_WeightsDec.png"), width = 1000, height = 1100, units = 'px',res = 144) PlotEquiMap(weight$data[,,12], lon = weight$lon, lat = weight$lat, filled.continents = FALSE, title_scale = 1, intylat = 2, intxlon = 2, - toptitle = 'December Weights RF 4') + toptitle = 'December Weights nf 4') dev.off() agg_png(paste0(dir_output, "Slope.png"), width = 700, height = 700, units = 'px',res = 144) @@ -171,14 +171,12 @@ fsts <- MeanDims(fs$data, c('lat', 'lon'), na.rm = T) data <- abind(data, fsts, along = 1) names(dim(data)) <- c('dataset', 'members', 'sdate', 'ftime', 'monthly') agg_png(paste0(dir_output, "/FiguresPDF_RF4_11DecemberAll2.png"), - width = 1000, height = 700, units = 'px', res = 144) + width = 1100, height = 700, units = 'px', res = 144) PlotForecastPDF(data[,,1,11,2], tercile.limits = c(0.67, 2.5), - #obs = obsts[,,1,11,2], extreme.limits = c(0.09, 7.3), color.set = 'hydro', - #add.ensmemb = 'no', var.name = "Precipitation (mm)", title = "Forecasts issued on Nov 1993 for 11th December 1993", - fcst.names = c("ECMWFS5C3S", "Bias Adjusted", "Downscaled RF 4")) + fcst.names = c("SEAS5", "Bias Adjusted", "Downscaled nf 4")) dev.off() -- GitLab From 3eefddcb6bb313d7d082ff8d14f1c46e4090f507 Mon Sep 17 00:00:00 2001 From: nperez Date: Thu, 17 Jun 2021 17:50:47 +0200 Subject: [PATCH 36/48] updated docu and fixed common dimensions code --- R/CST_DynBiasCorrection.R | 34 ++++++++++++++++++++++------------ R/Predictability.R | 3 ++- man/CST_DynBiasCorrection.Rd | 5 +---- man/CST_ProxiesAttractor.Rd | 3 +-- man/DynBiasCorrection.Rd | 5 +---- man/Predictability.Rd | 6 ++++-- man/ProxiesAttractor.Rd | 20 +++++++++++--------- 7 files changed, 42 insertions(+), 34 deletions(-) diff --git a/R/CST_DynBiasCorrection.R b/R/CST_DynBiasCorrection.R index 00d8dc68..52096ea6 100644 --- a/R/CST_DynBiasCorrection.R +++ b/R/CST_DynBiasCorrection.R @@ -31,7 +31,6 @@ #'"PTF","RQUANT","QUANT","SSPLIN" #'@param proxy a character string indicating the proxy for local dimension 'dim' or inverse of persistence 'theta' to apply the dynamical conditioned bias correction method. #'@param quanti a number lower than 1 indicating the quantile to perform the computation of local dimension and theta -#'@param time_dim a character string indicating the name of the temporal dimension #'@param ncores The number of cores to use in parallel computation #' #'@return dynbias an s2dvcube object with a bias correction performed @@ -56,7 +55,7 @@ #'obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon, #' Dates = list(start = time_obsL, end = time_obsL)) #'dynbias <- CST_DynBiasCorrection(exp = expL, obs = obsL, proxy= "dim", -#' quanti = 0.6, time_dim = 'time') +#' quanti = 0.6) #' #'@export CST_DynBiasCorrection<- function(exp, obs, method = 'QUANT', @@ -107,7 +106,6 @@ CST_DynBiasCorrection<- function(exp, obs, method = 'QUANT', #'"PTF","RQUANT","QUANT","SSPLIN" #'@param proxy a character string indicating the proxy for local dimension 'dim' or inverse of persistence 'theta' to apply the dynamical conditioned bias correction method. 
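The dimension-renaming guard introduced below is subtle, so here is a minimal sketch of its effect with made-up sizes (only 'time', 'lat', 'lon' and 'sdate' are exempt from the check):

exp <- array(rnorm(25 * 100), c(member = 25, time = 100))
obs <- array(rnorm(100), c(member = 1, time = 100))
# 'member' exists in both inputs with different lengths, so before Apply()
# is called the obs dimension is renamed to avoid a clash:
# names(dim(obs)) becomes c('obs_member', 'time'), and a warning is issued.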
#'@param quanti a number lower than 1 indicating the quantile to perform the computation of local dimension and theta -#'@param time_dim a character string indicating the name of the temporal dimension #'@param ncores The number of cores to use in parallel computation #' #'@return a multidimensional array with named dimensions with a bias correction performed conditioned by local dimension 'dim' or inverse of persistence 'theta' @@ -121,7 +119,7 @@ CST_DynBiasCorrection<- function(exp, obs, method = 'QUANT', #'obsL <- c(rnorm(1:1980),expL[1,,]*1.2) #'dim (obsL) <- c(time = 100,lat = 4, lon = 5) #'dynbias <- DynBiasCorrection(exp = expL, obs = obsL, method='QUANT', -#' proxy= "dim", quanti = 0.6,time_dim='time') +#' proxy= "dim", quanti = 0.6) #'@export DynBiasCorrection<- function(exp, obs, method = 'QUANT', proxy = "dim", quanti, ncores = NULL){ @@ -165,15 +163,27 @@ DynBiasCorrection<- function(exp, obs, method = 'QUANT', } } } + + dim_exp <- dim(exp) + names_to_check <- names(dim_exp)[which(names(dim_exp) %in% + c('time', 'lat', 'lon', 'sdate') == FALSE)] + if (length(names_to_check) > 0) { + dim_obs <- dim(obs) + if (any(names(dim_obs) %in% names_to_check)) { + if (any(dim_obs[which(names(dim_obs) %in% names_to_check)] != + dim_exp[which(names(dim_exp) %in% names_to_check)])) { + for (i in names_to_check) { + pos <- which(names(dim_obs) == i) + names(dim(obs))[pos] <- ifelse(dim_obs[pos] != + dim_exp[which(names(dim_exp) == i)], + paste0('obs_', names(dim_obs[pos])), + names(dim(obs)[pos])) + } + warning("Common dimension names with different length are renamed.") + } + } + } - - if(dim(obs)['member']!=dim(exp)['member']){ - names(dim(obs))[names(dim(obs))=='member'] <- 'memberObs' - } - - if(dim(obs)['dataset']!=dim(exp)['dataset']){ - names(dim(obs))[names(dim(obs))=='dataset'] <- 'datasetObs' - } if (proxy == "dim") { adjusted <- Apply(list(exp, obs), target_dims = 'time', fun = .dynbias, method, diff --git a/R/Predictability.R b/R/Predictability.R index d17ea24c..73df65ee 100644 --- a/R/Predictability.R +++ b/R/Predictability.R @@ -25,6 +25,7 @@ #'for instance: dat(time,grids)=(1:24418,1:1060), where time=24418 timesteps and grids=1060 gridpoints # #'@param theta list of arbitrary length of secondary grids. 
Each secondary grid is to be provided as a list of length 2 with longitudes and latitudes +#'@param ncores The number of cores to use in parallel computation #' #'@return A list of length 2: #' \itemize{ @@ -53,7 +54,7 @@ #'# imposing a threshold #' quanti <- 0.90 #'# computing dyn_scores from parameters dim and theta of the attractor -#' attractor <- ProxiesAttractor(dat = m, quanti = 0.60, iplot = FALSE) +#' attractor <- ProxiesAttractor(dat = m, quanti = 0.60) #' predyn <- Predictability(dim = attractor$dim, theta = attractor$theta) #'@export #' diff --git a/man/CST_DynBiasCorrection.Rd b/man/CST_DynBiasCorrection.Rd index facf6f51..23eefd92 100644 --- a/man/CST_DynBiasCorrection.Rd +++ b/man/CST_DynBiasCorrection.Rd @@ -11,7 +11,6 @@ CST_DynBiasCorrection( method = "QUANT", proxy = "dim", quanti, - time_dim = "ftime", ncores = NULL ) } @@ -27,8 +26,6 @@ CST_DynBiasCorrection( \item{quanti}{a number lower than 1 indicating the quantile to perform the computation of local dimension and theta} -\item{time_dim}{a character string indicating the name of the temporal dimension} - \item{ncores}{The number of cores to use in parallel computation} } \value{ @@ -64,7 +61,7 @@ expL <- s2dv_cube(data = expL, lat = lat, lon = lon, obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon, Dates = list(start = time_obsL, end = time_obsL)) dynbias <- CST_DynBiasCorrection(exp = expL, obs = obsL, proxy= "dim", - quanti = 0.6, time_dim = 'time') + quanti = 0.6) } \references{ diff --git a/man/CST_ProxiesAttractor.Rd b/man/CST_ProxiesAttractor.Rd index 9757ff10..ecf9b9da 100644 --- a/man/CST_ProxiesAttractor.Rd +++ b/man/CST_ProxiesAttractor.Rd @@ -10,8 +10,7 @@ CST_ProxiesAttractor(data, quanti, ncores = NULL) \item{data}{a s2dv_cube object with the data to create the attractor. Must be a matrix with the timesteps in nrow and the grids in ncol(dat(time,grids)} -\item{quanti}{list of arbitrary length of secondary grids. Each secondary grid is to -be provided as a list of length 2 with longitudes and latitudes} +\item{quanti}{a number lower than 1 indicating the quantile to perform the computation of local dimension and theta} \item{ncores}{The number of cores to use in parallel computation} } diff --git a/man/DynBiasCorrection.Rd b/man/DynBiasCorrection.Rd index bd60e0f3..f496c13d 100644 --- a/man/DynBiasCorrection.Rd +++ b/man/DynBiasCorrection.Rd @@ -11,7 +11,6 @@ DynBiasCorrection( method = "QUANT", proxy = "dim", quanti, - time_dim = "time", ncores = NULL ) } @@ -27,8 +26,6 @@ DynBiasCorrection( \item{quanti}{a number lower than 1 indicating the quantile to perform the computation of local dimension and theta} -\item{time_dim}{a character string indicating the name of the temporal dimension} - \item{ncores}{The number of cores to use in parallel computation} } \value{ @@ -50,7 +47,7 @@ expL <- rnorm(1:2000) dim (expL) <- c(time =100,lat = 4, lon = 5) obsL <- c(rnorm(1:1980),expL[1,,]*1.2) dim (obsL) <- c(time = 100,lat = 4, lon = 5) -dynbias <- DynBiasCorrection(exp = expL, obs = obsL, +dynbias <- DynBiasCorrection(exp = expL, obs = obsL, method='QUANT', proxy= "dim", quanti = 0.6) } \references{ diff --git a/man/Predictability.Rd b/man/Predictability.Rd index be0fb623..c36d6fa5 100644 --- a/man/Predictability.Rd +++ b/man/Predictability.Rd @@ -5,13 +5,15 @@ \title{Computing scores of predictability using two dinamical proxies based on dynamical systems theory.} \usage{ -Predictability(dim, theta) +Predictability(dim, theta, ncores = NULL) } \arguments{ \item{dim}{data to create the attractor. 
Must be a matrix with the timesteps in nrow and the grids in ncol for instance: dat(time,grids)=(1:24418,1:1060), where time=24418 timesteps and grids=1060 gridpoints} \item{theta}{list of arbitrary length of secondary grids. Each secondary grid is to be provided as a list of length 2 with longitudes and latitudes} + +\item{ncores}{The number of cores to use in parallel computation} } \value{ A list of length 2: @@ -49,7 +51,7 @@ names(dim(m)) <- c('time', 'grid') # imposing a threshold quanti <- 0.90 # computing dyn_scores from parameters dim and theta of the attractor -attractor <- ProxiesAttractor(dat = m, quanti = 0.60, iplot = FALSE) +attractor <- ProxiesAttractor(dat = m, quanti = 0.60) predyn <- Predictability(dim = attractor$dim, theta = attractor$theta) } \references{ diff --git a/man/ProxiesAttractor.Rd b/man/ProxiesAttractor.Rd index b1fa71e3..768ba736 100644 --- a/man/ProxiesAttractor.Rd +++ b/man/ProxiesAttractor.Rd @@ -4,19 +4,12 @@ \alias{ProxiesAttractor} \title{Computing two dinamical proxies of the attractor.} \usage{ -ProxiesAttractor(data, quanti, iplot = FALSE, ncores = NULL) +ProxiesAttractor(data, quanti, ncores = NULL) } \arguments{ \item{data}{a multidimensional array with named dimensions to create the attractor. It requires a temporal dimension named 'time' and spatial dimensions called 'lat' and 'lon', or 'latitude' and 'longitude' or 'grid'.} -\item{quanti}{list of arbitrary length of secondary grids. Each secondary grid is to -be provided as a list of length 2 with longitudes and latitudes.} - -\item{iplot}{FALSE by default. If TRUE the function returns a simple plot -with three pannels on the top left a plot for local dimension 'd', on the top -right a plot for the inverse of the persistence 'theta', on the bottom a plot -for the 'attractor' plotted with the two properties 'd' (x axis) and 'theta' -(y axis)} +\item{quanti}{a number lower than 1 indicating the quantile to perform the computation of local dimension and theta} \item{ncores}{The number of cores to use in parallel computation.} } @@ -37,6 +30,15 @@ Funtion based on the matlab code (davide.faranda@lsce.ipsl.fr) used in: mat <- array(rnorm(36 * 40), c(time = 36, grid = 40)) qm <- 0.90 # imposing a threshold Attractor <- ProxiesAttractor(data = mat, quanti = qm) +# to plot the result +time = c(1:length(Attractor$theta)) +layout(matrix(c(1, 3, 2, 3), 2, 2)) +plot(time, Attractor$dim, xlab = 'time', ylab = 'd', + main = 'local dimension', type = 'l') +plot(time, Attractor$theta, xlab = 'time', ylab = 'theta', main = 'theta') +plot(Attractor$dim, Attractor$theta, col = 'blue', + main = "Proxies of the Attractor", + xlab = "local dimension", ylab = "theta", lwd = 8, 'p') } \references{ -- GitLab From becb13727e7179a6556182b84722cd9666177870 Mon Sep 17 00:00:00 2001 From: nperez Date: Fri, 18 Jun 2021 17:52:24 +0200 Subject: [PATCH 37/48] Fix in PlotCombinedMap to scale bar titles for Use Case 1 --- DESCRIPTION | 2 +- R/PlotCombinedMap.R | 4 +- ...e2_PrecipitationDownscaling_RainFARM_RF4.R | 52 +++++++++---------- man/PlotCombinedMap.Rd | 3 ++ 4 files changed, 33 insertions(+), 28 deletions(-) diff --git a/DESCRIPTION b/DESCRIPTION index aecd89b3..97a80c72 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -84,4 +84,4 @@ VignetteBuilder: knitr License: Apache License 2.0 Encoding: UTF-8 LazyData: true -RoxygenNote: 7.0.2 +RoxygenNote: 7.0.1 diff --git a/R/PlotCombinedMap.R b/R/PlotCombinedMap.R index 7169e4c4..8af4a14d 100644 --- a/R/PlotCombinedMap.R +++ b/R/PlotCombinedMap.R @@ -22,6 +22,7 @@ #' layers via 
the parameter 'dot_symbol'. #'@param bar_titles Optional vector of character strings providing the titles to be shown on top of each of the colour bars. #'@param legend_scale Scale factor for the size of the colour bar labels. Takes 1 by default. +#'@param cex_bar_titles Scale factor for the sizes of the bar titles. Takes 1.5 by default. #'@param fileout File where to save the plot. If not specified (default) a graphics device will pop up. Extensions allowed: eps/ps, jpeg, png, pdf, bmp and tiff #'@param width File width, in the units specified in the parameter size_units (inches by default). Takes 8 by default. #'@param height File height, in the units specified in the parameter size_units (inches by default). Takes 5 by default. @@ -69,6 +70,7 @@ PlotCombinedMap <- function(maps, lon, lat, mask = NULL, col_mask = 'grey', dots = NULL, bar_titles = NULL, legend_scale = 1, + cex_bar_titles = 1.5, fileout = NULL, width = 8, height = 5, size_units = 'in', res = 100, ...) { @@ -422,7 +424,7 @@ PlotCombinedMap <- function(maps, lon, lat, draw_separators = TRUE, extra_margin = c(2, 0, 2, 0), label_scale = legend_scale * 1.5) if (!is.null(bar_titles)) { - mtext(bar_titles[[k]], 3, line = -3, cex = 1.5) + mtext(bar_titles[[k]], 3, line = -3, cex = cex_bar_titles) } } diff --git a/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R index 5af7770c..a4c4ed7a 100644 --- a/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R +++ b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R @@ -24,7 +24,6 @@ library(ClimProjDiags) library(zeallot) library(ragg) dir_output <- '/esarchive/scratch/nperez/CSTools_manuscript/v20210603/' #slash end - # -------------------------------------------- # STEP 1: # -------------------------------------------- @@ -119,60 +118,61 @@ CST_SaveExp(newfs, destination = paste0(dir_output, 'RF4/')) # -------------------------------------------- library(s2dv) agg_png(paste0(dir_output, "EXP_11dec.png"), - width = 1000, height = 1100, units = 'px',res = 144) + width = 800, height = 900, units = 'px',res = 144) PlotEquiMap(exp$data[1,1,1,11,,,2],lon = exp$lon, lat = exp$lat, filled.continents = FALSE, bar_limits = c(0,40), - intylat = 2, intxlon = 2, title_scale = 1, - triangle_ends = c(TRUE, FALSE), + intylat = 2, intxlon = 2, title_scale = 0.8, + triangle_ends = c(TRUE, FALSE), degree_sym = TRUE, units_scale = 1.8, toptitle = 'SEAS5', units = 'precipitation (mm)') dev.off() agg_png(paste0(dir_output, "EXPQM_11dec.png"), - width = 1000, height = 1100, units = 'px',res = 144) + width = 800, height = 900, units = 'px',res = 144) PlotEquiMap(exp.qm$data[1,1,1,11,,,2],lon = exp$lon, lat = exp$lat, filled.continents = FALSE, bar_limits = c(0,40), - intylat = 2, intxlon = 2, title_scale = 1, - triangle_ends = c(TRUE, FALSE), + intylat = 2, intxlon = 2, title_scale = 0.8, + triangle_ends = c(TRUE, FALSE), degree_sym = TRUE, units_scale = 1.8, toptitle = 'Bias Adjusted', units = 'precipitation (mm)') dev.off() agg_png(paste0(dir_output, "RF4_Down_11dec.png"), - width = 1000, height = 1100, units = 'px',res = 144) + width = 800, height = 900, units = 'px',res = 144) PlotEquiMap(fs$data[1,1,1,11,,,2],lon = fs$lon, lat = fs$lat, filled.continents = FALSE, bar_limits = c(0,40), - intylat = 2, intxlon = 2, title_scale = 1, - triangle_ends = c(TRUE, FALSE), + intylat = 2, intxlon = 2, title_scale = 0.8, + triangle_ends = c(TRUE, FALSE), degree_sym = TRUE, units_scale = 1.8, toptitle = 'Downscaled nf 4', units = 
'precipitation (mm)') dev.off() agg_png(paste0(dir_output, "RF4_WeightsDec.png"), - width = 1000, height = 1100, units = 'px',res = 144) + width = 800, height = 900, units = 'px',res = 144) PlotEquiMap(weight$data[,,12], lon = weight$lon, lat = weight$lat, - filled.continents = FALSE, title_scale = 1, - intylat = 2, intxlon = 2, + filled.continents = FALSE, title_scale = 0.8, + intylat = 2, intxlon = 2, degree_sym = TRUE, toptitle = 'December Weights nf 4') dev.off() agg_png(paste0(dir_output, "Slope.png"), width = 700, height = 700, units = 'px',res = 144) plot(1:12, slope_plot, type = 'b', main = 'Slope', pch = 16, xlab = 'month', - ylab = 'Slope', bty = 'n') + ylab = 'Slope', bty = 'n', cex.main = 1.2, cex.lab = 1.2) lines(12, slope_plot[12], type = 'p', col = 'red', pch = 16) dev.off() # Plot ForecastPDF library(abind) -obsts <- MeanDims(obs$data, c('lat', 'lon'), na.rm = T) -print(quantile(obsts, c(0.1, 0.3, 0.6, 0.9), na.rm = T)) -expts <- MeanDims(exp$data, c('lat', 'lon'), na.rm = T) -exp.qmts <- MeanDims(exp.qm$data, c('lat', 'lon'), na.rm = T) -empty <- array(NA, c(dataset = 2, member = 225, sdate = 26, ftime = 31, monthly = 8)) -data <- abind(expts, exp.qmts, along = 1) -names(dim(data)) <- names(dim(expts)) +#obsts <- MeanDims(obs$data, c('lat', 'lon'), na.rm = T) +#print(quantile(obsts, c(0.1, 0.3, 0.6, 0.9), na.rm = T)) +#expts <- MeanDims(exp$data, c('lat', 'lon'), na.rm = T) +#exp.qmts <- MeanDims(exp.qm$data, c('lat', 'lon'), na.rm = T) +data <- rbind(exp$data[1, ,1,11,5,4,2], exp.qm$data[1,,1,11,5,4,2]) +empty <- array(NA, c(dataset = 2, members = 225)) +names(dim(data)) <- names(dim(empty)) data <- abind(data, empty, along = 2) -names(dim(data)) <- names(dim(expts)) -fsts <- MeanDims(fs$data, c('lat', 'lon'), na.rm = T) -data <- abind(data, fsts, along = 1) -names(dim(data)) <- c('dataset', 'members', 'sdate', 'ftime', 'monthly') +names(dim(data)) <- names(dim(empty)) +data <- abind(data, fs$data[1,,1,11,15,18 ,2], along = 1) +names(dim(data)) <- names(dim(empty)) +print(dim(data)) +print(quantile(obs$data[1,,,11,5,4,2])) agg_png(paste0(dir_output, "/FiguresPDF_RF4_11DecemberAll2.png"), width = 1100, height = 700, units = 'px', res = 144) -PlotForecastPDF(data[,,1,11,2], tercile.limits = c(0.67, 2.5), +PlotForecastPDF(data, tercile.limits = c(0.67, 2.5), extreme.limits = c(0.09, 7.3), color.set = 'hydro', var.name = "Precipitation (mm)", title = "Forecasts issued on Nov 1993 for 11th December 1993", diff --git a/man/PlotCombinedMap.Rd b/man/PlotCombinedMap.Rd index e631761e..3d6661e1 100644 --- a/man/PlotCombinedMap.Rd +++ b/man/PlotCombinedMap.Rd @@ -19,6 +19,7 @@ PlotCombinedMap( dots = NULL, bar_titles = NULL, legend_scale = 1, + cex_bar_titles = 1.5, fileout = NULL, width = 8, height = 5, @@ -61,6 +62,8 @@ layers via the parameter 'dot_symbol'.} \item{legend_scale}{Scale factor for the size of the colour bar labels. Takes 1 by default.} +\item{cex_bar_titles}{Scale factor for the sizes of the bar titles. Takes 1.5 by default.} + \item{fileout}{File where to save the plot. If not specified (default) a graphics device will pop up. Extensions allowed: eps/ps, jpeg, png, pdf, bmp and tiff} \item{width}{File width, in the units specified in the parameter size_units (inches by default). Takes 8 by default.} -- GitLab From f3c9558813f253ca2b2a7a64e38e4335ff091af1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ver=C3=B3nica=20Torralba-Fern=C3=A1ndez?= Date: Fri, 25 Jun 2021 14:23:11 +0200 Subject: [PATCH 38/48] including a new parameter in CST_Dyn.. 
and checks in Predictability --- R/CST_DynBiasCorrection.R | 25 ++++++++++++++----------- R/Predictability.R | 21 ++++++++++++++++++++- 2 files changed, 34 insertions(+), 12 deletions(-) diff --git a/R/CST_DynBiasCorrection.R b/R/CST_DynBiasCorrection.R index 52096ea6..bb7bc03b 100644 --- a/R/CST_DynBiasCorrection.R +++ b/R/CST_DynBiasCorrection.R @@ -29,6 +29,7 @@ #'@param obs an s2dv_cube object with the reference data #'@param method a character string indicating the method to apply bias correction among these ones: #'"PTF","RQUANT","QUANT","SSPLIN" +#'@param wetday logical indicating whether to perform wet day correction or not OR a numeric threshold below which all values are set to zero. #'@param proxy a character string indicating the proxy for local dimension 'dim' or inverse of persistence 'theta' to apply the dynamical conditioned bias correction method. #'@param quanti a number lower than 1 indicating the quantile to perform the computation of local dimension and theta #'@param ncores The number of cores to use in parallel computation @@ -58,7 +59,7 @@ #' quanti = 0.6) #' #'@export -CST_DynBiasCorrection<- function(exp, obs, method = 'QUANT', +CST_DynBiasCorrection<- function(exp, obs, method = 'QUANT', wetday=wetday, proxy = "dim", quanti, ncores = NULL) { if (!inherits(obs, 's2dv_cube')) { @@ -70,6 +71,7 @@ CST_DynBiasCorrection<- function(exp, obs, method = 'QUANT', "as output by CSTools::CST_Load.") } exp$data <- DynBiasCorrection(exp = exp$data, obs = obs$data, method = method, + wetday=wetday, proxy = proxy, quanti = quanti, ncores = ncores) return(exp) } @@ -104,6 +106,7 @@ CST_DynBiasCorrection<- function(exp, obs, method = 'QUANT', #'@param obs a multidimensional array with named dimensions with the observation data #'@param method a character string indicating the method to apply bias correction among these ones: #'"PTF","RQUANT","QUANT","SSPLIN" +#'@param wetday logical indicating whether to perform wet day correction or not OR a numeric threshold below which all values are set to zero. #'@param proxy a character string indicating the proxy for local dimension 'dim' or inverse of persistence 'theta' to apply the dynamical conditioned bias correction method. 
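Since the new 'wetday' argument is forwarded to the qmap fitting calls patched below, a minimal usage sketch may help. The data construction mirrors the package's own documented example; the 0.1 threshold is purely illustrative and not a value taken from this patch:

# Sketch of the new 'wetday' option (synthetic data). wetday = FALSE keeps
# the previous behaviour; a numeric value is treated as the threshold below
# which all values are set to zero before the quantile mapping is fitted.
library(CSTools)
set.seed(1)
expL <- abs(rnorm(2000))
dim(expL) <- c(time = 100, lat = 4, lon = 5)
obsL <- abs(c(rnorm(1980), expL[1, , ] * 1.2))
dim(obsL) <- c(time = 100, lat = 4, lon = 5)
dynbias.wet <- DynBiasCorrection(exp = expL, obs = obsL, method = 'QUANT',
                                 wetday = 0.1, proxy = 'dim', quanti = 0.6)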
#'@param quanti a number lower than 1 indicating the quantile to perform the computation of local dimension and theta #'@param ncores The number of cores to use in parallel computation @@ -121,7 +124,7 @@ CST_DynBiasCorrection<- function(exp, obs, method = 'QUANT', #'dynbias <- DynBiasCorrection(exp = expL, obs = obsL, method='QUANT', #' proxy= "dim", quanti = 0.6) #'@export -DynBiasCorrection<- function(exp, obs, method = 'QUANT', +DynBiasCorrection<- function(exp, obs, method = 'QUANT',wetday=FALSE, proxy = "dim", quanti, ncores = NULL){ if (is.null(obs)) { stop("Parameter 'obs' cannot be NULL.") @@ -186,13 +189,13 @@ DynBiasCorrection<- function(exp, obs, method = 'QUANT', if (proxy == "dim") { adjusted <- Apply(list(exp, obs), target_dims = 'time', - fun = .dynbias, method, + fun = .dynbias, method, wetday, predyn.exp = predyn.exp$pred.dim$pos.d, predyn.obs = predyn.obs$pred.dim$pos.d, ncores = ncores, output_dims = 'time')$output1 } else if (proxy == "theta") { adjusted <- Apply(list(exp, obs), target_dims = 'time', - fun = .dynbias, method, + fun = .dynbias, method, wetday, predyn.exp = predyn.exp$pred.theta$pos.t, predyn.obs = predyn.obs$pred.theta$pos.t, ncores = ncores, output_dims = 'time')$output1 @@ -218,31 +221,31 @@ DynBiasCorrection<- function(exp, obs, method = 'QUANT', return(adjusted) } -.dynbias <- function(exp, obs, method, predyn.exp, predyn.obs) { +.dynbias <- function(exp, obs, method, wetday,predyn.exp, predyn.obs) { result <- array(rep(NA, length(exp))) res <- lapply(1:3, function(x) { exp_sub <- exp[predyn.exp[[x]]] obs_sub <- obs[predyn.obs[[x]]] - adjust <- .qbiascorrection(exp_sub, obs_sub, method) + adjust <- .qbiascorrection(exp_sub, obs_sub, method,wetday) result[predyn.exp[[x]]] <<- adjust return(NULL) }) return(result) } -.qbiascorrection <- function(expX, obsX, method) { +.qbiascorrection <- function(expX, obsX, method,wetday) { ## functions fitQmap and doQmap if (method == "PTF") { qm.fit <- fitQmap(obsX, expX, method = "PTF", transfun = "expasympt", - cost = "RSS", wett.day = TRUE) + cost = "RSS", wet.day = wetday) qmap <- doQmap(expX, qm.fit) } else if (method == "QUANT") { - qm.fit <- fitQmap(obsX, expX, method = "QUANT", qstep = 0.01) + qm.fit <- fitQmap(obsX, expX, method = "QUANT", qstep = 0.01, wet.day = wetday) qmap <- doQmap(expX, qm.fit, type = "tricub") } else if (method == "RQUANT") { - qm.fit <- fitQmap(obsX, expX, method = "RQUANT", qstep = 0.01) + qm.fit <- fitQmap(obsX, expX, method = "RQUANT", qstep = 0.01,wet.day = wetday) qmap <- doQmap(expX, qm.fit, type = "linear") } else if (method == "SSPLIN") { - qm.fit <- fitQmap(obsX, expX, qstep = 0.01, method = "SSPLIN") + qm.fit <- fitQmap(obsX, expX, qstep = 0.01, method = "SSPLIN",wet.day = wetday) qmap <- doQmap(expX, qm.fit) } else { stop ("Parameter 'method' doesn't match any of the available methods.") diff --git a/R/Predictability.R b/R/Predictability.R index 73df65ee..44d300a6 100644 --- a/R/Predictability.R +++ b/R/Predictability.R @@ -67,7 +67,26 @@ Predictability<- function(dim, theta, ncores = NULL) { # stop("Parameter 'theta' must be of the class 's2dv_cube', ", # "as output by CSTools::CST_Load.") # } - + if (any(names(dim(dim)) %in% 'sdate')) { + if (any(names(dim(dim)) %in% 'ftime')) { + dim <- MergeDims(dim, merge_dims = c('ftime', 'sdate'), + rename_dim = 'time') + } + } + if (!(any(names(dim(dim)) %in% 'time'))){ + stop("Parameter 'dim' must have a temporal dimension named 'time'.") + } + + if (any(names(dim(theta)) %in% 'sdate')) { + if (any(names(dim(theta)) %in% 
'ftime')) {
+      theta <- MergeDims(theta, merge_dims = c('ftime', 'sdate'),
+                        rename_dim = 'time')
+    }
+  }
+  if (!(any(names(dim(theta)) %in% 'time'))){
+    stop("Parameter 'theta' must have a temporal dimension named 'time'.")
+  }
+
   pred <- Apply(list(dim, theta), target_dims = 'time',
                 fun = .predictability,
                 ncores = ncores)
-- 
GitLab


From 513dbcec21000b0f89e0ae77f7dc8d9f98e20704 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ver=C3=B3nica=20Torralba-Fern=C3=A1ndez?=
Date: Fri, 25 Jun 2021 14:41:37 +0200
Subject: [PATCH 39/48] fixing typos

---
 R/Predictability.R | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/R/Predictability.R b/R/Predictability.R
index 44d300a6..ddc516b8 100644
--- a/R/Predictability.R
+++ b/R/Predictability.R
@@ -1,5 +1,5 @@
 #'@rdname Predictability
-#'@title Computing scores of predictability using two dinamical proxies
+#'@title Computing scores of predictability using two dynamical proxies
 #'based on dynamical systems theory.
 #'
 #'@author Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
@@ -11,7 +11,7 @@
 #'computed with CST_ProxiesAttractor or ProxiesAttractor. These terciles will
 #'be used to measure the predictability of the system in dyn_scores. When the
 #'local dimension 'dim' is small and the inverse of persistence 'theta' is
-#'small the predictability is higher, and viceversa.
+#'small the predictability is high, and viceversa.
 #'
 #'@references Faranda, D., Alvarez-Castro, M.C., Messori, G., Rodriguez, D.,
 #'and Yiou, P. (2019). The hammam effect or how a warm ocean enhances large
@@ -21,7 +21,7 @@
 #' Dynamical proxies of North Atlantic predictability and extremes.
 #' Scientific Reports, 7-41278, 2017.
 #'
-#'@param dim data to create the attractor. Must be a matrix with the timesteps in nrow and the grids in ncol
+#'@param dim data to create the attractor. must be a matrix with the timesteps in nrow and the grids in ncol
 #'for instance: dat(time,grids)=(1:24418,1:1060), where time=24418 timesteps and grids=1060 gridpoints
 #
 #'@param theta list of arbitrary length of secondary grids. Each secondary grid is to be provided as a list of length 2 with longitudes and latitudes
@@ -31,20 +31,20 @@ #' \itemize{
 #' \item\code{pred.dim} {a list of two lists 'qdim' and 'pos.d'. The 'qdim' list
 #'contains values of local dimension 'dim' divided by terciles:
-#'d1: lower tercile and more predictability,
+#'d1: lower tercile (high predictability),
 #'d2: middle tercile,
-#'d3: higher tercile and less predictability
+#'d3: higher tercile (low predictability)
 #'The 'pos.d' list contains the position of each tercile in parameter 'dim'}
 #'
 #' \item\code{pred.theta} {a list of two lists 'qtheta' and 'pos.t'.
 #'The 'qtheta' list contains values of the inverse of persistence 'theta'
 #'divided by terciles:
-#'th1: lower tercile and more predictability,
+#'th1: lower tercile (high predictability),
 #'th2: middle tercile,
-#'th3: higher tercile and less predictability
+#'th3: higher tercile (low predictability)
 #'The 'pos.t' list contains the position of each tercile in parameter 'theta'}
 #'}
-#'@return dyn_scores values from 0 to 1. A dyn_score of 1 indicates higher
+#'@return dyn_scores values from 0 to 1. A dyn_score of 1 indicates the highest
 #'predictability.
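For readers reproducing the dimension checks introduced above, a standalone sketch of the merging step on a synthetic array (sizes are arbitrary): the code folds an 'ftime'/'sdate' pair into the single 'time' dimension that the tercile computation expects.

# Sketch of the 'ftime' + 'sdate' handling added to Predictability(): both
# dimensions are merged into one 'time' dimension via CSTools::MergeDims.
library(CSTools)
proxy.dim <- array(runif(24, min = 2, max = 10), c(ftime = 6, sdate = 4))
merged <- MergeDims(proxy.dim, merge_dims = c('ftime', 'sdate'),
                    rename_dim = 'time')
dim(merged) # time = 24, the shape required downstream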
#' #'@examples -- GitLab From c4a575dfdd29e46f64b367e318b738ea5d422394 Mon Sep 17 00:00:00 2001 From: Carmen Alvarez-Castro Date: Fri, 2 Jul 2021 16:59:56 +0200 Subject: [PATCH 40/48] updating documentation --- R/CST_DynBiasCorrection.R | 73 ++++++++++++++++++++++++--------------- R/Predictability.R | 14 ++++---- 2 files changed, 53 insertions(+), 34 deletions(-) diff --git a/R/CST_DynBiasCorrection.R b/R/CST_DynBiasCorrection.R index bb7bc03b..9aa33ced 100644 --- a/R/CST_DynBiasCorrection.R +++ b/R/CST_DynBiasCorrection.R @@ -27,11 +27,16 @@ #' #'@param exp an s2v_cube object with the experiment data #'@param obs an s2dv_cube object with the reference data -#'@param method a character string indicating the method to apply bias correction among these ones: -#'"PTF","RQUANT","QUANT","SSPLIN" -#'@param wetday logical indicating whether to perform wet day correction or not OR a numeric threshold below which all values are set to zero. -#'@param proxy a character string indicating the proxy for local dimension 'dim' or inverse of persistence 'theta' to apply the dynamical conditioned bias correction method. -#'@param quanti a number lower than 1 indicating the quantile to perform the computation of local dimension and theta +#'@param method a character string indicating the method to apply bias +#'correction among these ones: "PTF","RQUANT","QUANT","SSPLIN" +#'@param wetday logical indicating whether to perform wet day correction +#'or not OR a numeric threshold below which all values are set to zero (by +#'default is set to 'FALSE'). +#'@param proxy a character string indicating the proxy for local dimension +#' 'dim' or inverse of persistence 'theta' to apply the dynamical +#' conditioned bias correction method. +#'@param quanti a number lower than 1 indicating the quantile to perform +#'the computation of local dimension and theta #'@param ncores The number of cores to use in parallel computation #' #'@return dynbias an s2dvcube object with a bias correction performed @@ -39,27 +44,30 @@ #' #'@examples #'# example 1: simple data s2dvcube style -#'set.seed(1) -#'expL <- rnorm(1:2000) -#'dim (expL) <- c(time =100,lat = 4, lon = 5) -#'obsL <- c(rnorm(1:1980),expL[1,,]*1.2) -#'dim (obsL) <- c(time = 100,lat = 4, lon = 5) -#'time_obsL <- paste(rep("01", 100), rep("01", 100), 1920 : 2019, sep = "-") -#'time_expL <- paste(rep("01", 100), rep("01", 100), 1929 : 2019, sep = "-") -#'lon <- seq(-1,5,1.5) -#'lat <- seq(30,35,1.5) -# qm=0.98 # too high for this short dataset, it is possible that doesn't -#'# get the requirement, in that case it would be necessary select a lower qm -#'# for instance qm=0.60 -#'expL <- s2dv_cube(data = expL, lat = lat, lon = lon, +#' set.seed(1) +#' expL <- rnorm(1:2000) +#' dim (expL) <- c(time =100,lat = 4, lon = 5) +#' obsL <- c(rnorm(1:1980),expL[1,,]*1.2) +#' dim (obsL) <- c(time = 100,lat = 4, lon = 5) +#' time_obsL <- paste(rep("01", 100), rep("01", 100), 1920 : 2019, sep = "-") +#' time_expL <- paste(rep("01", 100), rep("01", 100), 1929 : 2019, sep = "-") +#' lon <- seq(-1,5,1.5) +#' lat <- seq(30,35,1.5) +#' # qm=0.98 # too high for this short dataset, it is possible that doesn't# get the requirement, in that case it would be necessary select a lower qm +#' # for instance qm=0.60 +#' expL <- s2dv_cube(data = expL, lat = lat, lon = lon, #' Dates = list(start = time_expL, end = time_expL)) -#'obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon, +#' obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon, #' Dates = list(start = time_obsL, end = time_obsL)) -#'dynbias 
<- CST_DynBiasCorrection(exp = expL, obs = obsL, proxy= "dim", +#' # to use DynBiasCorrection +#' dynbias1 <- DynBiasCorrection(exp = expL$data, obs = obsL$data, proxy= "dim", +#' quanti = 0.6) +#' # to use CST_DynBiasCorrection +#' dynbias2 <- CST_DynBiasCorrection(exp = expL, obs = obsL, proxy= "dim", #' quanti = 0.6) #' #'@export -CST_DynBiasCorrection<- function(exp, obs, method = 'QUANT', wetday=wetday, +CST_DynBiasCorrection<- function(exp, obs, method = 'QUANT', wetday=FALSE, proxy = "dim", quanti, ncores = NULL) { if (!inherits(obs, 's2dv_cube')) { @@ -102,16 +110,25 @@ CST_DynBiasCorrection<- function(exp, obs, method = 'QUANT', wetday=wetday, #' Dynamical proxies of North Atlantic predictability and extremes. #' Scientific Reports, 7-41278, 2017. #' -#'@param exp a multidimensional array with named dimensions with the experiment data -#'@param obs a multidimensional array with named dimensions with the observation data -#'@param method a character string indicating the method to apply bias correction among these ones: +#'@param exp a multidimensional array with named dimensions with the +#'experiment data +#'@param obs a multidimensional array with named dimensions with the +#'observation data +#'@param method a character string indicating the method to apply bias +#'correction among these ones: #'"PTF","RQUANT","QUANT","SSPLIN" -#'@param wetday logical indicating whether to perform wet day correction or not OR a numeric threshold below which all values are set to zero. -#'@param proxy a character string indicating the proxy for local dimension 'dim' or inverse of persistence 'theta' to apply the dynamical conditioned bias correction method. -#'@param quanti a number lower than 1 indicating the quantile to perform the computation of local dimension and theta +#'@param wetday logical indicating whether to perform wet day correction +#'or not OR a numeric threshold below which all values are set to zero (by +#'default is set to 'FALSE'). +#'@param proxy a character string indicating the proxy for local dimension +#''dim' or inverse of persistence 'theta' to apply the dynamical conditioned +#'bias correction method. +#'@param quanti a number lower than 1 indicating the quantile to perform the +#'computation of local dimension and theta #'@param ncores The number of cores to use in parallel computation #' -#'@return a multidimensional array with named dimensions with a bias correction performed conditioned by local dimension 'dim' or inverse of persistence 'theta' +#'@return a multidimensional array with named dimensions with a bias correction +#'performed conditioned by local dimension 'dim' or inverse of persistence 'theta' #' #'@import multiApply #'@importFrom s2dverification Subset diff --git a/R/Predictability.R b/R/Predictability.R index ddc516b8..12cd6b41 100644 --- a/R/Predictability.R +++ b/R/Predictability.R @@ -21,10 +21,12 @@ #' Dynamical proxies of North Atlantic predictability and extremes. #' Scientific Reports, 7-41278, 2017. #' -#'@param dim data to create the attractor. must be a matrix with the timesteps in nrow and the grids in ncol -#'for instance: dat(time,grids)=(1:24418,1:1060), where time=24418 timesteps and grids=1060 gridpoints +#'@param dim An array of N named dimensions containing the local dimension as +#'the output of CST_ProxiesAttractor or ProxiesAttractor. # -#'@param theta list of arbitrary length of secondary grids. 
Each secondary grid is to be provided as a list of length 2 with longitudes and latitudes +#'@param theta An array of N named dimensions containing the inverse of the +#'persistence 'theta' as the output of CST_ProxiesAttractor or ProxiesAttractor. +#' #'@param ncores The number of cores to use in parallel computation #' #'@return A list of length 2: @@ -92,9 +94,9 @@ Predictability<- function(dim, theta, ncores = NULL) { ncores = ncores) dim(pred$dyn_scores) <- dim(theta) return(list(pred.dim = list(qdim = list(pred$qdim.d1,pred$qdim.d2,pred$qdim.d3), - pos.d = list(pred$pos.d1,pred$pos.d2,pred$pos.d3)), - pred.theta = list(qtheta = list(pred$qtheta.th1,pred$qtheta.th2,pred$qtheta.th3), - pos.t = list(pred$pos.th1,pred$pos.th2,pred$pos.th3)), + pos.d = list(pred$pos.d1,pred$pos.d2,pred$pos.d3)), + pred.theta = list(qtheta = list(pred$qtheta.th1,pred$qtheta.th2,pred$qtheta.th3), + pos.t = list(pred$pos.th1,pred$pos.th2,pred$pos.th3)), dyn_scores = pred$dyn_scores)) } .predictability <- function(dim, theta) { -- GitLab From d06bf1b2a68dbec32c9d50838eec81af01a9187c Mon Sep 17 00:00:00 2001 From: Carlos Delgado Torres Date: Wed, 28 Jul 2021 12:54:13 +0000 Subject: [PATCH 41/48] Changed s2dv::MeanDims by multiApply(fun = mean) for not losing the dimnames --- R/CST_Calibration.R | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/R/CST_Calibration.R b/R/CST_Calibration.R index b58dd08e..2884712a 100644 --- a/R/CST_Calibration.R +++ b/R/CST_Calibration.R @@ -300,8 +300,8 @@ Calibration <- function(exp, obs, cal.method = "mse_min", #correct evaluation subset var.cor.fc[ , eval.dexes] <- .correct.crps.min.fc(fc.ev , mbm.par, na.rm = na.rm) } else if (cal.method == 'rpc-based') { - ens_mean.ev <- s2dv::MeanDims(data = fc.ev, dims = names(amt.mbr), na.rm = na.rm) - ens_mean.tr <- s2dv::MeanDims(data = fc.tr, dims = names(amt.mbr), na.rm = na.rm) ## Ensemble mean + ens_mean.ev <- multiApply::Apply(data = fc.ev, target_dims = names(amt.mbr), fun = mean, na.rm = na.rm)$output1 ## Ensemble mean + ens_mean.tr <- multiApply::Apply(data = fc.tr, target_dims = names(amt.mbr), fun = mean, na.rm = na.rm)$output1 ## Ensemble mean ens_spread.tr <- multiApply::Apply(data = list(fc.tr, ens_mean.tr), target_dims = names(amt.sdate), fun = "-")$output1 ## Ensemble spread exp_mean.tr <- mean(fc.tr, na.rm = na.rm) ## Mean (climatology) var_signal.tr <- var(ens_mean.tr, na.rm = na.rm) ## Ensemble mean variance -- GitLab From 8420b6000e226e4084c00ff6f52d0a3544c57893 Mon Sep 17 00:00:00 2001 From: nperez Date: Mon, 13 Sep 2021 11:50:57 +0200 Subject: [PATCH 42/48] Latests changes use cases --- inst/doc/UseCase1_WindEvent_March2018.R | 30 +++++++----- ...e2_PrecipitationDownscaling_RainFARM_RF4.R | 49 +++++++++++++++---- ...aunch_UseCase2_PrecipitationDownscaling.sh | 2 +- 3 files changed, 58 insertions(+), 23 deletions(-) diff --git a/inst/doc/UseCase1_WindEvent_March2018.R b/inst/doc/UseCase1_WindEvent_March2018.R index 719f5bf3..d3bbb936 100644 --- a/inst/doc/UseCase1_WindEvent_March2018.R +++ b/inst/doc/UseCase1_WindEvent_March2018.R @@ -11,11 +11,11 @@ rm(list=ls()); gc(); # This code includes the bias adjustent and the results visualization # ---------------------------------------- -library(CSTools) +#library(CSTools) library(s2dv) library(ragg) library(multiApply) -output_dir <- "/esarchive/scratch/nperez/CSTools_manuscript/Wind/" +output_dir <- "/esarchive/scratch/nperez/CSTools_manuscript/v20210603/" exp_path <- list(name = "ECMWFS5", @@ -138,15 +138,17 @@ for (mm in 1:3) { wind_thres_latlon <- 
abind::abind(wind_thres_latlon, thres, along = 4) source("/esarchive/scratch/nperez/git/cstools/R/PlotCombinedMap.R") source("/esarchive/scratch/nperez/git/cstools/R/PlotMostLikelyQuantileMap.R") - agg_png(paste0(output_dir, "MostLikely_", mm, "_obstercile.png"), - width = 1000, height = 1000, units = 'px', res = 144) - PlotMostLikelyQuantileMap(probs = Mean_PB, lon = wind_fsct$lon, lat = wind_fsct$lat, + agg_png(paste0(output_dir, "Wind_MostLikely_", mm, "_obstercile.png"), + width = 1050, height = 1000, units = 'px', res = 144) + PlotMostLikelyQuantileMap(probs = Mean_PB, lon = wind_fsct$lon, + lat = wind_fsct$lat, sizetit = 1.5, intylat = 2, intxlon = 2, coast_width = 1.5, legend_scale = 0.8, - cat_dim = 'bin', dot_size = 2, + cat_dim = 'bin', dot_size = 2.5, + axes_label_scale = 1.6, degree_sym = TRUE, dots = filtered_obs_terciles[,,1,1,1,1], toptitle = c(paste0("Initialized on ", - month.name[as.numeric(months_in_advance[mm])]))) + month.name[as.numeric(months_in_advance[mm])]))) dev.off() } @@ -154,7 +156,7 @@ visual <- data.frame(dec = as.vector(MeanDims(wind_fsct_BC[[3]]$data, c('lat', ' jan = as.vector(MeanDims(wind_fsct_BC[[2]]$data, c('lat', 'lon'))), feb = as.vector(MeanDims(wind_fsct_BC[[1]]$data, c('lat', 'lon')))) - wind_obs_areave <- CST_Load(var = 'windagl100', obs = list(obs_path), + wind_obs_areave <- CSTools::CST_Load(var = 'windagl100', obs = list(obs_path), sdates = '20180301', nmember = 1, leadtimemin = 1, leadtimemax = 1, storefreq = "monthly", sampleperiod = 1, @@ -165,12 +167,14 @@ visual <- data.frame(dec = as.vector(MeanDims(wind_fsct_BC[[3]]$data, c('lat', ' print("IS DATA LOADED") print("Wait") -agg_png(paste0(output_dir, "PlotForecast_IP.png"), - width = 1000, height = 1000, units = 'px',res = 144) -PlotForecastPDF(visual, tercile.limits = wind_ref_terciles, +agg_png(paste0(output_dir, "Wind_PlotForecast_IP.png"), + width = 1000, height = 1000, units = 'px',res = 150) +CSTools::PlotForecastPDF(visual, tercile.limits = wind_ref_terciles, extreme.limits = wind_ref_extremes, - var.name = "Wind Speed 100 m (m/s)", title = "Bias Corrected forecasts at IP", - fcst.names = c("December", "January", "February"), obs = as.vector(wind_obs_areave$data)) + var.name = "Wind Speed 100 m (m/s)", + title = "Bias Corrected forecasts at IP", + fcst.names = c("December", "January", "February"), + obs = as.vector(wind_obs_areave$data)) dev.off() # Plotting observed terciles: diff --git a/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R index a4c4ed7a..ee2df194 100644 --- a/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R +++ b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4.R @@ -98,7 +98,7 @@ load(paste0(dir_output, 'weightsRF4.RDS')) # -------------------------------------------- # STEP 4: # -------------------------------------------- - +Rprof() weights <- Subset(weight$data, along = 'monthly', indices = c(11, 12, 1:6)) slope <- Subset(slope, along = 'monthly', indices = c(11, 12, 1:6), drop = 'non-selected') @@ -113,6 +113,8 @@ newfs <- CST_MergeDims(fs, merge_dims = c("ftime", "monthly"), newfs$Dates[[1]] <- exp$Dates[[1]] CST_SaveExp(newfs, destination = paste0(dir_output, 'RF4/')) +Rprof(NULL) +profile.info <- summaryRprof(paste0(dir_output, "Rprof.out")) # -------------------------------------------- # Visualization # -------------------------------------------- @@ -122,7 +124,8 @@ agg_png(paste0(dir_output, "EXP_11dec.png"), PlotEquiMap(exp$data[1,1,1,11,,,2],lon = exp$lon, lat = 
exp$lat, filled.continents = FALSE, bar_limits = c(0,40), intylat = 2, intxlon = 2, title_scale = 0.8, - triangle_ends = c(TRUE, FALSE), degree_sym = TRUE, units_scale = 1.8, + bar_label_scale = 1.3, axes_label_scale = 1.2, + triangle_ends = c(TRUE, FALSE), degree_sym = TRUE, units_scale = 1.5, toptitle = 'SEAS5', units = 'precipitation (mm)') dev.off() agg_png(paste0(dir_output, "EXPQM_11dec.png"), @@ -130,7 +133,8 @@ agg_png(paste0(dir_output, "EXPQM_11dec.png"), PlotEquiMap(exp.qm$data[1,1,1,11,,,2],lon = exp$lon, lat = exp$lat, filled.continents = FALSE, bar_limits = c(0,40), intylat = 2, intxlon = 2, title_scale = 0.8, - triangle_ends = c(TRUE, FALSE), degree_sym = TRUE, units_scale = 1.8, + bar_label_scale = 1.3, axes_label_scale = 1.2, + triangle_ends = c(TRUE, FALSE), degree_sym = TRUE, units_scale = 1.5, toptitle = 'Bias Adjusted', units = 'precipitation (mm)') dev.off() agg_png(paste0(dir_output, "RF4_Down_11dec.png"), @@ -138,21 +142,23 @@ agg_png(paste0(dir_output, "RF4_Down_11dec.png"), PlotEquiMap(fs$data[1,1,1,11,,,2],lon = fs$lon, lat = fs$lat, filled.continents = FALSE, bar_limits = c(0,40), intylat = 2, intxlon = 2, title_scale = 0.8, - triangle_ends = c(TRUE, FALSE), degree_sym = TRUE, units_scale = 1.8, + bar_label_scale = 1.3, axes_label_scale = 1.2, + triangle_ends = c(TRUE, TRUE), degree_sym = TRUE, units_scale = 1.5, toptitle = 'Downscaled nf 4', units = 'precipitation (mm)') dev.off() agg_png(paste0(dir_output, "RF4_WeightsDec.png"), width = 800, height = 900, units = 'px',res = 144) PlotEquiMap(weight$data[,,12], lon = weight$lon, lat = weight$lat, filled.continents = FALSE, title_scale = 0.8, + bar_label_scale = 1.3, axes_label_scale = 1.2, intylat = 2, intxlon = 2, degree_sym = TRUE, toptitle = 'December Weights nf 4') dev.off() agg_png(paste0(dir_output, "Slope.png"), width = 700, height = 700, units = 'px',res = 144) plot(1:12, slope_plot, type = 'b', main = 'Slope', pch = 16, xlab = 'month', - ylab = 'Slope', bty = 'n', cex.main = 1.2, cex.lab = 1.2) -lines(12, slope_plot[12], type = 'p', col = 'red', pch = 16) + ylab = 'Slope', bty = 'n', cex.main = 1.5, cex.lab = 1.3, cex = 1.5) +lines(12, slope_plot[12], type = 'p', col = 'red', pch = 16, cex = 1.3) dev.off() # Plot ForecastPDF @@ -161,6 +167,8 @@ library(abind) #print(quantile(obsts, c(0.1, 0.3, 0.6, 0.9), na.rm = T)) #expts <- MeanDims(exp$data, c('lat', 'lon'), na.rm = T) #exp.qmts <- MeanDims(exp.qm$data, c('lat', 'lon'), na.rm = T) +print("Quantiles gridpoint") +print(quantile(as.vector(exp.qm$data[1,,,11,5,4,2]), c(0.1,0.3,0.6,0.9))) data <- rbind(exp$data[1, ,1,11,5,4,2], exp.qm$data[1,,1,11,5,4,2]) empty <- array(NA, c(dataset = 2, members = 225)) names(dim(data)) <- names(dim(empty)) @@ -170,9 +178,30 @@ data <- abind(data, fs$data[1,,1,11,15,18 ,2], along = 1) names(dim(data)) <- names(dim(empty)) print(dim(data)) print(quantile(obs$data[1,,,11,5,4,2])) -agg_png(paste0(dir_output, "/FiguresPDF_RF4_11DecemberAll2.png"), - width = 1100, height = 700, units = 'px', res = 144) -PlotForecastPDF(data, tercile.limits = c(0.67, 2.5), +agg_png(paste0(dir_output, "/FiguresPDF_RF4_11December_GridPoint.png"), + width = 1000, height = 1000, units = 'px', res = 144) +PlotForecastPDF(data, tercile.limits = c(0.02, 1.17), + extreme.limits = c(0.0155555, 7.75), color.set = 'hydro', + var.name = "Precipitation (mm)", add.ensmemb = 'no', + title = "Forecasts issued on Nov 1993 for 11th December 1993", + fcst.names = c("SEAS5", "Bias Adjusted", "Downscaled nf 4")) +dev.off() + +obsts <- MeanDims(obs$data, c('lat', 
'lon'), na.rm = T) +print(quantile(obsts, c(0.1, 0.3, 0.6, 0.9), na.rm = T)) +expts <- MeanDims(exp$data, c('lat', 'lon'), na.rm = T) +exp.qmts <- MeanDims(exp.qm$data, c('lat', 'lon'), na.rm = T) +empty <- array(NA, c(dataset = 2, member = 225, sdate = 26, ftime = 31, monthly = 8)) +data <- abind(expts, exp.qmts, along = 1) +names(dim(data)) <- names(dim(expts)) +data <- abind(data, empty, along = 2) +names(dim(data)) <- names(dim(expts)) +fsts <- MeanDims(fs$data, c('lat', 'lon'), na.rm = T) +data <- abind(data, fsts, along = 1) +names(dim(data)) <- c('dataset', 'members', 'sdate', 'ftime', 'monthly') +agg_png(paste0(dir_output, "/FiguresPDF_RF4_11December_Areave.png"), + width = 1400, height = 800, units = 'px', res = 144) +PlotForecastPDF(data[,,1,11,2], tercile.limits = c(0.67, 2.5), extreme.limits = c(0.09, 7.3), color.set = 'hydro', var.name = "Precipitation (mm)", title = "Forecasts issued on Nov 1993 for 11th December 1993", @@ -182,3 +211,5 @@ dev.off() + + diff --git a/inst/doc/launch_UseCase2_PrecipitationDownscaling.sh b/inst/doc/launch_UseCase2_PrecipitationDownscaling.sh index 55c3e848..2e1dd939 100644 --- a/inst/doc/launch_UseCase2_PrecipitationDownscaling.sh +++ b/inst/doc/launch_UseCase2_PrecipitationDownscaling.sh @@ -1,5 +1,5 @@ #!/bin/bash -#BSUB -W 2:00 +#BSUB -W 6:00 #BSUB -n 16 #BSUB -M 7000 #BSUB -J RainFARM_Downsc -- GitLab From 548c902065b1626d364d3614211fa5f6ebf31a32 Mon Sep 17 00:00:00 2001 From: nperez Date: Mon, 13 Sep 2021 14:43:09 +0200 Subject: [PATCH 43/48] Tested on Win-builder --- DESCRIPTION | 2 +- NAMESPACE | 2 +- NEWS.md | 2 ++ R/CST_DynBiasCorrection.R | 3 ++- man/CST_DynBiasCorrection.Rd | 23 ++++++++++++++++++----- man/DynBiasCorrection.Rd | 24 ++++++++++++++++++------ man/Predictability.Rd | 21 +++++++++++---------- vignettes/Analogs_vignette.Rmd | 4 ++-- vignettes/RainFARM_vignette.Rmd | 2 +- 9 files changed, 56 insertions(+), 27 deletions(-) diff --git a/DESCRIPTION b/DESCRIPTION index aecd89b3..97a80c72 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -84,4 +84,4 @@ VignetteBuilder: knitr License: Apache License 2.0 Encoding: UTF-8 LazyData: true -RoxygenNote: 7.0.2 +RoxygenNote: 7.0.1 diff --git a/NAMESPACE b/NAMESPACE index 26042662..34851f66 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -120,4 +120,4 @@ importFrom(utils,head) importFrom(utils,read.table) importFrom(utils,tail) importFrom(verification,verify) -useDynLib(CSTools) +useDynLib(CSTools, .registration = TRUE) diff --git a/NEWS.md b/NEWS.md index 9a4c78e0..154c6307 100644 --- a/NEWS.md +++ b/NEWS.md @@ -3,6 +3,8 @@ **Submission date to CRAN: XX-06-2021** - New features: + + Dynamical Bias Correction method: `CST_ProxiesAttractors` and `CST_DynBiasCorrection` + (optionally `Predictability`) + CST_BiasCorrection and BiasCorrection allows to calibrate a forecast given the calibration in the hindcast by using parameter 'exp_cor'. 
- Fixes: diff --git a/R/CST_DynBiasCorrection.R b/R/CST_DynBiasCorrection.R index 9aa33ced..20c263c6 100644 --- a/R/CST_DynBiasCorrection.R +++ b/R/CST_DynBiasCorrection.R @@ -53,7 +53,8 @@ #' time_expL <- paste(rep("01", 100), rep("01", 100), 1929 : 2019, sep = "-") #' lon <- seq(-1,5,1.5) #' lat <- seq(30,35,1.5) -#' # qm=0.98 # too high for this short dataset, it is possible that doesn't# get the requirement, in that case it would be necessary select a lower qm +#' # qm=0.98 # too high for this short dataset, it is possible that doesn't +#' # get the requirement, in that case it would be necessary select a lower qm #' # for instance qm=0.60 #' expL <- s2dv_cube(data = expL, lat = lat, lon = lon, #' Dates = list(start = time_expL, end = time_expL)) diff --git a/man/CST_DynBiasCorrection.Rd b/man/CST_DynBiasCorrection.Rd index 23eefd92..e2467e72 100644 --- a/man/CST_DynBiasCorrection.Rd +++ b/man/CST_DynBiasCorrection.Rd @@ -9,6 +9,7 @@ CST_DynBiasCorrection( exp, obs, method = "QUANT", + wetday = FALSE, proxy = "dim", quanti, ncores = NULL @@ -19,12 +20,19 @@ CST_DynBiasCorrection( \item{obs}{an s2dv_cube object with the reference data} -\item{method}{a character string indicating the method to apply bias correction among these ones: -"PTF","RQUANT","QUANT","SSPLIN"} +\item{method}{a character string indicating the method to apply bias +correction among these ones: "PTF","RQUANT","QUANT","SSPLIN"} -\item{proxy}{a character string indicating the proxy for local dimension 'dim' or inverse of persistence 'theta' to apply the dynamical conditioned bias correction method.} +\item{wetday}{logical indicating whether to perform wet day correction +or not OR a numeric threshold below which all values are set to zero (by +default is set to 'FALSE').} -\item{quanti}{a number lower than 1 indicating the quantile to perform the computation of local dimension and theta} +\item{proxy}{a character string indicating the proxy for local dimension +'dim' or inverse of persistence 'theta' to apply the dynamical +conditioned bias correction method.} + +\item{quanti}{a number lower than 1 indicating the quantile to perform +the computation of local dimension and theta} \item{ncores}{The number of cores to use in parallel computation} } @@ -54,13 +62,18 @@ time_obsL <- paste(rep("01", 100), rep("01", 100), 1920 : 2019, sep = "-") time_expL <- paste(rep("01", 100), rep("01", 100), 1929 : 2019, sep = "-") lon <- seq(-1,5,1.5) lat <- seq(30,35,1.5) +# qm=0.98 # too high for this short dataset, it is possible that doesn't # get the requirement, in that case it would be necessary select a lower qm # for instance qm=0.60 expL <- s2dv_cube(data = expL, lat = lat, lon = lon, Dates = list(start = time_expL, end = time_expL)) obsL <- s2dv_cube(data = obsL, lat = lat, lon = lon, Dates = list(start = time_obsL, end = time_obsL)) -dynbias <- CST_DynBiasCorrection(exp = expL, obs = obsL, proxy= "dim", +# to use DynBiasCorrection +dynbias1 <- DynBiasCorrection(exp = expL$data, obs = obsL$data, proxy= "dim", + quanti = 0.6) +# to use CST_DynBiasCorrection +dynbias2 <- CST_DynBiasCorrection(exp = expL, obs = obsL, proxy= "dim", quanti = 0.6) } diff --git a/man/DynBiasCorrection.Rd b/man/DynBiasCorrection.Rd index f496c13d..e6de373c 100644 --- a/man/DynBiasCorrection.Rd +++ b/man/DynBiasCorrection.Rd @@ -9,27 +9,39 @@ DynBiasCorrection( exp, obs, method = "QUANT", + wetday = FALSE, proxy = "dim", quanti, ncores = NULL ) } \arguments{ -\item{exp}{a multidimensional array with named dimensions with the experiment data} 
+\item{exp}{a multidimensional array with named dimensions with the +experiment data} -\item{obs}{a multidimensional array with named dimensions with the observation data} +\item{obs}{a multidimensional array with named dimensions with the +observation data} -\item{method}{a character string indicating the method to apply bias correction among these ones: +\item{method}{a character string indicating the method to apply bias +correction among these ones: "PTF","RQUANT","QUANT","SSPLIN"} -\item{proxy}{a character string indicating the proxy for local dimension 'dim' or inverse of persistence 'theta' to apply the dynamical conditioned bias correction method.} +\item{wetday}{logical indicating whether to perform wet day correction +or not OR a numeric threshold below which all values are set to zero (by +default is set to 'FALSE').} -\item{quanti}{a number lower than 1 indicating the quantile to perform the computation of local dimension and theta} +\item{proxy}{a character string indicating the proxy for local dimension +'dim' or inverse of persistence 'theta' to apply the dynamical conditioned +bias correction method.} + +\item{quanti}{a number lower than 1 indicating the quantile to perform the +computation of local dimension and theta} \item{ncores}{The number of cores to use in parallel computation} } \value{ -a multidimensional array with named dimensions with a bias correction performed conditioned by local dimension 'dim' or inverse of persistence 'theta' +a multidimensional array with named dimensions with a bias correction +performed conditioned by local dimension 'dim' or inverse of persistence 'theta' } \description{ This function perform a bias correction conditioned by the diff --git a/man/Predictability.Rd b/man/Predictability.Rd index c36d6fa5..d37efcdc 100644 --- a/man/Predictability.Rd +++ b/man/Predictability.Rd @@ -2,16 +2,17 @@ % Please edit documentation in R/Predictability.R \name{Predictability} \alias{Predictability} -\title{Computing scores of predictability using two dinamical proxies +\title{Computing scores of predictability using two dynamical proxies based on dynamical systems theory.} \usage{ Predictability(dim, theta, ncores = NULL) } \arguments{ -\item{dim}{data to create the attractor. Must be a matrix with the timesteps in nrow and the grids in ncol -for instance: dat(time,grids)=(1:24418,1:1060), where time=24418 timesteps and grids=1060 gridpoints} +\item{dim}{An array of N named dimensions containing the local dimension as +the output of CST_ProxiesAttractor or ProxiesAttractor.} -\item{theta}{list of arbitrary length of secondary grids. Each secondary grid is to be provided as a list of length 2 with longitudes and latitudes} +\item{theta}{An array of N named dimensions containing the inverse of the +persistence 'theta' as the output of CST_ProxiesAttractor or ProxiesAttractor.} \item{ncores}{The number of cores to use in parallel computation} } @@ -20,21 +21,21 @@ A list of length 2: \itemize{ \item\code{pred.dim} {a list of two lists 'qdim' and 'pos.d'. The 'qdim' list contains values of local dimension 'dim' divided by terciles: -d1: lower tercile and more predictability, +d1: lower tercile (high predictability), d2: middle tercile, -d3: higher tercile and less predictability +d3: higher tercile (low predictability) The 'pos.d' list contains the position of each tercile in parameter 'dim'} \item\code{pred.theta} {a list of two lists 'qtheta' and 'pos.t'. 
The 'qtheta' list contains values of the inverse of persistence 'theta' divided by terciles: -th1: lower tercile and more predictability, +th1: lower tercile (high predictability), th2: middle tercile, -th3: higher tercile and less predictability +th3: higher tercile (low predictability) The 'pos.t' list contains the position of each tercile in parameter 'theta'} } -dyn_scores values from 0 to 1. A dyn_score of 1 indicates higher +dyn_scores values from 0 to 1. A dyn_score of 1 indicates the highest predictability. } \description{ @@ -42,7 +43,7 @@ This function divides in terciles the two dynamical proxies computed with CST_ProxiesAttractor or ProxiesAttractor. These terciles will be used to measure the predictability of the system in dyn_scores. When the local dimension 'dim' is small and the inverse of persistence 'theta' is -small the predictability is higher, and viceversa. +small the predictability is high, and viceversa. } \examples{ # Creating an example of matrix dat(time,grids): diff --git a/vignettes/Analogs_vignette.Rmd b/vignettes/Analogs_vignette.Rmd index 5a52a05d..31fb9c29 100644 --- a/vignettes/Analogs_vignette.Rmd +++ b/vignettes/Analogs_vignette.Rmd @@ -156,7 +156,7 @@ dateseq <- format(seq(start, end, by = "year"), "%Y%m%d") Using the `CST_Load` function from **CSTool package**, the data available in our data store can be loaded. The following lines show how this function can be used. The experimental datasets are interpolated to the ERA5 grid by specifying the 'grid' parameter while ERA5 doesn't need to be interpolated. While parameter leadtimemax is set to 1 for the experimental dataset, it is set to 31 for the observations, returning the daily observations for October for the years requested in 'sdate' (2000-2006). -Download the data to run the recipe in the link https://downloads.cmcc.bo.it/d_chaves/ANALOGS/data_for_Analogs.Rdat or ask carmen.alvarez-castro at cmcc.it or nuria.perez at bsc.es. +Download the data to run the recipe under the HTTPS: downloads.cmcc.bo.it/d_chaves/ANALOGS/data_for_Analogs.Rdat or ask carmen.alvarez-castro at cmcc.it or nuria.perez at bsc.es. ``` exp <- list(name = 'ECMWF_system4_m1', @@ -373,4 +373,4 @@ down4 <- CST_Analogs(expL = expPSL, obsL = obsPSL, AnalogsInfo = TRUE, In this case, the best analog is still being 7th of October, 2005. -*Note: You can compute the anomalies values before applying the criterias (as in Yiou et al, 2013) using `CST_Anomaly` of CSTools package* \ No newline at end of file +*Note: You can compute the anomalies values before applying the criterias (as in Yiou et al, 2013) using `CST_Anomaly` of CSTools package* diff --git a/vignettes/RainFARM_vignette.Rmd b/vignettes/RainFARM_vignette.Rmd index dbcb48a4..c47d0e73 100644 --- a/vignettes/RainFARM_vignette.Rmd +++ b/vignettes/RainFARM_vignette.Rmd @@ -118,7 +118,7 @@ RainFARM has downscaled the original field with a realistic fine-scale correlati The area of interest in our example presents a complex orography, but the basic RainFARM algorithm used does not consider topographic elevation in deciding how to distribute fine-scale precipitation. A long term climatology of the downscaled fields would have a resolution comparable to that of the original coarse fields and would not resemble the fine-scale structure of an observed climatology. If an external fine-scale climatology of precipitation is available, we can use the method discussed in Terzago et al. 
(2018) to change the distribution of precipitation by RainFARM for each timestep, so that the long-term average is close to this reference climatology in terms of precipitation distribution (while the total precipitation amount of the original fields to downscale is preserved). -Suitable climatology files could be for example a fine-scale precipitation climatology from a high-resolution regional climate model (see e.g. Terzago et al. 2018), a local high-resolution gridded climatology from observations, or a reconstruction such as those which can be downloaded from the WORLDCLIM (https://www.worldclim.org) or CHELSA (https://chelsa-climate.org) websites. The latter data will need to be converted to NetCDF format before being used (see for example the GDAL tools (https://gdal.org). +Suitable climatology files could be for example a fine-scale precipitation climatology from a high-resolution regional climate model (see e.g. Terzago et al. 2018), a local high-resolution gridded climatology from observations, or a reconstruction such as those which can be downloaded from the WORLDCLIM (https://www.worldclim.org) or CHELSA (chelsa-climate.org) websites. The latter data will need to be converted to NetCDF format before being used (see for example the GDAL tools (https://gdal.org). We will assume that a copy of the WORLDCLIM precipitation climatology at 30 arcseconds (about 1km resolution) is available in the local file `medscope.nc`. From this file we can derive suitable weights to be used with RainFARM using the `CST_RFWeights` functions as follows: ```{r} ww <- CST_RFWeights("./worldclim.nc", nf = 20, lon = exp$lon, lat = exp$lat) -- GitLab From b92f0291ea913d4e92a4975095b9f82559432443 Mon Sep 17 00:00:00 2001 From: nperez Date: Fri, 1 Oct 2021 12:37:17 +0200 Subject: [PATCH 44/48] Update --- ..._PrecipitationDownscaling_RainFARM_RF100.R | 37 +------------------ ..._UseCase2_PrecipitationDownscaling_RF4.sh} | 0 2 files changed, 1 insertion(+), 36 deletions(-) rename inst/doc/{launch_UseCase2_PrecipitationDownscaling.sh => launch_UseCase2_PrecipitationDownscaling_RF4.sh} (100%) diff --git a/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF100.R b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF100.R index 7b057180..146382dc 100644 --- a/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF100.R +++ b/inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF100.R @@ -116,16 +116,9 @@ weights <- Subset(weight$data, along = 'monthly', indices = c(11, 12, 1:6)) slope <- Subset(slope, along = 'monthly', indices = c(11, 12, 1:6), drop = 'non-selected') k = 1 # To create the member ID -#----- -# To be removed when CSTools 4.0.1 is published: -source("/gpfs/scratch/bsc32/bsc32339/CSTools_manuscript/CST_SaveExp.R") -library(multiApply) -library(ncdf4) -library(s2dv) -#----- for (realizations in 1:10) { for (member in 1:25) { - result <- data # to store the data + result <- exp.qm # to store the data result$data <- NULL for (month in 1:8) { data <- exp.qm # to take the correct piece of data @@ -135,7 +128,6 @@ for (realizations in 1:10) { kmin = 1, nens = 1, verbose = TRUE, nprocs = 8, drop_realization = TRUE) - print(dim(fs$data)) result$data <- abind::abind(result$data, fs$data, along = 5) if (month == 2 & member == 1 & realization == 1) { # ---------------------------- @@ -149,33 +141,6 @@ for (realizations in 1:10) { triangle_ends = c(TRUE, FALSE), toptitle = 'Downsacaled RF 100', units = 'precipitation (mm)') dev.off() - # PlotForecastPDF - library(abind) - obsts <- MeanDims(obs$data, 
c('lat', 'lon'), na.rm = T) - print(quantile(obsts, c(0.1, 0.3, 0.6, 0.9), na.rm = T)) - expts <- MeanDims(exp$data, c('lat', 'lon'), na.rm = T) - exp.qmts <- MeanDims(exp.qm$data, c('lat', 'lon'), na.rm = T) - empty <- array(NA, c(dataset = 2, member = 225, sdate = 26, - ftime = 31, monthly = 8)) - data <- abind(expts, exp.qmts, along = 1) - names(dim(data)) <- names(dim(expts)) - data <- abind(data, empty, along = 2) - names(dim(data)) <- names(dim(expts)) - fsts <- MeanDims(fs$data, c('lat', 'lon'), na.rm = T) - print(dim(fsts)) - print(dim(data)) - data <- abind(data, fsts, along = 1) - names(dim(data)) <- c('dataset', 'members', 'sdate', 'ftime', 'monthly') - agg_png(paste0(dir_output, "/FiguresPDF_RF100_11DecemberAll2.png"), - width = 750, height = 650, units = 'px', res = 144) - PlotForecastPDF(data[,,1,11,2], tercile.limits = c(0.58, 2.4), - obs = obsts[,,1,11,2], - extreme.limits = c(0.06, 7.26), - color.set = 'hydro', add.ensmemb = 'no', - var.name = "Precipitation (mm)", - title = "Forecasts issued on Nov 1993 for 11th December 1993", - fcst.names = c("ECMWFS5C3S", "Bias Adjusted", "Downscaled")) - dev.off() } result$lon <- fs$lon result$lat <- fs$lat diff --git a/inst/doc/launch_UseCase2_PrecipitationDownscaling.sh b/inst/doc/launch_UseCase2_PrecipitationDownscaling_RF4.sh similarity index 100% rename from inst/doc/launch_UseCase2_PrecipitationDownscaling.sh rename to inst/doc/launch_UseCase2_PrecipitationDownscaling_RF4.sh -- GitLab From d2b117c82082dd6fd2d36cdd0fd4fcda926d1df8 Mon Sep 17 00:00:00 2001 From: nperez Date: Fri, 1 Oct 2021 12:41:51 +0200 Subject: [PATCH 45/48] Update code use case 3 --- .../UseCase3_data_preparation_SCHEME_model.R | 90 +++++++++++++++++-- 1 file changed, 83 insertions(+), 7 deletions(-) diff --git a/inst/doc/UseCase3_data_preparation_SCHEME_model.R b/inst/doc/UseCase3_data_preparation_SCHEME_model.R index 0ff104db..ada24ef2 100644 --- a/inst/doc/UseCase3_data_preparation_SCHEME_model.R +++ b/inst/doc/UseCase3_data_preparation_SCHEME_model.R @@ -1,3 +1,6 @@ +# Author: Bert Van Schaeybroeck +# Use Case 3: Seasonal forecasts for a river flow +# ----------------------------------------------- rm(list = ls()) library(CSTools) library(s2dverification) @@ -18,6 +21,7 @@ domain.high.res <- "greece_high_res" domain.low.res <- "greece_low_res" sdate.mon.to.use <- 5 #Month of startdate for ECMWF Sys5 possibilities are 5 (May) or 11 (November) sdate.day.to.use <- 1 #Day of startdate for ECMWF Sys5 only possibility is 1 +make.plot.msk <- F #mask to indicate if figures need to be made based on the output of the Analog function. 
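The substantive change in this patch is that the analog search is now fed mean-sea-level-pressure anomalies rather than raw fields; the corresponding CST_Anomaly() calls appear just below. A self-contained sketch of that step on a toy cube (dimension sizes and coordinates are illustrative; in the use case the cubes come from CST_Load):

# Sketch: anomalies along the third (temporal) dimension, matching the
# dim_anom = 3 calls added by this patch (toy s2dv_cube with synthetic data).
library(CSTools)
dat <- array(rnorm(120), c(dataset = 1, member = 1, sdate = 30, lat = 2, lon = 2))
cube <- s2dv_cube(data = dat, lat = c(40, 41), lon = c(20, 21))
cube.an <- CST_Anomaly(exp = cube, dim_anom = 3) # climatology removed over 'sdate'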
 #LOCAL PARAMETERS (to be adjusted for each working system)
 #---------------------------------------------------------
@@ -210,6 +214,10 @@ exp.msl.eur.merge <- CST_MergeDims(
   merge_dims = c("sdate", "ftime"),
   rename_dim = "sdate")
 
+obs.msl.eur.merge.an <- CST_Anomaly(exp = obs.msl.eur.merge, dim_anom = 3)
+exp.msl.eur.merge.an <- CST_Anomaly(exp = exp.msl.eur.merge, dim_anom = 3)
+
+
 #Load observational and forecast set of variable that needs to be calibrated and downscaled:
 file.to.load <- paste0(dir.rdata, "data_all.RData")
 if(file.exists(file.to.load)){
@@ -333,8 +341,8 @@ lon.low.res <- as.vector(cal.merge$lon)
 lat.low.res <- as.vector(cal.merge$lat)
 lon.high.res <- as.vector(obs.high.res$lon)
 lat.high.res <- as.vector(obs.high.res$lat)
-lon.eur <- as.vector(obs.msl.eur.merge$lon)
-lat.eur <- as.vector(obs.msl.eur.merge$lat)
+lon.eur <- as.vector(obs.msl.eur.merge.an$lon)
+lat.eur <- as.vector(obs.msl.eur.merge.an$lat)
 
 #amount of lead times in months. For ECMWF Sys5 it is 7:
 amt.lead.mon <- as.numeric(dim(cal.merge$data)["monthly"])
@@ -387,9 +395,9 @@ i.mbr.obs <- 1
 for(i.mbr in seq(1, amt.mbr)){
   for(i.mon in seq(1, amt.lead.mon)){
     for(i.sdate in seq(1, amt.sdate)){
-      i.mbr <- 1
-      i.mon = 1
-      i.sdate = 24
+      #i.mbr <- 1
+      #i.mon = 1
+      #i.sdate = 24
       date.to.use <- sub.time[ i.sdate, i.mon]
       date.an.lst <- sub.time[ , i.mon]
       cat("i.mbr = ", i.mbr, ", i.mon =", i.mon, ", i.sdate = ",
@@ -400,10 +408,10 @@ for(i.mbr in seq(1, amt.mbr)){
       exp.low.res.tmp <- exp.merge$data[i.dataset, i.mbr, i.sdate, , , i.mon]
       cal.low.res.tmp <- cal.merge$data[i.dataset, i.mbr, i.sdate, , , i.mon]
       #Extract the large-scale pressure field of that day
-      exp.msl.eur.tmp <- exp.msl.eur.merge$data[i.dataset, i.mbr, i.sdate, , , i.mon]
+      exp.msl.eur.tmp <- exp.msl.eur.merge.an$data[i.dataset, i.mbr, i.sdate, , , i.mon]
       #Extract all observations that will be used to find analogs
-      obs.msl.eur.tmp <- obs.msl.eur.merge$data[i.dataset, i.mbr.obs, , , , i.mon]#-i.sdate
+      obs.msl.eur.tmp <- obs.msl.eur.merge.an$data[i.dataset, i.mbr.obs, , , , i.mon]#-i.sdate
       obs.low.res.tmp <- obs.low.res.merge$data[i.dataset, i.mbr.obs, , , , i.mon] #-i.sdate
       obs.high.res.tmp <- obs.high.res.merge$data[i.dataset, i.mbr.obs, , , , i.mon] #-i.sdate
       names(dim(obs.high.res.tmp)) <- c("time", "lat", "lon")
@@ -428,9 +436,78 @@ for(i.mbr in seq(1, amt.mbr)){
         excludeTime = date.to.use,
         AnalogsInfo = T,
         nAnalogs = 1000)
+
+
+      if(make.plot.msk){
+        corr.date <- as.character(res$dates[1]) #select the date of the most similar field
+        corr.dex <- which(date.an.lst == corr.date)
+
+        #The following figure shows the uncalibrated raw model field (analogous to Fig. 9a)
+        file.fig <- paste0("mbr_", i.mbr, "_mon_", i.mon, 
+                           "_sdate_", date.to.use, "_exp.low.res.pdf")
+        pdf(file = file.fig)
+        PlotEquiMap(
+          exp.low.res.tmp[ , ],
+          lon = obs.low.res.merge$lon,
+          lat = obs.low.res.merge$lat,
+          filled.continents = F,
+          intylat = 2,
+          intxlon = 2,
+          title_scale = 0.7, #bar_limits = c(0, 60),
+          units = "precipitation (mm)")
+        dev.off()
+
+        #The following figure includes the calibrated model field (analogous to Fig. 9b)
+        file.fig <- paste0("mbr_", i.mbr, "_mon_", i.mon, 
+                           "_sdate_", date.to.use, "_cal.low.res.pdf")
+        pdf(file = file.fig)
+        PlotEquiMap(
+          cal.low.res.tmp,
+          lon = obs.low.res.merge$lon,
+          lat = obs.low.res.merge$lat,
+          filled.continents = F,
+          intylat = 2,
+          intxlon = 2,
+          title_scale = 0.7, #bar_limits = c(0, 60),
+          units = "precipitation (mm)")
+        dev.off()
+
+        #The following figure includes the analog upscaled field (analogous to Fig. 9c)
+        file.fig <- paste0("mbr_", i.mbr, "_mon_", i.mon, 
+                           "_sdate_", date.to.use, "_obs.low.res.pdf")
+        pdf(file = file.fig)
+        PlotEquiMap(
+          obs.low.res.tmp[corr.dex, , ],
+          lon = obs.low.res.merge$lon,
+          lat = obs.low.res.merge$lat,
+          filled.continents = F,
+          intylat = 2,
+          intxlon = 2,
+          title_scale = 0.7, #bar_limits = c(0, 60),
+          units = "precipitation (mm)")
+        dev.off()
+
+        #The following figure includes the analog field (analogous to Fig. 9d)
+        file.fig <- paste0("mbr_", i.mbr, "_mon_", i.mon, 
+                           "_sdate_", date.to.use, "_obs.high.res.pdf")
+        pdf(file = file.fig)
+        PlotEquiMap(
+          obs.high.res.tmp[corr.dex, , ],
+          lon = obs.high.res.merge$lon,
+          lat = obs.high.res.merge$lat,
+          filled.continents = F,
+          intylat = 2,
+          intxlon = 2,
+          title_scale = 0.7, #bar_limits = c(0, 60),
+          units = "precipitation (mm)")
+        dev.off()
+
+
+      }
     }
   }
 }
 
+
-- 
GitLab


From 69cc3b077a54fc7c84289e22437ae8b6a4eb57c8 Mon Sep 17 00:00:00 2001
From: nperez
Date: Mon, 4 Oct 2021 10:36:54 +0200
Subject: [PATCH 46/48] remove tests from the package

---
 .Rbuildignore | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.Rbuildignore b/.Rbuildignore
index b2d8e5fc..fa596e70 100644
--- a/.Rbuildignore
+++ b/.Rbuildignore
@@ -5,4 +5,4 @@
 ./.nc$
 .*^(?!data)\.RData$
 .*\.gitlab-ci.yml$
-#^tests$
+^tests$
-- 
GitLab


From 7305bc8ad2013f171fb5477e73b46600320f2dc6 Mon Sep 17 00:00:00 2001
From: nperez
Date: Mon, 4 Oct 2021 10:52:39 +0200
Subject: [PATCH 47/48] news

---
 NEWS.md | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/NEWS.md b/NEWS.md
index 154c6307..6d66f0a2 100644
--- a/NEWS.md
+++ b/NEWS.md
@@ -1,11 +1,13 @@
 ### CSTools 4.0.1
-<<<<<<< HEAD
 **Submission date to CRAN: XX-06-2021**
 
 - New features:
   + Dynamical Bias Correction method: `CST_ProxiesAttractors` and `CST_DynBiasCorrection`
   (optionally `Predictability`)
   + CST_BiasCorrection and BiasCorrection allows to calibrate a forecast given the calibration in the hindcast by using parameter 'exp_cor'.
+  + Use cases
+  + CST_SaveExp includes parameter extra_string
+  + PlotCombinedMap includes parameter cex_bar_titles
 
 - Fixes:
   + Calibration retains correlation absolute value
-- 
GitLab


From c972dba35a75ba30660edbe5e920a27b5eb3c5d7 Mon Sep 17 00:00:00 2001
From: nperez
Date: Mon, 4 Oct 2021 10:55:04 +0200
Subject: [PATCH 48/48] roxygen2 version installed in R 3.4.2

---
 DESCRIPTION | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/DESCRIPTION b/DESCRIPTION
index 318b382f..52999f2a 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -85,4 +85,4 @@ VignetteBuilder: knitr
 License: Apache License 2.0
 Encoding: UTF-8
 LazyData: true
-RoxygenNote: 7.0.1
+RoxygenNote: 7.0.2
-- 
GitLab
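As a closing illustration of the 'exp_cor' feature listed in the NEWS entries above, a sketch of calibrating an independent forecast with coefficients estimated on a hindcast/observation pair. The data are synthetic, dimension names follow the package convention, and the single-start-date forecast is an assumption of this sketch rather than something shown in the patches:

# Sketch of the new 'exp_cor' usage: the calibration is trained on the
# hindcast ('exp') against observations ('obs') and applied to 'exp_cor'.
library(CSTools)
set.seed(1)
hind <- array(rnorm(120), c(dataset = 1, member = 3, sdate = 20, lat = 1, lon = 2))
obs <- array(rnorm(40), c(dataset = 1, member = 1, sdate = 20, lat = 1, lon = 2))
fcst <- array(rnorm(6), c(dataset = 1, member = 3, sdate = 1, lat = 1, lon = 2))
cal <- BiasCorrection(exp = hind, obs = obs, exp_cor = fcst)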