Commits (152)
.*\.git$
.*\.gitignore$
.*\.gitlab$
.*\.tar.gz$
.*\.pdf$
./.nc$
.*^(?!data)\.RData$
.*\.gitlab-ci.yml$
.lintr
^tests$
#^inst/doc$
^inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF100\.R$
^inst/doc/UseCase1_WindEvent_March2018\.R$
^inst/doc/UseCase2_PrecipitationDownscaling_RainFARM_RF4\.R$
^inst/doc/UseCase3_data_preparation_SCHEME_model\.R$
^inst/doc/launch_UseCase2_PrecipitationDownscaling_RF4\.sh$
^inst/doc/usecase/UseCase2_PrecipitationDownscaling_RainFARM_RF100\.R$
^inst/doc/usecase/UseCase1_WindEvent_March2018\.R$
^inst/doc/usecase/UseCase2_PrecipitationDownscaling_RainFARM_RF4\.R$
^inst/doc/usecase/UseCase3_data_preparation_SCHEME_model\.R$
^inst/doc/usecase/launch_UseCase2_PrecipitationDownscaling_RF4\.sh$
......@@ -9,3 +9,10 @@ build:
- R CMD build --resave-data .
- R CMD check --as-cran --no-manual --run-donttest CSTools_*.tar.gz
- R -e 'covr::package_coverage()'
lint-check:
stage: build
script:
- module load R/4.1.2-foss-2015a-bare
- echo "Run lintr on the package..."
- Rscript -e 'lintr::lint_package(path = ".")'
linters: linters_with_tags( # lintr_3.1.1
tags = c("package_development", "readability", "best_practices"),
line_length_linter = line_length_linter(100L),
T_and_F_symbol_linter = NULL,
quotes_linter = NULL,
commented_code_linter = NULL,
implicit_integer_linter = NULL,
vector_logic_linter = NULL,
extraction_operator_linter = NULL,
function_left_parentheses_linter = NULL,
semicolon_linter = NULL,
indentation_linter = NULL,
unnecessary_nested_if_linter = NULL,
if_not_else_linter = NULL,
object_length_linter = NULL,
infix_spaces_linter(exclude_operators = "~")
)
exclusions: list(
"inst",
"R/AnalogsPred_train.R",
"R/BEI_PDFBest.R",
"R/BEI_Weights.R",
"R/CST_AdamontAnalog.R",
"R/CST_AdamontQQCorr.R",
"R/Analogs.R",
"R/CST_AnalogsPredictors.R",
"R/CST_BEI_Weighting.R",
"R/CST_CategoricalEnsCombination.R",
"R/CST_DynBiasCorrection.R",
"R/CST_EnsClustering.R",
"R/PlotCombinedMap.R",
"R/PlotForecastPDF.R",
"R/PlotMostLikelyQuantileMap.R",
"R/PlotPDFsOLE.R",
"R/PlotTriangles4Categories.R",
"R/PlotWeeklyClim.R",
"tests/testthat/",
"tests/testthat.R"
)
Package: CSTools
Title: Assessing Skill of Climate Forecasts on Seasonal-to-Decadal Timescales
Version: 5.0.1
Version: 5.2.0
Authors@R: c(
person("Nuria", "Perez-Zanon", , "nuria.perez@bsc.es", role = "aut", comment = c(ORCID = "0000-0001-8568-3071")),
person("Louis-Philippe", "Caron", , "louis-philippe.caron@bsc.es", role = "aut", comment = c(ORCID = "0000-0001-5221-0147")),
......@@ -62,6 +62,7 @@ Depends:
easyVerification
Imports:
s2dv,
startR,
rainfarmr,
multiApply (>= 2.1.1),
ClimProjDiags,
......@@ -78,17 +79,17 @@ Imports:
utils,
verification,
lubridate,
scales
scales,
easyNCDF
Suggests:
zeallot,
testthat,
knitr,
markdown,
rmarkdown,
startR
rmarkdown
VignetteBuilder: knitr
License: GPL-3
Encoding: UTF-8
LazyData: true
RoxygenNote: 7.2.0
RoxygenNote: 7.2.3
Config/testthat/edition: 3
......@@ -19,6 +19,7 @@ export(CST_BEI_Weighting)
export(CST_BiasCorrection)
export(CST_Calibration)
export(CST_CategoricalEnsCombination)
export(CST_ChangeDimNames)
export(CST_DynBiasCorrection)
export(CST_EnsClustering)
export(CST_InsertDim)
......@@ -69,6 +70,7 @@ export(s2dv_cube)
export(training_analogs)
import(RColorBrewer)
import(abind)
import(easyNCDF)
import(ggplot2)
import(lubridate)
import(multiApply)
......
# CSTools 5.2.0 (Release date: 25-01-2024)
### Development
- New function CST_ChangeDimNames
- CST_SplitDim: added dimension names and split also Dates
- CST_SaveExp: save time bounds and global attributes; improved code
### Other
- Updated README
- Added citation file
# CSTools 5.1.1 (Release date: 19-10-2023)
### Fixes
- Added startR namespace in all CST_Start calls of the vignettes and sample data.
# CSTools 5.1.0 (Release date: 17-10-2023)
### Fixes
- Calibration() show warnings in atomic function when multiple cores are used.
- PlotForecastPDF fix background color in different machines.
- Correct CST_Subset indices for metadata.
- CST_Analogs: Add sdate_dim parameter and improve initial checks.
- Remove CST_Anomaly repeated checks in order to accept no member dimensions in obs.
- CST_SaveExp developments: improve warning, save metadata correctly.
- Improve print method in order that time_bounds are correctly displayed.
### Development
- PlotWeeklyClim to allow years outside the reference period.
- PlotWeeklyClim to allow setting y limits.
- New function CST_Start().
- PlotCombinedMap() has upper triangle_end; the color bars can have different breaks.
- New print method.
- CST_MultiEOF development treat spatial NAs.
- Correct PlotCombinedMap error.
- Substitute mentions of CST_Load by CST_Start.
- Modify necessary vignettes to the use of CST_Start.
# CSTools 5.0.1 (Release date: 06-06-2023)
- Resubmit to CRAN because it was archived due to dependency issue
**Fixes**
### Fixes
- Standardize the coordinates of 's2dv_cube' by setting the coordinates to vectors with NULL dimensions
- Include a check for SaveExp output filenames that only contain date information
- In SaveExp the dates parameter is allowed to have additional time dimensions of length 1
- Removed dot from the internal function named mergedatasets() used in CST_CategoricalEnsCombination
**New Features**
### Development
- New parameter 'startdates' in CST_SaveExp used to manually decide the name of the output files
**Other**
### Other
- Switch to testthat version 3
# CSTools 5.0.0 (Release date: 05-04-2023)
**Fixes**
### Fixes
- Correct vignettes: Analogs, MultiModelSkill and MultivarRMSE
- Add 'ncores' to s2dv function calls in CST_Anomaly
- Reduce computing time of examples and tests and improve documentation
**New features**
### Development
- Add dat_dim parameter in CST_BiasCorrection and CST_Calibration
- New plotting function for case studies temporal visualisation: PlotWeeklyClim
- Deprecate indices in dim_anom parameter of CST_Anomaly
......@@ -31,26 +70,28 @@
- New color set in PlotForecastPDF Vitigeoss colors
- New function CST_InsertDim
**Other**
### Other
- Added contribution from ArticXchange project due to PlotWeeklyClim
- Update NEWS.md with the correct format
- Change Licence
# CSTools 4.1.1 (Release date: 10-11-2022)
**Fixes**
### Fixes
- CST_Analogs corrected input of ClimProjDiags::Subset()
- PlotCombinedMap corrected use of 'cex_bar_titles' parameter
- CST_Anomaly added 'memb_dim', 'dat_dim' and 'ftime_dim' and improved use for 'dim_anom' parameters
# CSTools 4.1.0 (Release date: 25-10-2022)
**New features**
# CSTools 4.1.0 (Release date: 25-10-2022)
### Development
- Dependency on package 's2dverification' is changed to 's2dv'
- CST_BiasCorrection new parameters 'memb_dim', 'sdate_dim', 'ncores'
- CST_Calibration is able to calibrate forecast with new parameter 'exp_cor'
- CST_QuantileMapping uses cross-validation and provides option to remove NAs; new parameters 'memb_dim', 'sdate_dim', 'window_dim' and 'na.rm'; 'sample_dim' and 'sample_length' are removed
- s2dv_cube() new parameter 'time_dim'
**Fixes**
### Fixes
- as.s2dv_cube() detects latitude and longitude structure in startR_array object
- Data correction: 'lonlat_data' is renamed to 'lonlat_temp'; 'lonlat_prec' is corrected by one-day shift
- Typo and parameter correction in vignette 'MostLikelyTercile_vignette'
......@@ -58,20 +99,22 @@
- PlotMostLikelyQuantileMap() works with s2dv::PlotLayout
# CSTools 4.0.1 (Release date: 05-10-2021)
**New features**
### Development
- Dynamical Bias Correction method: `CST_ProxiesAttractors` and `CST_DynBiasCorrection` (optionally `Predictability`)
- CST_BiasCorrection and BiasCorrection allows to calibrate a forecast given the calibration in the hindcast by using parameter 'exp_cor'
- Use cases
- CST_SaveExp includes parameter extra_string
- PlotCombinedMap includes parameter cex_bar_titles
**Fixes**
### Fixes
- Calibration retains correlation absolute value
- Calibration fixed when cal.method == rpc-based, apply_to == sign, eval.method == 'leave-one-out' and the correlation is not significant
- PlotMostLikelyQuantileMap reorders latitudes of an array provided in 'dots' parameter
# CSTools 4.0.0 (Release date: 23-02-2021)
**New features**
### Development
- ADAMONT downscaling method: requires CST_AdamontAnalogs and CST_AdamontQQCor functions
- Analogs method using Predictors: requires training_analogs and CST_AnalogsPredictors
- PlotPDFsOLE includes parameters to modify legend style
......@@ -88,7 +131,7 @@
- Analogs vignette
- Data Storage and retrieval vignette
**Fixes**
### Fixes
- PlotForecastPDF correctly displays terciles labels
- CST_SaveExp correctly save time units
- CST_SplitDims returns ordered output following ascending order provided in indices when it is numeric
......@@ -100,7 +143,8 @@
- Decrease package size compressing vignettes figures and removing areave_data sample
# CSTools 3.1.0 (Release date: 02-07-2020)
**New features**
### Development
- EnsClustering vignette
- EnsClustering has a new parameter 'time_dim'
- CST_BiasCorrection has na.rm parameter
......@@ -111,7 +155,7 @@
- CST_RFTemp/RF_Temp functions available for downscaling temperature
- Weather Regimes vignette
**Fixes**
### Fixes
- CST_Anomaly handles exp, obs or both
- PlotForecastPDF vignette displays figures correctly
- Calibration function is exposed to users
......@@ -123,17 +167,19 @@
- CST_SaveExp uses multiApply and save time dimension correctly
# CSTools 3.0.0 (Release date: 10-02-2020)
**New features**
### Development
- CST_MergeDims and MergeDims
- Version working with R 3.4.2
- PlotForecastPDF handles independent terciles, extremes and observations for each panel
**Fixes**
### Fixes
- CST_Calibration handles missing values
- BEI functions handle missing values
# CSTools 2.0.0 (Release date: 25-11-2019)
**New features**
# CSTools 2.0.0 (Release date: 25-11-2019)
### Development
- CST_Analogs Analogs downscaling method,
- CST_MultiEOFS for multiple variables,
- Ensemble Clustering,
......@@ -151,13 +197,14 @@
- Adding reference to S2S4E H2020 project into the DESCRIPTION file
- Adding NEWS.md file
**Fixes**
### Fixes
- Minor fix in CST_BiasCorrection when checking parameter 'obs'
- Minor fix in data lonlat_prec to be of class 's2dv_cube'
- Minor fix in RainFARM vignette
### CSTools 1.0.1 (Release date: 19-06-2019)
**Fixes and new features**
### Fixes and development
- Correcting test of PlotForecastPDF for compatibility with ggplot2 release
- New function PlotCombinedMap
- Adding reference to MEDSCOPE ERA4CS Project into the DESCRIPTION file
......
......@@ -24,7 +24,7 @@
#'This function has not constrains of specific regions, variables to downscale,
#'or data to be used (seasonal forecast data, climate projections data,
#'reanalyses data). The regridding into a finer scale is done interpolating with
#'CST_Load. Then, this interpolation is corrected selecting the analogs in the
#'CST_Start. Then, this interpolation is corrected selecting the analogs in the
#'large and local scale based on the observations. The function is an
#'adapted version of the method of Yiou et al 2013. For an advanced search of
#'Analogs (multiple Analogs, different criteria, further information from the
......@@ -54,6 +54,8 @@
#' analog of parameter 'expVar'.
#'@param obsVar An 's2dv_cube' containing the field of the same variable as the
#' passed in parameter 'expVar' for the same region.
#'@param sdate_dim A character string indicating the name of the start date
#' dimension. By default, it is set to 'sdate'.
#'@param region A vector of length four indicating the minimum longitude, the
#' maximum longitude, the minimum latitude and the maximum latitude.
#'@param criteria A character string indicating the criteria to be used for the
......@@ -77,7 +79,8 @@
#' and dates are taken from element \code{$attrs$Dates} from expL.
#'@param time_obsL A character string indicating the date of the observations
#' in the date format (i.e. "yyyy-mm-dd"). By default it is NULL and dates are
#' taken from element \code{$attrs$Dates} from obsL.
#' taken from element \code{$attrs$Dates} from obsL. It must have time
#' dimensions.
#'@param region A vector of length four indicating the minimum longitude,
#' the maximum longitude, the minimum latitude and the maximum latitude.
#'@param nAnalogs Number of Analogs to be selected to apply the criteria
......@@ -101,8 +104,7 @@
#' best analog, for instance for downscaling.
#'@param ncores The number of cores to use in parallel computation
#'
#'@seealso \code{\link{CST_Load}}, \code{\link[s2dv]{Load}} and
#'\code{\link[s2dv]{CDORemap}}
#'@seealso \code{\link{CST_Start}}, \code{\link[startR]{Start}}
#'
#'@return An 's2dv_cube' object containing an array with the dowscaled values of
#'the best analogs in element 'data'. If 'AnalogsInfo' is TRUE, 'data' is a list
......@@ -117,6 +119,7 @@
#' format = "%d-%m-%y")
#'dim(time_obsL) <- c(time = 10)
#'time_expL <- time_obsL[1]
#'dim(time_expL) <- c(time = 1)
#'lon <- seq(-1, 5, 1.5)
#'lat <- seq(30, 35, 1.5)
#'coords <- list(lon = seq(-1, 5, 1.5), lat = seq(30, 35, 1.5))
......@@ -131,9 +134,11 @@
#'
#'@import multiApply
#'@import abind
#'@import s2dv
#'@importFrom ClimProjDiags SelBox Subset
#'@export
CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = NULL, region = NULL,
CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = NULL,
sdate_dim = 'sdate', region = NULL,
criteria = 'Large_dist', excludeTime = NULL,
time_expL = NULL, time_obsL = NULL,
nAnalogs = NULL, AnalogsInfo = FALSE,
......@@ -141,16 +146,13 @@ CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = NULL, region = NULL,
# Check 's2dv_cube'
if (!inherits(expL, "s2dv_cube") || !inherits(obsL, "s2dv_cube")) {
stop("Parameter 'expL' and 'obsL' must be of the class 's2dv_cube', ",
"as output by CSTools::CST_Load.")
stop("Parameter 'expL' and 'obsL' must be of the class 's2dv_cube'.")
}
if (!is.null(expVar) && !inherits(expVar, "s2dv_cube")) {
stop("Parameter 'expVar' must be of the class 's2dv_cube', ",
"as output by CSTools::CST_Load.")
stop("Parameter 'expVar' must be of the class 's2dv_cube'.")
}
if (!is.null(obsVar) && !inherits(obsVar, "s2dv_cube")) {
stop("Parameter 'obsVar' must be of the class 's2dv_cube', ",
"as output by CSTools::CST_Load.")
stop("Parameter 'obsVar' must be of the class 's2dv_cube'.")
}
# Check 'obsL' object structure
......@@ -215,7 +217,8 @@ CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = NULL, region = NULL,
lonL = as.vector(obsL$coords[[lon_name]]),
latL = as.vector(obsL$coords[[lat_name]]),
expVar = expVar$data,
obsVar = obsVar$data, criteria = criteria,
obsVar = obsVar$data, sdate_dim = sdate_dim,
criteria = criteria,
excludeTime = excludeTime, region = region,
lonVar = as.vector(lonVar), latVar = as.vector(latVar),
nAnalogs = nAnalogs, AnalogsInfo = AnalogsInfo,
......@@ -228,6 +231,7 @@ CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = NULL, region = NULL,
}
expL$data <- res
expL$dims <- dim(res)
if (!is.null(obsL$coords[[lon_name]]) | !is.null(obsL$coords[[lat_name]])) {
if (is.null(region)) {
......@@ -285,7 +289,7 @@ CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = NULL, region = NULL,
#'ones. This function has not constrains of specific regions, variables to
#'downscale, or data to be used (seasonal forecast data, climate projections
#'data, reanalyses data). The regridding into a finer scale is done interpolating
#'with CST_Load. Then, this interpolation is corrected selecting the analogs in
#'with CST_Start. Then, this interpolation is corrected selecting the analogs in
#'the large and local scale based on the observations. The function is an
#'adapted version of the method of Yiou et al 2013.
#'
......@@ -308,10 +312,12 @@ CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = NULL, region = NULL,
#' the same latitudinal and longitudinal dimensions as parameter 'expL' and a
#' single temporal dimension with the maximum number of available observations.
#'@param time_obsL A character string indicating the date of the observations
#' in the format "dd-mm-yyyy". Reference time to search for analogs.
#' in the format "dd-mm-yyyy". Reference time to search for analogs. It must
#' have time dimensions.
#'@param time_expL An array of N named dimensions (coinciding with time
#' dimensions in expL) of character string(s) indicating the date(s) of the
#' experiment in the format "dd-mm-yyyy". Time(s) to find the analogs.
#' experiment in the format "dd-mm-yyyy". Time(s) to find the analogs. If it
#' is not a scalar, it must have named dimensions.
#'@param lonL A vector containing the longitude of parameter 'expL'.
#'@param latL A vector containing the latitude of parameter 'expL'.
#'@param excludeTime An array of N named dimensions (coinciding with time
......@@ -326,6 +332,8 @@ CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = NULL, region = NULL,
#' function will be the analog of parameter 'expVar'.
#'@param obsVar An array of N named dimensions containing the field of the
#' same variable as the passed in parameter 'expVar' for the same region.
#'@param sdate_dim A character string indicating the name of the start date
#' dimension. By default, it is set to 'sdate'.
#'@param AnalogsInfo A logical value. If it is TRUE it returns a list
#' with two elements: 1) the downscaled field and
#' 2) the AnalogsInfo which contains: a) the number of the best
......@@ -372,6 +380,7 @@ CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = NULL, region = NULL,
#'obsSLP <- c(rnorm(1:180), expSLP * 1.2)
#'dim(obsSLP) <- c(time = 10, lat = 4, lon = 5)
#'time_obsSLP <- paste(rep("01", 10), rep("01", 10), 1994 : 2003, sep = "-")
#'dim(time_obsSLP) <- c(time = 10)
#'downscale_field <- Analogs(expL = expSLP, obsL = obsSLP,
#' time_obsL = time_obsSLP,time_expL = "01-01-1994")
#'
......@@ -417,11 +426,12 @@ CST_Analogs <- function(expL, obsL, expVar = NULL, obsVar = NULL, region = NULL,
#' AnalogsInfo = TRUE)
#'@import multiApply
#'@import abind
#'@import s2dv
#'@importFrom ClimProjDiags SelBox Subset
#'@export
Analogs <- function(expL, obsL, time_obsL, time_expL = NULL,
lonL = NULL, latL = NULL, expVar = NULL,
obsVar = NULL, criteria = "Large_dist",
lonL = NULL, latL = NULL, expVar = NULL, obsVar = NULL,
sdate_dim = 'sdate', criteria = "Large_dist",
excludeTime = NULL, lonVar = NULL, latVar = NULL,
region = NULL, nAnalogs = NULL,
AnalogsInfo = FALSE, ncores = NULL) {
......@@ -536,12 +546,61 @@ Analogs <- function(expL, obsL, time_obsL, time_expL = NULL,
}
if (!inherits(time_obsL, "character")) {
warning('imposing time_obsL to be a character')
dims_time_obsL <- dim(time_obsL)
time_obsL <- format(as.Date(time_obsL), '%d-%m-%Y')
dim(time_obsL) <- dims_time_obsL
}
if (!inherits(time_expL, "character")) {
warning('imposing time_expL to be a character')
dims_time_expL <- dim(time_expL)
time_expL <- format(as.Date(time_expL), '%d-%m-%Y')
dim(time_expL) <- dims_time_expL
}
# time_obsL, time_expL (2)
if (is.null(names(dim(time_obsL)))) {
stop("Parameter 'time_obsL' must have named dimensions.")
}
if (!is.character(sdate_dim)) {
stop("Parameter 'sdate_dim' must be a character string.")
}
if (!sdate_dim %in% names(dim(time_obsL))) {
if (length(dim(time_obsL)) == 1) {
dim(time_obsL) <- c(dim(time_obsL), sdate = 1)
} else {
stop("Parameters 'time_obsL' must have 'sdate_dim' dimension name. ",
"If it has multiple time dimensions.")
}
}
if (length(time_expL) != 1) {
if (is.null(names(dim(time_expL)))) {
stop("Parameter 'time_expL' must have named dimensions.")
}
} else {
dim(time_expL) <- 1
}
if (!sdate_dim %in% names(dim(time_expL))) {
if (length(dim(time_expL)) == 1) {
dim(time_expL) <- c(dim(time_expL), sdate = 1)
} else {
stop("Parameters 'time_expL' must have 'sdate_dim' dimension name. ",
"If it has multiple time dimensions.")
}
}
if (length(dim(time_obsL)) == 2) {
if (which(sdate_dim %in% names(dim(time_obsL))) == 1) {
time_obsL <- Reorder(time_obsL, c(2,1))
}
} else {
warning("Parameter 'time_obsL' should have forecast time and start date dimension in this order.")
}
if (length(dim(time_expL)) == 2) {
if (which(sdate_dim %in% names(dim(time_expL))) == 1) {
time_expL <- Reorder(time_expL, c(2,1))
}
} else {
warning("Parameter 'time_expL' should have forecast time and start date dimension in this order.")
}
# excludeTime
if (!is.null(excludeTime)) {
if (!inherits(excludeTime, "character")) {
......@@ -549,23 +608,7 @@ Analogs <- function(expL, obsL, time_obsL, time_expL = NULL,
excludeTime <- format(as.Date(excludeTime),'%d-%m-%Y')
}
}
# time_obsL
if (is.null(time_obsL)) {
stop("Parameter 'time_obsL' cannot be NULL")
}
if (any(names(dim(obsL)) %in% 'ftime')) {
if (any(names(dim(obsL)) %in% 'time')) {
stop("Multiple temporal dimensions ('ftime' and 'time') found",
"in parameter 'obsL'.")
} else {
time_pos_obsL <- which(names(dim(obsL)) == 'ftime')
names(dim(obsL))[time_pos_obsL] <- 'time'
if (any(names(dim(expL)) %in% 'ftime')) {
time_pos_expL <- which(names(dim(expL)) == 'ftime')
names(dim(expL))[time_pos_expL] <- 'time'
}
}
}
# obsVar, expVar
if (!is.null(obsVar)) {
if (any(names(dim(obsVar)) %in% 'ftime')) {
if (any(names(dim(obsVar)) %in% 'time')) {
......@@ -581,6 +624,20 @@ Analogs <- function(expL, obsL, time_obsL, time_expL = NULL,
}
}
}
# obsL
if (any(names(dim(obsL)) %in% 'ftime')) {
if (any(names(dim(obsL)) %in% 'time')) {
stop("Multiple temporal dimensions ('ftime' and 'time') found",
"in parameter 'obsL'.")
} else {
time_pos_obsL <- which(names(dim(obsL)) == 'ftime')
names(dim(obsL))[time_pos_obsL] <- 'time'
if (any(names(dim(expL)) %in% 'ftime')) {
time_pos_expL <- which(names(dim(expL)) == 'ftime')
names(dim(expL))[time_pos_expL] <- 'time'
}
}
}
if ((any(names(dim(obsL)) %in% 'sdate')) &&
(any(names(dim(obsL)) %in% 'time'))) {
dims_obsL <- dim(obsL)
......@@ -604,7 +661,7 @@ Analogs <- function(expL, obsL, time_obsL, time_expL = NULL,
if (any(names(dim(obsL)) %in% 'time')) {
dims_obsL <- dim(obsL)
pos_time <- which(names(dim(obsL)) == 'time')
if(length(time_obsL) != dim(obsL)[pos_time]) {
if (length(time_obsL) != dim(obsL)[pos_time]) {
stop("'time_obsL' and 'obsL' must have same length in the temporal
dimension.")
}
......@@ -618,6 +675,7 @@ Analogs <- function(expL, obsL, time_obsL, time_expL = NULL,
}
}
}
# obsVar
if (!is.null(obsVar)) {
if (any(names(dim(obsVar)) %in% 'sdate')) {
if (any(names(dim(obsVar)) %in% 'time')) {
......
......@@ -9,10 +9,10 @@
#'computation is carried out independently for experimental and observational
#'data products.
#'
#'@param exp An object of class \code{s2dv_cube} as returned by \code{CST_Load}
#'@param exp An object of class \code{s2dv_cube} as returned by \code{CST_Start}
#' function, containing the seasonal forecast experiment data in the element
#' named \code{$data}.
#'@param obs An object of class \code{s2dv_cube} as returned by \code{CST_Load}
#'@param obs An object of class \code{s2dv_cube} as returned by \code{CST_Start}
#' function, containing the observed data in the element named \code{$data}.
#'@param dim_anom A character string indicating the name of the dimension
#' along which the climatology will be computed. The default value is 'sdate'.
......@@ -57,7 +57,7 @@
#'anom <- CST_Anomaly(exp = exp, obs = obs, cross = FALSE, memb = TRUE)
#'
#'@seealso \code{\link[s2dv]{Ano_CrossValid}}, \code{\link[s2dv]{Clim}} and
#'\code{\link{CST_Load}}
#'\code{\link{CST_Start}}
#'
#'@import multiApply
#'@importFrom s2dv InsertDim Clim Ano_CrossValid Reorder
......@@ -69,8 +69,7 @@ CST_Anomaly <- function(exp = NULL, obs = NULL, dim_anom = 'sdate',
# Check 's2dv_cube'
if (!inherits(exp, 's2dv_cube') & !is.null(exp) ||
!inherits(obs, 's2dv_cube') & !is.null(obs)) {
stop("Parameter 'exp' and 'obs' must be of the class 's2dv_cube', ",
"as output by CSTools::CST_Load.")
stop("Parameter 'exp' and 'obs' must be of the class 's2dv_cube'.")
}
# exp and obs
if (is.null(exp$data) & is.null(obs$data)) {
......@@ -91,13 +90,10 @@ CST_Anomaly <- function(exp = NULL, obs = NULL, dim_anom = 'sdate',
any(is.null(names(dim(obs$data))))| any(nchar(names(dim(obs$data))) == 0)) {
stop("Parameter 'exp' and 'obs' must have dimension names in element 'data'.")
}
if (!all(names(dim(exp$data)) %in% names(dim(obs$data))) |
!all(names(dim(obs$data)) %in% names(dim(exp$data)))) {
stop("Parameter 'exp' and 'obs' must have same dimension names in element 'data'.")
}
dim_exp <- dim(exp$data)
dim_obs <- dim(obs$data)
dimnames_data <- names(dim_exp)
dimnames_exp <- names(dim_exp)
dimnames_obs <- names(dim_obs)
# dim_anom
if (!is.character(dim_anom)) {
stop("Parameter 'dim_anom' must be a character string.")
......@@ -129,19 +125,12 @@ CST_Anomaly <- function(exp = NULL, obs = NULL, dim_anom = 'sdate',
if (!is.character(memb_dim) | length(memb_dim) > 1) {
stop("Parameter 'memb_dim' must be a character string.")
}
if (!memb_dim %in% names(dim_exp) | !memb_dim %in% names(dim_obs)) {
stop("Parameter 'memb_dim' is not found in 'exp' or in 'obs' dimension.")
}
}
# dat_dim
if (!is.null(dat_dim)) {
if (!is.character(dat_dim)) {
stop("Parameter 'dat_dim' must be a character vector.")
}
if (!all(dat_dim %in% names(dim_exp)) | !all(dat_dim %in% names(dim_obs))) {
stop("Parameter 'dat_dim' is not found in 'exp' or 'obs' dimension in element 'data'. ",
"Set it as NULL if there is no dataset dimension.")
}
}
# filter_span
if (!is.null(filter_span)) {
......@@ -161,7 +150,7 @@ CST_Anomaly <- function(exp = NULL, obs = NULL, dim_anom = 'sdate',
if (!is.character(ftime_dim)) {
stop("Parameter 'ftime_dim' must be a character string.")
}
if (!ftime_dim %in% names(dim_exp) | !memb_dim %in% names(dim_obs)) {
if (!ftime_dim %in% names(dim_exp) | !ftime_dim %in% names(dim_obs)) {
stop("Parameter 'ftime_dim' is not found in 'exp' or in 'obs' dimension in element 'data'.")
}
}
......@@ -206,15 +195,17 @@ CST_Anomaly <- function(exp = NULL, obs = NULL, dim_anom = 'sdate',
ano <- NULL
# Permuting back dimensions to original order
clim_exp <- Reorder(clim_exp, dimnames_data)
clim_obs <- Reorder(clim_obs, dimnames_data)
clim_exp <- Reorder(clim_exp, dimnames_exp)
clim_obs <- Reorder(clim_obs, dimnames_obs)
ano$exp <- exp$data - clim_exp
ano$obs <- obs$data - clim_obs
}
exp$data <- ano$exp
exp$dims <- dim(ano$exp)
obs$data <- ano$obs
obs$dims <- dim(ano$obs)
# Outputs
# ~~~~~~~~~
......
......@@ -5,14 +5,14 @@
#'described in Torralba et al. (2017). The adjusted forecasts have an equivalent
#'standard deviation and mean to that of the reference dataset.
#'
#'@param exp An object of class \code{s2dv_cube} as returned by \code{CST_Load}
#'@param exp An object of class \code{s2dv_cube} as returned by \code{CST_Start}
#' function, containing the seasonal forecast experiment data in the element
#' named \code{$data} with at least time and member dimensions.
#'@param obs An object of class \code{s2dv_cube} as returned by \code{CST_Load}
#'@param obs An object of class \code{s2dv_cube} as returned by \code{CST_Start}
#' function, containing the observed data in the element named \code{$data}
#' with at least time dimension.
#'@param exp_cor An object of class \code{s2dv_cube} as returned by
#' \code{CST_Load} function, containing the seasonal forecast experiment to be
#' \code{CST_Start} function, containing the seasonal forecast experiment to be
#' corrected with at least time dimension. If it is NULL, the 'exp' forecast
#' will be corrected. If there is only one corrected dataset, it should not
#' have dataset dimension. If there is a corresponding corrected dataset for
......@@ -44,9 +44,9 @@
#'
#'@examples
#'mod1 <- 1 : (1 * 3 * 4 * 5 * 6 * 7)
#'dim(mod1) <- c(dataset = 1, member = 3, sdate = 4, ftime = 5, lat = 6, lon = 7)
#'dim(mod1) <- c(dataset = 1, member = 3, sdate = 4, time = 5, lat = 6, lon = 7)
#'obs1 <- 1 : (1 * 1 * 4 * 5 * 6 * 7)
#'dim(obs1) <- c(dataset = 1, member = 1, sdate = 4, ftime = 5, lat = 6, lon = 7)
#'dim(obs1) <- c(dataset = 1, member = 1, sdate = 4, time = 5, lat = 6, lon = 7)
#'lon <- seq(0, 30, 5)
#'lat <- seq(0, 25, 5)
#'coords <- list(lat = lat, lon = lon)
......@@ -134,9 +134,9 @@ CST_BiasCorrection <- function(exp, obs, exp_cor = NULL, na.rm = FALSE,
#'
#'@examples
#'mod1 <- 1 : (1 * 3 * 4 * 5 * 6 * 7)
#'dim(mod1) <- c(dataset = 1, member = 3, sdate = 4, ftime = 5, lat = 6, lon = 7)
#'dim(mod1) <- c(dataset = 1, member = 3, sdate = 4, time = 5, lat = 6, lon = 7)
#'obs1 <- 1 : (1 * 1 * 4 * 5 * 6 * 7)
#'dim(obs1) <- c(dataset = 1, member = 1, sdate = 4, ftime = 5, lat = 6, lon = 7)
#'dim(obs1) <- c(dataset = 1, member = 1, sdate = 4, time = 5, lat = 6, lon = 7)
#'a <- BiasCorrection(exp = mod1, obs = obs1)
#'@import multiApply
#'@export
......
......@@ -18,16 +18,16 @@
#'(2014). It is equivalent to function \code{Calibration} but for objects
#'of class \code{s2dv_cube}.
#'
#'@param exp An object of class \code{s2dv_cube} as returned by \code{CST_Load}
#'@param exp An object of class \code{s2dv_cube} as returned by \code{CST_Start}
#' function with at least 'sdate' and 'member' dimensions, containing the
#' seasonal hindcast experiment data in the element named \code{data}. The
#' hindcast is used to calibrate the forecast in case the forecast is provided;
#' if not, the same hindcast will be calibrated instead.
#'@param obs An object of class \code{s2dv_cube} as returned by \code{CST_Load}
#'@param obs An object of class \code{s2dv_cube} as returned by \code{CST_Start}
#' function with at least 'sdate' dimension, containing the observed data in
#' the element named \code{$data}.
#'@param exp_cor An optional object of class \code{s2dv_cube} as returned by
#' \code{CST_Load} function with at least 'sdate' and 'member' dimensions,
#' \code{CST_Start} function with at least 'sdate' and 'member' dimensions,
#' containing the seasonal forecast experiment data in the element named
#' \code{data}. If the forecast is provided, it will be calibrated using the
#' hindcast and observations; if not, the hindcast will be calibrated instead.
......@@ -106,7 +106,7 @@
#'Quarterly Journal of the Royal Meteorological Society, 141(688), 807-818.
#'\doi{10.1002/qj.2397}
#'
#'@seealso \code{\link{CST_Load}}
#'@seealso \code{\link{CST_Start}}
#'
#'@examples
#'# Example 1:
......@@ -287,7 +287,7 @@ CST_Calibration <- function(exp, obs, exp_cor = NULL, cal.method = "mse_min",
#'Quarterly Journal of the Royal Meteorological Society, 141(688), 807-818.
#'\doi{10.1002/qj.2397}
#'
#'@seealso \code{\link{CST_Load}}
#'@seealso \code{\link{CST_Start}}
#'
#'@examples
#'mod1 <- 1 : (1 * 3 * 4 * 5 * 6 * 7)
......
#'Change the name of one or more dimensions for an object of class s2dv_cube
#'
#'Change the names of the dimensions specified in 'original_names' to the names
#'in 'new_names'. The coordinate names and the dimensions of any attributes
#'are also modified accordingly.
#'
#'@author Agudetse Roures Victoria, \email{victoria.agudetse@bsc.es}
#'
#'@param data An object of class \code{s2dv_cube} whose dimension names
#' should be changed.
#'@param original_names A single character string or a vector indicating the
#' dimensions to be renamed.
#'@param new_names A single character string or a vector indicating the new
#' dimension names, in the same order as the dimensions in 'original_names'.
#'
#'@return An object of class \code{s2dv_cube} with similar data, coordinates and
#'attributes as the \code{data} input, but with modified dimension names.
#'
#'@examples
#'# Example with sample data:
#'# Check original dimensions and coordinates
#'lonlat_temp$exp$dims
#'names(lonlat_temp$exp$coords)
#'dim(lonlat_temp$exp$attrs$Dates)
#'# Change 'dataset' to 'dat' and 'ftime' to 'time'
#'exp <- CST_ChangeDimNames(lonlat_temp$exp,
#' original_names = c("dataset", "ftime"),
#' new_names = c("dat", "time"))
#'# Check new dimensions and coordinates
#'exp$dims
#'names(exp$coords)
#'dim(exp$attrs$Dates)
#'
#'@export
CST_ChangeDimNames <- function(data, original_names, new_names) {
  # Input checks: 'data' must be an 's2dv_cube'; the two name vectors must be
  # character vectors of equal length, and every name in 'original_names' must
  # be an existing dimension of 'data'.
  if (!inherits(data, "s2dv_cube")) {
    stop("Parameter 'data' must be an object of class 's2dv_cube'.")
  }
  if (!is.character(original_names)) {
    stop("Parameter 'original_names' must be a character string or a ",
         "vector of character strings.")
  }
  if (!is.character(new_names)) {
    stop("Parameter 'new_names' must be a character string or a ",
         "vector of character strings.")
  }
  if (!(length(original_names) == length(new_names))) {
    stop("The number of dimension names in 'new_names' must be the same ",
         "as in 'original_names'.")
  }
  if (!all(original_names %in% names(data$dims))) {
    stop("Some of the dimensions in 'original_names' could not be found in ",
         "'data'.")
  }
  # Rename each dimension in turn. seq_along() (instead of 1:length()) makes
  # an empty 'original_names' a no-op rather than iterating over c(1, 0),
  # which would index with NA.
  for (index in seq_along(original_names)) {
    original_name <- original_names[index]
    new_name <- new_names[index]
    # Step 1: Change dims
    names(data$dims)[which(names(data$dims) == original_name)] <- new_name
    # Step 2: Change coords
    names(data$coords)[which(names(data$coords) == original_name)] <- new_name
    # Step 3: Change attrs
    # 3.1 - Dates: rename the matching dimension of the Dates array, if any
    if (original_name %in% names(dim(data$attrs$Dates))) {
      names(dim(data$attrs$Dates))[which(names(dim(data$attrs$Dates))
                                         == original_name)] <- new_name
    }
    # 3.2 - Variable metadata: rename the matching metadata entry, if any
    if (original_name %in% names(data$attrs$Variable$metadata)) {
      names(data$attrs$Variable$metadata)[which(names(data$attrs$Variable$metadata)
                                                == original_name)] <- new_name
    }
    # 3.3 - Source files: rename the matching dimension, if any
    if (original_name %in% names(dim(data$attrs$source_files))) {
      names(dim(data$attrs$source_files))[which(names(dim(data$attrs$source_files))
                                                == original_name)] <- new_name
    }
  }
  # Re-apply the (renamed) dimension vector to the data array itself
  dim(data$data) <- data$dims
  if (!is.null(attributes(data$data)$dimensions)) {
    attributes(data$data)$dimensions <- names(data$dims)
  }
  # Keep the names of the Dates 'dim' attribute in sync with the renamed dims
  attr(attributes(data$attrs$Dates)$dim, "names") <- names(dim(data$attrs$Dates))
  return(data)
}
......@@ -15,8 +15,6 @@
#' \code{merge_dims} will be used.
#'@param na.rm A logical indicating if the NA values should be removed or not.
#'
#'@import abind
#'@importFrom ClimProjDiags Subset
#'@examples
#'data <- 1 : c(2 * 3 * 4 * 5 * 6 * 7)
#'dim(data) <- c(time = 7, lat = 2, lon = 3, monthly = 4, member = 6,
......@@ -33,11 +31,35 @@ CST_MergeDims <- function(data, merge_dims = c('ftime', 'monthly'),
rename_dim = NULL, na.rm = FALSE) {
# Check 's2dv_cube'
if (!inherits(data, 's2dv_cube')) {
stop("Parameter 'data' must be of the class 's2dv_cube', ",
"as output by CSTools::CST_Load.")
stop("Parameter 'data' must be of the class 's2dv_cube'.")
}
if (is.null(rename_dim)) {
rename_dim <- merge_dims[1]
}
# data
data$data <- MergeDims(data$data, merge_dims = merge_dims,
rename_dim = rename_dim, na.rm = na.rm)
# dims
data$dims <- dim(data$data)
# rename_dim
if (length(rename_dim) > 1) {
rename_dim <- as.character(rename_dim[1])
}
# coords
data$coords[merge_dims] <- NULL
data$coords[[rename_dim]] <- 1:dim(data$data)[rename_dim]
attr(data$coords[[rename_dim]], 'indices') <- TRUE
# attrs
if (all(merge_dims %in% names(dim(data$attrs$Dates)))) {
data$attrs$Dates <- MergeDims(data$attrs$Dates, merge_dims = merge_dims,
rename_dim = rename_dim, na.rm = na.rm)
} else if (any(merge_dims %in% names(dim(data$attrs$Dates)))) {
warning("The dimensions of 'Dates' array will be different from ",
"the temporal dimensions in 'data'. Parameter 'merge_dims' ",
"only includes one temporal dimension of 'Dates'.")
}
return(data)
}
#'Function to Split Dimension
......@@ -56,12 +78,12 @@ CST_MergeDims <- function(data, merge_dims = c('ftime', 'monthly'),
#' \code{merge_dims} will be used.
#'@param na.rm A logical indicating if the NA values should be removed or not.
#'
#'@import abind
#'@importFrom ClimProjDiags Subset
#'@examples
#'data <- 1 : 20
#'dim(data) <- c(time = 10, lat = 2)
#'new_data <- MergeDims(data, merge_dims = c('time', 'lat'))
#'@import abind
#'@importFrom ClimProjDiags Subset
#'@export
MergeDims <- function(data, merge_dims = c('time', 'monthly'),
rename_dim = NULL, na.rm = FALSE) {
......
......@@ -58,13 +58,11 @@ CST_QuantileMapping <- function(exp, obs, exp_cor = NULL, sdate_dim = 'sdate',
ncores = NULL, ...) {
# Check 's2dv_cube'
if (!inherits(exp, 's2dv_cube') || !inherits(obs, 's2dv_cube')) {
stop("Parameter 'exp' and 'obs' must be of the class 's2dv_cube', ",
"as output by CSTools::CST_Load.")
stop("Parameter 'exp' and 'obs' must be of the class 's2dv_cube'.")
}
if (!is.null(exp_cor)) {
if (!inherits(exp_cor, 's2dv_cube')) {
stop("Parameter 'exp_cor' must be of the class 's2dv_cube', ",
"as output by CSTools::CST_Load.")
stop("Parameter 'exp_cor' must be of the class 's2dv_cube'.")
}
}
......
......@@ -144,7 +144,7 @@ CST_RFTemp <- function(data, oro, xlim = NULL, ylim = NULL, lapse = 6.5,
#'The output grid can be irregularly spaced in lon and/or lat.
#'@references Method described in ERA4CS MEDSCOPE milestone M3.2:
#'High-quality climate prediction data available to WP4 here:
#'\ url{https://www.medscope-project.eu/the-project/deliverables-reports/}
#'\url{https://www.medscope-project.eu/the-project/deliverables-reports/}
#'and in H2020 ECOPOTENTIAL Deliverable No. 8.1:
#'High resolution (1-10 km) climate, land use and ocean change scenarios here:
#'\url{https://ec.europa.eu/research/participants/documents/downloadPublic?documentIds=080166e5b6cd2324&appId=PPGMS}.
......
This diff is collapsed.
......@@ -10,7 +10,7 @@
#'
#'@param data A 's2dv_cube' object
#'@param split_dim A character string indicating the name of the dimension to
#' split.
#' split. It is set as 'time' by default.
#'@param indices A vector of numeric indices or dates. If left at NULL, the
#' dates provided in the s2dv_cube object (element Dates) will be used.
#'@param freq A character string indicating the frequency: by 'day', 'month' and
......@@ -21,6 +21,12 @@
#' dimension.
#'@param insert_ftime An integer indicating the number of time steps to add at
#' the beginning of the time series.
#'@param ftime_dim A character string indicating the name of the forecast time
#' dimension. It is set as 'time' by default.
#'@param sdate_dim A character string indicating the name of the start date
#' dimension. It is set as 'sdate' by default.
#'@param return_indices A logical value that if it is TRUE, the indices
#' used in splitting the dimension will be returned. It is FALSE by default.
#'
#'@details Parameter 'insert_ftime' has been included for the case of using
#'daily data, requiring split the temporal dimensions by months (or similar) and
......@@ -51,63 +57,97 @@
#'new_data <- CST_SplitDim(data, indices = time, freq = 'year')
#'@import abind
#'@importFrom ClimProjDiags Subset
#'@importFrom s2dv Reorder
#'@export
CST_SplitDim <- function(data, split_dim = 'time', indices = NULL,
freq = 'monthly', new_dim_name = NULL,
insert_ftime = NULL) {
insert_ftime = NULL, ftime_dim = 'time',
sdate_dim = 'sdate', return_indices = FALSE) {
# Check 's2dv_cube'
if (!inherits(data, 's2dv_cube')) {
stop("Parameter 'data' must be of the class 's2dv_cube', ",
"as output by CSTools::CST_Load.")
stop("Parameter 'data' must be of the class 's2dv_cube'.")
}
if (!is.null(insert_ftime)) {
if (!is.numeric(insert_ftime)) {
stop("Parameter 'insert_ftime' should be an integer.")
}
if (length(insert_ftime) > 1) {
warning("Parameter 'insert_ftime' must be of length 1, and only the",
" first element will be used.")
insert_ftime <- insert_ftime[1]
}
# Check Dates
if (is.null(dim(data$attrs$Dates))) {
warning("Parameter 'Dates' must have dimensions, 'insert_ftime' won't ",
"be used.")
insert_ftime <- NULL
}
}
if (!is.null(insert_ftime)) {
    # adding NAs at the beginning of the data in ftime dim
ftimedim <- which(names(dim(data$data)) == ftime_dim)
dims <- dim(data$data)
dims[ftimedim] <- insert_ftime
empty_array <- array(NA, dims)
data$data <- abind(empty_array, data$data, along = ftimedim)
names(dim(data$data)) <- names(dims)
# Reorder dates
data$attrs$Dates <- Reorder(data$attrs$Dates, c(ftime_dim, sdate_dim))
dates <- data$attrs$Dates
dates_subset <- Subset(dates, sdate_dim, 1)
# adding dates to Dates for the new NAs introduced
if ((dates_subset[2] - dates_subset[1]) == 1) {
timefreq <- 'days'
} else {
if (length(insert_ftime) > 1) {
warning("Parameter 'insert_ftime' must be of length 1, and only the",
" first element will be used.")
insert_ftime <- insert_ftime[1]
}
    # adding NAs at the beginning of the data in ftime dim
ftimedim <- which(names(dim(data$data)) == 'ftime')
dims <- dim(data$data)
dims[ftimedim] <- insert_ftime
empty_array <- array(NA, dims)
data$data <- abind(empty_array, data$data, along = ftimedim)
names(dim(data$data)) <- names(dims)
# adding dates to Dates for the new NAs introduced
if ((data$attrs$Dates[2] - data$attrs$Dates[1]) == 1) {
timefreq <- 'days'
} else {
timefreq <- 'months'
warning("Time frequency of forecast time is considered monthly.")
}
start <- data$attrs$Dates
dim(start) <- c(ftime = length(start)/dims['sdate'], sdate = dims['sdate'])
# new <- array(NA, prod(dim(data$data)[c('ftime', 'sdate')]))
    # Pending fix: transform to UTC when concatenating
data$attrs$Dates <- do.call(c, lapply(1:dim(start)[2], function(x) {
seq(start[1,x] - as.difftime(insert_ftime,
units = timefreq),
start[dim(start)[1],x], by = timefreq, tz = "UTC")}))
timefreq <- 'months'
warning("Time frequency of forecast time is considered monthly.")
}
dim(dates) <- c(length(dates)/dims[sdate_dim], dims[sdate_dim])
names(dim(dates)) <- c(ftime_dim, sdate_dim)
# new <- array(NA, prod(dim(data$data)[c('ftime', 'sdate')]))
    # Pending fix: transform to UTC when concatenating
data$attrs$Dates <- do.call(c, lapply(1:dim(dates)[2], function(x) {
seq(dates[1,x] - as.difftime(insert_ftime,
units = timefreq),
dates[dim(dates)[1],x], by = timefreq, tz = "UTC")}))
}
if (is.null(indices)) {
if (any(split_dim %in% c('ftime', 'time', 'sdate'))) {
if (any(split_dim %in% c(ftime_dim, sdate_dim))) {
indices <- data$attrs$Dates
if (any(names(dim(data$data)) %in% 'sdate')) {
if (any(names(dim(data$data)) %in% sdate_dim)) {
if (!any(names(dim(data$data)) %in% split_dim)) {
stop("Parameter 'split_dims' must be one of the dimension ",
"names in parameter 'data'.")
}
indices <- indices[1 : dim(data$data)[which(names(dim(data$data)) == split_dim)]]
indices <- indices[1:dim(data$data)[which(names(dim(data$data)) == split_dim)]]
}
}
}
data$data <- SplitDim(data$data, split_dim = split_dim, indices = indices,
freq = freq, new_dim_name = new_dim_name)
return(data)
# Call the function
res <- SplitDim(data = data$data, split_dim = split_dim,
indices = indices, freq = freq,
new_dim_name = new_dim_name,
dates = data$attrs$Dates,
return_indices = return_indices)
if (inherits(res, 'list')) {
data$data <- res$data
# Split dim on Dates
if (!is.null(res$dates)) {
data$attrs$Dates <- res$dates
}
} else {
data$data <- res
}
data$dims <- dim(data$data)
# Coordinates
# TO DO: Subset splitted coordinate and add the new dimension coordinate.
if (return_indices) {
return(list(data = data, indices = res$indices))
} else {
return(data)
}
}
#'Function to Split Dimension
#'
......@@ -129,6 +169,11 @@ CST_SplitDim <- function(data, split_dim = 'time', indices = NULL,
#' the length in which to subset the dimension.
#'@param new_dim_name A character string indicating the name of the new
#' dimension.
#'@param dates An optional parameter containing an array of dates of class
#' 'POSIXct' with the corresponding time dimensions of 'data'. It is NULL
#' by default.
#'@param return_indices A logical value that if it is TRUE, the indices
#' used in splitting the dimension will be returned. It is FALSE by default.
#'@examples
#'data <- 1 : 20
#'dim(data) <- c(time = 10, lat = 2)
......@@ -145,7 +190,8 @@ CST_SplitDim <- function(data, split_dim = 'time', indices = NULL,
#'@importFrom ClimProjDiags Subset
#'@export
SplitDim <- function(data, split_dim = 'time', indices, freq = 'monthly',
new_dim_name = NULL) {
new_dim_name = NULL, dates = NULL,
return_indices = FALSE) {
# check data
if (is.null(data)) {
stop("Parameter 'data' cannot be NULL.")
......@@ -167,7 +213,7 @@ SplitDim <- function(data, split_dim = 'time', indices, freq = 'monthly',
"one and only the first element will be used.")
}
if (!any(names(dims) %in% split_dim)) {
stop("Parameter 'split_dims' must be one of the dimension ",
stop("Parameter 'split_dim' must be one of the dimension ",
"names in parameter 'data'.")
}
pos_split <- which(names(dims) == split_dim)
......@@ -210,8 +256,8 @@ SplitDim <- function(data, split_dim = 'time', indices, freq = 'monthly',
})
if ('try-error' %in% class(indices) |
sum(is.na(indices)) == length(indices)) {
stop("Dates provided in parameter 'indices' must be of class",
" 'POSIXct' or convertable to 'POSIXct'.")
stop("Dates provided in parameter 'indices' must be of class ",
"'POSIXct' or convertable to 'POSIXct'.")
}
}
}
......@@ -230,7 +276,7 @@ SplitDim <- function(data, split_dim = 'time', indices, freq = 'monthly',
} else if (freq == 'year') {
indices <- as.numeric(strftime(indices, format = "%Y"))
repited <- unique(indices)
} else if (freq == 'monthly' ) {
} else if (freq == 'monthly') {
indices <- as.numeric(strftime(indices, format = "%m%Y"))
repited <- unique(indices)
} else {
......@@ -255,15 +301,41 @@ SplitDim <- function(data, split_dim = 'time', indices, freq = 'monthly',
data <- lapply(repited, function(x) {rebuild(x, data, along = split_dim,
indices = indices, max_times)})
data <- abind(data, along = length(dims) + 1)
if (is.character(freq)) {
names(dim(data)) <- c(names(dims), freq)
} else {
names(dim(data)) <- c(names(dims), 'index')
# Add new dim name
if (is.null(new_dim_name)) {
if (is.character(freq)) {
new_dim_name <- freq
} else {
new_dim_name <- 'index'
}
}
if (!is.null(new_dim_name)) {
names(dim(data)) <- c(names(dims), new_dim_name)
names(dim(data)) <- c(names(dims), new_dim_name)
# Split also Dates
dates_exist <- FALSE
if (!is.null(dates)) {
if (any(split_dim %in% names(dim(dates)))) {
datesdims <- dim(dates)
dates <- lapply(repited, function(x) {rebuild(x, dates, along = split_dim,
indices = indices, max_times)})
dates <- abind(dates, along = length(datesdims) + 1)
dates <- as.POSIXct(dates, origin = '1970-01-01', tz = "UTC")
names(dim(dates)) <- c(names(datesdims), new_dim_name)
}
dates_exist <- TRUE
}
# Return objects
if (all(dates_exist, return_indices)) {
return(list(data = data, dates = dates, indices = indices))
} else if (all(dates_exist, !return_indices)) {
return(list(data = data, dates = dates))
} else if (all(!dates_exist, return_indices)) {
return(list(data = data, indices = indices))
} else {
return(data)
}
return(data)
}
rebuild <- function(x, data, along, indices, max_times) {
......
......@@ -9,7 +9,10 @@
#'`s2dv_cube` object.
#'
#'It receives any number of parameters (`...`) that are automatically forwarded
#'to the `startR::Start` function. See details in `?startR::Start`.
#'to the `startR::Start` function. See details in `?startR::Start`. The
#'auxiliary functions used to define dimensions need to be called within the
#'startR namespace (e.g. startR::indices(), startR::values(), startR::Sort(),
#'startR::CircularSort(), startR::CDORemapper(), ...).
#'
#'@param ... Parameters that are automatically forwarded to the `startR::Start`
#' function. See details in `?startR::Start`.
......
......@@ -79,15 +79,15 @@ CST_Subset <- function(x, along, indices, drop = FALSE, var_dim = NULL,
}
# Subset data
x$data <- ClimProjDiags::Subset(x$data, along = along,
indices = indices,
drop = drop)
x$data <- Subset(x$data, along = along,
indices = indices,
drop = drop)
# Adjust dimensions
x$dims <- dim(x$data)
# Adjust coordinates
for (dimension in 1:length(along)) {
dim_name <- along[dimension]
index <- indices[[dimension]]
index <- indices[dimension]
# Only rename coordinates that have not been dropped
if (dim_name %in% names(x$dims)) {
# Subset coordinate by indices
......@@ -113,10 +113,10 @@ CST_Subset <- function(x, along, indices, drop = FALSE, var_dim = NULL,
}
if ((!is.null(x$attrs$source_files)) &&
(dim_name %in% names(dim(x$attrs$source_files)))) {
x$attrs$source_files <- ClimProjDiags::Subset(x$attrs$source_files,
along = dim_name,
indices = index,
drop = drop)
x$attrs$source_files <- Subset(x$attrs$source_files,
along = dim_name,
indices = index,
drop = drop)
}
}
# Remove metadata from variables that were dropped
......@@ -128,10 +128,10 @@ CST_Subset <- function(x, along, indices, drop = FALSE, var_dim = NULL,
if (!(length(time_along) == 0)) {
time_indices <- indices[match(time_along, along)]
original_dates <- x$attrs$Dates
x$attrs$Dates <- ClimProjDiags::Subset(x$attrs$Dates,
along = time_along,
indices = time_indices,
drop = drop)
x$attrs$Dates <- Subset(x$attrs$Dates,
along = time_along,
indices = time_indices,
drop = drop)
}
# Subset metadata
for (variable in 1:length(names(x$attrs$Variable$metadata))) {
......@@ -150,12 +150,12 @@ CST_Subset <- function(x, along, indices, drop = FALSE, var_dim = NULL,
# Function to subset with attributes
.subset_with_attrs <- function(x, ...) {
args_subset <- list(...)
if (is.null(dim(x)) | length(dim(x)) == 1) {
l <- x[args_subset[['indices']]]
if (any(is.null(dim(x)), length(dim(x)) == 1)) {
l <- x[args_subset[['indices']][[1]]]
} else {
l <- ClimProjDiags::Subset(x, along = args_subset[['along']],
indices = args_subset[['indices']],
drop = args_subset[['drop']])
l <- Subset(x, along = args_subset[['along']],
indices = args_subset[['indices']],
drop = args_subset[['drop']])
}
attr.names <- names(attributes(x))
attr.names <- attr.names[attr.names != 'names']
......
......@@ -72,8 +72,7 @@ CST_WeatherRegimes <- function(data, ncenters = NULL,
ncores = NULL) {
# Check 's2dv_cube'
if (!inherits(data, 's2dv_cube')) {
stop("Parameter 'data' must be of the class 's2dv_cube', ",
"as output by CSTools::CST_Load.")
stop("Parameter 'data' must be of the class 's2dv_cube'.")
}
# Check 'exp' object structure
if (!all(c('data', 'coords') %in% names(data))) {
......
......@@ -35,6 +35,35 @@
#' colour bar will be automatically interpolated to match the number of breaks.
#' Each item in this list can be named, and the name will be used as title for
#' the corresponding colour bar (equivalent to the parameter 'bar_titles').
#'@param bar_limits Parameter from s2dv::ColorBar. Vector of two numeric values
#' with the extremes of the range of values represented in the colour bar. If
#' 'var_limits' go beyond this interval, the drawing of triangle extremes is
#' triggered at the corresponding sides, painted in 'col_inf' and 'col_sup'.
#' Either of them can be set as NA and will then take as value the
#' corresponding extreme in 'var_limits' (hence a triangle end won't be
#' triggered for these sides). Takes as default the extremes of 'brks' if
#' available, else the same values as 'var_limits'.
#'@param triangle_ends Parameter from s2dv::ColorBar. Vector of two logical
#' elements, indicating whether to force the drawing of triangle ends at each
#' of the extremes of the colour bar. This choice is automatically made from
#' the provided 'brks', 'bar_limits', 'var_limits', 'col_inf' and 'col_sup',
#' but the behaviour can be manually forced to draw or not to draw the triangle
#' ends with this parameter. If 'cols' is provided, 'col_inf' and 'col_sup'
#' will take priority over 'triangle_ends' when deciding whether to draw the
#' triangle ends or not.
#'@param col_inf Parameter from s2dv::ColorBar. Colour to fill the inferior
#' triangle end with. Useful if specifying colours manually with parameter
#' 'cols', to specify the colour and to trigger the drawing of the lower
#' extreme triangle, or if 'cols' is not specified, to replace the colour
#' automatically generated by ColorBar().
#'@param col_sup Parameter from s2dv::ColorBar. Colour to fill the superior
#' triangle end with. Useful if specifying colours manually with parameter
#' 'cols', to specify the colour and to trigger the drawing of the upper
#' extreme triangle, or if 'cols' is not specified, to replace the colour
#' automatically generated by ColorBar().
#'@param bar_extra_margin Parameter from s2dv::ColorBar. Extra margins to be
#' added around the colour bar, in the format c(y1, x1, y2, x2). The units are
#' margin lines. Takes rep(0, 4) by default.
#'@param col_unknown_map Colour to use to paint the grid cells for which a map
#' is not possible to be chosen according to 'map_select_fun' or for those
#' values that go beyond 'display_range'. Takes the value 'white' by default.
......