#*******************************************************************************
# Title: Example script to create 's2dv_cube' objects
# Author: Eva Rifà Rovira
# Date: 16/01/2024
#*******************************************************************************
# This example shows how to create an 's2dv_cube' object.
# There are two ways of creating an 's2dv_cube' object.
# (1) With the function s2dv_cube(): create it from scratch with any data.
# (2) With the function CST_Start(). This function returns an 's2dv_cube'
# from an 'startR_array'.
# Needed packages
library(CSTools)
library(startR)
################################################################################
#-----------------------------------------------------
# Example 1: Function s2dv_cube() from defined data
#-----------------------------------------------------
# Minimal use case, with s2dv_cube function.
# In this example we use the function s2dv_cube() to create an object of class
# 's2dv_cube' with the correct structure.
# (1) We define the array with named dimensions:
dat <- array(1:100, dim = c(time = 10, lat = 4, lon = 10))
# (2) We define the coordinates as a list of vectors. The names and lengths
# must match the named dimensions of 'dat' defined above:
coords <- list(time = 1:10, lat = 43:40, lon = 0:9)
# (3) The metadata: one named list per variable/coordinate to document:
metadata <- list(tas = list(level = '2m'),
lon = list(cdo_grid_name = 'r360x181'),
lat = list(cdo_grid_name = 'r360x181'))
# (4) The creation of Dates array.
# First the initial date:
ini_date <- as.POSIXct('2010-01-01', format = '%Y-%m-%d')
# The sequence of dates (one per 'time' step):
dates <- seq(ini_date, by = 'days', length.out = 10)
# We define the dates dimensions so they match the 'time' dimension of 'dat':
dim(dates) <- c(time = 10)
# (5) We call the function s2dv_cube(); the remaining arguments ('when',
# 'source_files', 'Datasets') are stored as extra attributes of the cube:
dat_cube <- s2dv_cube(data = dat, coords = coords,
varName = 'tas', metadata = metadata,
Dates = dates,
when = "2019-10-23 19:15:29 CET",
source_files = c("/path/to/file1.nc", "/path/to/file2.nc"),
Datasets = 'test_dataset')
# We print the result to see the 's2dv_cube' structure:
# > dat_cube
# 's2dv_cube'
# Data [ 1, 2, 3, 4, 5, 6, 7, 8 ... ]
# Dimensions ( time = 10, lat = 4, lon = 10 )
# Coordinates
# * time : 1, 2, 3, 4, 5, 6, 7, 8, 9, 10
# * lat : 43, 42, 41, 40
# * lon : 0, 1, 2, 3, 4, 5, 6, 7, 8, 9
# Attributes
# Dates : 2010-01-01 2010-01-02 2010-01-03 2010-01-04 2010-01-05 ...
# varName : tas
# metadata :
# tas
# other : level
# lon
# other : cdo_grid_name
# lat
# other : cdo_grid_name
# Datasets : test_dataset
# when : 2019-10-23 19:15:29 CET
# source_files : /path/to/file1.nc ...
#-----------------------------------------------------
# Example 2: Function as.s2dv_cube()
#-----------------------------------------------------
# (1) Example using CST_Start
# NOTE 1: CST_Start() is just a wrapper of function Start() with the
# transformation to 's2dv_cube' object.
# NOTE 2: In order for the auxiliary functions from startR used in the
# input arguments to work, we need to call them explicitly from the startR
# namespace (e.g. startR::indices()).
# We just need to define a CST_Start call with all the information:
# Path templates: '$var$' and '$sdate$' are placeholders expanded by Start()
# with the values of the 'var' and 'sdate' arguments below:
repos1 <- "/esarchive/exp/ecmwf/system5_m1/monthly_mean/$var$_f6h/$var$_$sdate$.nc"
repos2 <- "/esarchive/exp/ecmwf/system4_m1/monthly_mean/$var$_f6h/$var$_$sdate$.nc"
res <- CST_Start(dat = list(list(name = 'system4_m1', path = repos2),
list(name = 'system5_m1', path = repos1)),
var = c('tas', 'sfcWind'),
sdate = c('20160101', '20170101'),
ensemble = startR::indices(1:2),
time = startR::indices(1:2),
lat = startR::indices(1:10),
lon = startR::indices(1:10),
synonims = list(lat = c('lat', 'latitude'),
lon = c('lon', 'longitude')),
return_vars = list(time = 'sdate',
longitude = 'dat',
latitude = 'dat'),
metadata_dims = c('dat', 'var'),
retrieve = TRUE)
# Now we can explore the object:
# 1st level
names(res)
# "data" "dims" "coords" "attrs"
dim(res$data)
# dat var sdate ensemble time lat lon
# 2 2 2 2 2 10 10
res$coords$lon
# [1] 0.000000 0.703125 1.406250 2.109375 2.812500 3.515625 4.218750 4.921875
# [9] 5.625000 6.328125
attr(res$coords$lon, 'indices')
# [1] FALSE
# NOTE: The attribute 'indices' is FALSE, it means that the longitude elements
# are the actual values of the longitude coordinate.
res$coords$ensemble
# [1] 1 2
# attr(,"indices")
# [1] TRUE
# Now we take a look into the Dates array. It must have the time dimensions
# of the data.
dim(res$attrs$Dates)
# sdate time
# 2 2
# To see the nested list structure of the object, we just need to use the
# function str():
str(res)
#-----------------------------------------------------
# (2) Example using as.s2dv_cube() function
# We'll load the data with Start and then we'll transform the 'startR_array'
# to 's2dv_cube' object with the function as.s2dv_cube(). We are going
# to load the same data as before, with the same call:
repos1 <- "/esarchive/exp/ecmwf/system5_m1/monthly_mean/$var$_f6h/$var$_$sdate$.nc"
repos2 <- "/esarchive/exp/ecmwf/system4_m1/monthly_mean/$var$_f6h/$var$_$sdate$.nc"
# Same call as in Example 1, but with startR::Start() directly (it returns a
# 'startR_array' instead of an 's2dv_cube'):
res <- Start(dat = list(list(name = 'system4_m1', path = repos2),
list(name = 'system5_m1', path = repos1)),
var = c('tas', 'sfcWind'),
sdate = c('20160101', '20170101'),
ensemble = startR::indices(1:2),
time = startR::indices(1:2),
lat = startR::indices(1:10),
lon = startR::indices(1:10),
synonims = list(lat = c('lat', 'latitude'),
lon = c('lon', 'longitude')),
return_vars = list(time = 'sdate',
longitude = 'dat',
latitude = 'dat'),
metadata_dims = c('dat', 'var'),
retrieve = TRUE)
# Now, we use the function as.s2dv_cube() to transform the 'startR_array'
# into an 's2dv_cube':
res_cube <- as.s2dv_cube(res)
# If we type the object name directly into the terminal, we can see
# all the elements nicely:
# > res_cube
# 's2dv_cube'
# Data [ 248.241973876953, 247.365753173828, 6.80753087997437, 5.46453714370728, 247.256896972656, 248.500869750977, 6.25862503051758, 5.76889991760254 ... ]
# Dimensions ( dat = 2, var = 2, sdate = 2, ensemble = 2, time = 2, lat = 10, lon = 10 )
# Coordinates
# * dat : system4_m1, system5_m1
# * var : tas, sfcWind
# * sdate : 20160101, 20170101
# ensemble : 1, 2
# time : 1, 2
# * lat : 89.4628215685774, 88.7669513528422, 88.0669716474306, 87.366063433082, 86.6648030134408, 85.9633721608804, 85.2618460607126, 84.5602613830534, 83.8586381286076, 83.1569881285417
# * lon : 0, 0.703125, 1.40625, 2.109375, 2.8125, 3.515625, 4.21875, 4.921875, 5.625, 6.328125
# Attributes
# Dates : 2016-02-01 2017-02-01 2016-03-01 2017-03-01
# varName : tas sfcWind
# metadata :
# time
# units : hours since 2016-01-01 00:00:00
# other : ndims, size, standard_name, calendar
# lon
# units : degrees_east
# long name : longitude
# other : ndims, size, standard_name, axis
# lat
# units : degrees_north
# long name : latitude
# other : ndims, size, standard_name, axis
# tas
# units : K
# long name : 2 metre temperature
# other : prec, dim, unlim, make_missing_value, missval, hasAddOffset, hasScaleFact, code, table, grid_type
# sfcWind
# units : m s**-1
# long name : 10 meter windspeed
# other : prec, dim, unlim, make_missing_value, missval, hasAddOffset, hasScaleFact, code, table, grid_type
# Datasets : system4_m1 ...
# when : 2024-01-17 11:38:27
# source_files : /esarchive/exp/ecmwf/system4_m1/monthly_mean/tas_f6h/tas_20160101.nc ...
# load_parameters :
# ( system4_m1 ) : dat = system4_m1, var = tas ..., sdate = 20160101 ...
# ...
################################################################################
\ No newline at end of file
#*******************************************************************************
# Title: Example script to save 's2dv_cube' to NetCDF using CST_SaveExp
# Author: Eva Rifà Rovira
# Date: 29/11/2024
#*******************************************************************************
# In this script, we'll see multiple ways to store the 's2dv_cube' (CST_SaveExp)
# or the multidimensional array (SaveExp) to NetCDF.
# Needed packages:
library(CSTools)
library(CSIndicators)
library(s2dv)
library(startR)
################################################################################
#-----------------------------------------------------
# Example 1: Multidimensional array and Dates, without metadata and coordinates
#-----------------------------------------------------
# (1.1) Minimal use case, without Dates
data <- array(1:5, dim = c(sdate = 5, lon = 4, lat = 4))
SaveExp(data, ftime_dim = NULL, memb_dim = NULL, dat_dim = NULL,
var_dim = NULL, single_file = TRUE)
SaveExp(data, ftime_dim = NULL, memb_dim = NULL, dat_dim = NULL,
var_dim = NULL, sdate_dim = NULL, single_file = FALSE) # same result
# (1.2) Forecast time dimension, without Dates
data <- array(1:5, dim = c(ftime = 5, lon = 4, lat = 4))
SaveExp(data, ftime_dim = 'ftime', memb_dim = NULL, dat_dim = NULL,
var_dim = NULL, sdate_dim = NULL, single_file = TRUE)
# (1.3) Start date dimension, without Dates
data <- array(1:5, dim = c(sdate = 5, lon = 4, lat = 4))
SaveExp(data, ftime_dim = NULL, memb_dim = NULL, dat_dim = NULL,
var_dim = NULL, sdate_dim = 'sdate', single_file = TRUE)
# (1.4) Only forecast time dimension (no sdate), with Dates
data <- array(1:5, dim = c(ftime = 5, lon = 4, lat = 4))
dates <- c('20000101', '20010102', '20020103', '20030104', '20040105')
dates <- as.Date(dates, format = "%Y%m%d", tz = "UTC")
# The Dates array must carry the same time dimension as the data:
dim(dates) <- c(ftime = 5)
SaveExp(data, ftime_dim = 'ftime', memb_dim = NULL, dat_dim = NULL,
var_dim = NULL, sdate_dim = NULL, Dates = dates, single_file = FALSE)
SaveExp(data, ftime_dim = 'ftime', memb_dim = NULL, dat_dim = NULL,
var_dim = NULL, sdate_dim = NULL, Dates = dates, single_file = TRUE)
# For this case we have the same result using: single_file = FALSE / TRUE.
# (1.5) Forecast time and 1 sdate, with Dates
data <- array(1:5, dim = c(sdate = 1, ftime = 5, lon = 4, lat = 4))
dates <- c('20000101', '20010102', '20020103', '20030104', '20040105')
dates <- as.Date(dates, format = "%Y%m%d", tz = "UTC")
dim(dates) <- c(sdate = 1, ftime = 5)
SaveExp(data, ftime_dim = 'ftime', memb_dim = NULL, dat_dim = NULL,
var_dim = NULL, sdate_dim = 'sdate', Dates = dates, single_file = FALSE)
SaveExp(data, ftime_dim = 'ftime', memb_dim = NULL, dat_dim = NULL,
var_dim = NULL, sdate_dim = 'sdate', Dates = dates, single_file = TRUE)
# (1.6) Test global attributes and extra_string, single file
SaveExp(data, ftime_dim = 'ftime', memb_dim = NULL, dat_dim = NULL,
var_dim = NULL, sdate_dim = 'sdate', Dates = dates, single_file = TRUE,
extra_string = 'test',
global_attrs = list(system = 'tes1', reference = 'test2'))
# (1.7) Test global attributes and extra_string, separated files
SaveExp(data, ftime_dim = 'ftime', memb_dim = NULL, dat_dim = NULL,
var_dim = NULL, sdate_dim = 'sdate', Dates = dates, single_file = FALSE,
extra_string = 'test',
global_attrs = list(system = 'tes1', reference = 'test2'))
#-----------------------------------------------------
# Example 2: Test sample data from Start and from Load
#-----------------------------------------------------
# (2.1) Test SaveExp
exp <- CSTools::lonlat_prec_st
# Extract the pieces of the 's2dv_cube' that SaveExp() takes separately.
# NOTE: use '<-' for assignment (not '='), following the R style guide:
data <- exp$data
Dates <- exp$attrs$Dates
coords <- exp$coords
varname <- exp$attrs$Variable$varName
metadata <- exp$attrs$Variable$metadata
# Save in separated files (one per dataset / variable / start date):
SaveExp(data = data, Dates = Dates, coords = coords, varname = varname,
metadata = metadata, ftime_dim = 'ftime', startdates = 1:4,
var_dim = 'var', memb_dim = 'member', dat_dim = 'dataset',
sdate_dim = 'sdate', single_file = FALSE)
# Save everything in a single NetCDF file:
SaveExp(data = data, Dates = Dates, coords = coords, varname = varname,
metadata = metadata, ftime_dim = 'ftime', startdates = 1:4,
var_dim = 'var', memb_dim = 'member', dat_dim = 'dataset',
sdate_dim = 'sdate', single_file = TRUE)
# (2.2) lonlat_temp_st$exp in a single file with units 'hours since'
# (2.2.1) We save the data
data <- lonlat_temp_st$exp
CST_SaveExp(data = data, ftime_dim = 'ftime',
var_dim = 'var', dat_dim = 'dataset', sdate_dim = 'sdate',
units_hours_since = TRUE, single_file = TRUE)
# (2.2.2) Now we read the output with Start:
sdate <- as.vector(lonlat_temp_st$exp$coords$sdate)
# Single-file output name is <var>_<first sdate>_<last sdate>.nc in getwd():
path <- paste0(getwd(),"/$var$_", sdate[1], "_", sdate[length(sdate)], ".nc")
out <- Start(dat = path,
var = 'tas',
member = 'all',
sdate = 'all',
ftime = 'all',
lat = 'all',
lon = 'all',
return_vars = list(lon = 'dat',
lat = 'dat',
ftime = NULL,
sdate = NULL),
retrieve = TRUE)
attributes(out)$Variables$common$ftime
out_cube <- as.s2dv_cube(out)
# Rename 'dat' back to 'dataset' so the cube matches the original object:
out_cube <- CST_ChangeDimNames(out_cube,
original_names = c("dat"),
new_names = c("dataset"))
# Compare the data written and read back:
all.equal(data$data, out_cube$data)
identical(data$data, out_cube$data)
# Plot the results and compare
PlotEquiMap(out_cube$data[,,1,1,1,,], lon = out_cube$coords$lon,
lat = out_cube$coords$lat, filled.continents = FALSE)
PlotEquiMap(lonlat_temp_st$exp$data[,,1,1,1,,], lon = out_cube$coords$lon,
lat = out_cube$coords$lat, filled.continents = FALSE)
# (2.3) lonlat_temp_st$exp in a single file with units of time frequency
# (2.3.1) we save the data
data <- lonlat_temp_st$exp
# Same as (2.2) but with units_hours_since = FALSE, so the time axis is
# written with the time-frequency units (see the ftime values below):
CST_SaveExp(data = data, ftime_dim = 'ftime',
var_dim = 'var', dat_dim = 'dataset', sdate_dim = 'sdate',
single_file = TRUE, units_hours_since = FALSE)
dates <- lonlat_temp_st$exp$attrs$Dates
# (2.3.2) Now we read the output with Start:
sdate <- as.vector(lonlat_temp_st$exp$coords$sdate)
path <- paste0(getwd(),"/$var$_", sdate[1], "_", sdate[length(sdate)], ".nc")
out <- Start(dat = path,
var = 'tas',
lon = 'all',
lat = 'all',
ftime = 'all',
sdate = 'all',
member = 'all',
return_vars = list(
lon = 'dat',
lat = 'dat',
ftime = NULL,
sdate = NULL),
retrieve = TRUE)
attributes(out)$Variables$common$ftime
# [1] "1 months" "2 months" "3 months"
out_cube2 <- as.s2dv_cube(out)
# (2.4) lonlat_temp_st$exp in separated files with units of hours since
# (2.4.1) we save the data
data <- lonlat_temp_st$exp
# single_file = FALSE writes one file per dataset/variable/start date:
CST_SaveExp(data = data, ftime_dim = 'ftime',
var_dim = 'var', dat_dim = 'dataset', sdate_dim = 'sdate',
single_file = FALSE)
# (2.4.2) we load the data
sdate <- as.vector(lonlat_temp_st$exp$coords$sdate)
# Separated files are organized as <dataset>/<var>/<var>_<sdate>.nc:
path <- paste0(getwd(),"/dat1/$var$/$var$_$sdate$.nc")
out <- Start(dat = path, var = 'tas',
sdate = sdate,
lon = 'all',
lat = 'all',
ftime = 'all',
member = 'all',
return_vars = list(lon = 'dat',
lat = 'dat',
ftime = 'sdate'),
retrieve = TRUE)
out_cube1 <- as.s2dv_cube(out)
# (2.5) lonlat_prec_st$exp in a single file with units of time frequency
# (2.5.1) we save the data
data <- lonlat_prec_st
# Daily sample data, saved with time-frequency units (see ftime below):
CST_SaveExp(data = data, ftime_dim = 'ftime',
var_dim = 'var', dat_dim = 'dataset', sdate_dim = 'sdate',
single_file = TRUE, units_hours_since = FALSE)
# (2.5.2) we read the data
sdate <- as.vector(data$coords$sdate)
path <- paste0(getwd(),"/$var$_", sdate[1], "_", sdate[length(sdate)], ".nc")
out <- Start(dat = path,
var = 'prlr',
lon = 'all',
lat = 'all',
ftime = 'all',
sdate = 'all',
member = 'all',
return_vars = list(
lon = 'dat',
lat = 'dat',
ftime = NULL,
sdate = NULL),
retrieve = TRUE)
attributes(out)$Variables$common$ftime
# [1] "1 days" "2 days" "3 days" "4 days" "5 days" "6 days" "7 days"
# [8] "8 days" "9 days" "10 days" "11 days" "12 days" "13 days" "14 days"
# [15] "15 days" "16 days" "17 days" "18 days" "19 days" "20 days" "21 days"
# [22] "22 days" "23 days" "24 days" "25 days" "26 days" "27 days" "28 days"
# [29] "29 days" "30 days" "31 days"
out_cube <- as.s2dv_cube(out)
# (2.6) Test observations: lonlat_temp
# (2.6.1) Save the data
data <- lonlat_temp$obs
# Observations: no member nor variable dimension, hence memb_dim = NULL and
# var_dim = NULL:
CST_SaveExp(data = data, ftime_dim = 'ftime', memb_dim = NULL,
var_dim = NULL, dat_dim = 'dataset', sdate_dim = 'sdate',
single_file = TRUE, units_hours_since = FALSE)
# (2.6.2) Now we read the output with Start:
sdate <- c('20001101', '20051101')
path <- paste0(getwd(),"/$var$_", sdate[1], "_", sdate[length(sdate)], ".nc")
out <- Start(dat = path,
var = 'tas',
lon = 'all',
lat = 'all',
ftime = 'all',
member = 1,
sdate = 'all',
return_vars = list(
lon = 'dat',
lat = 'dat',
ftime = NULL,
sdate = NULL),
retrieve = TRUE)
dim(out)
attributes(out)$Variables$common$ftime
# (2.7) Test lonlat_prec
# (2.7.1) Save the data
data <- lonlat_prec
CST_SaveExp(data = data, ftime_dim = 'ftime', memb_dim = NULL,
var_dim = NULL, dat_dim = 'dataset', sdate_dim = 'sdate',
single_file = TRUE, units_hours_since = FALSE)
# (2.7.2) Now we read the output with Start:
sdate <- as.vector(data$coords$sdate)
path <- paste0(getwd(),"/$var$_", sdate[1], "_", sdate[length(sdate)], ".nc")
out <- Start(dat = path,
var = 'prlr',
lon = 'all',
lat = 'all',
ftime = 'all',
sdate = 'all',
member = 'all',
return_vars = list(
lon = 'dat',
lat = 'dat',
ftime = NULL,
sdate = NULL),
retrieve = TRUE)
dim(out)
# Compare with the dimensions of the original object:
lonlat_prec$dims
# (2.8) Test with ftime_dim NULL
data <- lonlat_temp$exp
# Drop the 'ftime' dimension first so that ftime_dim = NULL applies:
data <- CST_Subset(data, along = 'ftime', indices = 1, drop = 'selected')
CST_SaveExp(data = data, ftime_dim = NULL,
var_dim = NULL, dat_dim = 'dataset', sdate_dim = 'sdate',
single_file = FALSE, units_hours_since = FALSE)
#-----------------------------------------------------
# Example 3: Special cases
#-----------------------------------------------------
# (3.1) Two variables and two datasets in separated files
# (3.1.1) We load the data from Start
repos <- "/esarchive/exp/ecmwf/system5_m1/monthly_mean/$var$_f6h/$var$_$sdate$.nc"
repos2 <- "/esarchive/exp/ecmwf/system4_m1/monthly_mean/$var$_f6h/$var$_$sdate$.nc"
# Here 'repos' is the system5_m1 path and 'repos2' the system4_m1 path, so
# each dataset name is paired with its matching template:
data3 <- Start(dat = list(list(name = 'system4_m1', path = repos2),
list(name = 'system5_m1', path = repos)),
var = c('tas', 'sfcWind'),
sdate = c('20160101', '20170101'),
ensemble = indices(1),
time = indices(1:2),
lat = indices(1:10),
lon = indices(1:10),
synonims = list(lat = c('lat', 'latitude'),
lon = c('lon', 'longitude')),
return_vars = list(time = 'sdate',
longitude = 'dat',
latitude = 'dat'),
metadata_dims = c('dat', 'var'),
retrieve = TRUE)  # TRUE/FALSE spelled out (avoid 'T'/'F', which can be reassigned)
cube3 <- as.s2dv_cube(data3)
# (3.1.2) We save the data
CST_SaveExp(data = cube3, ftime_dim = 'time', var_dim = 'var',
memb_dim = 'ensemble', dat_dim = 'dat')
# (3.1.3) We read again the data with Start
# NOTE: in these output templates 'repos' now points to system4_m1 and
# 'repos2' to system5_m1 (the opposite of (3.1.1)); the pairing below keeps
# each dataset name with its own path (the original call had them swapped,
# which silently exchanged the two datasets on read-back):
repos <- paste0(getwd(), "/system4_m1/$var$/$var$_$sdate$.nc")
repos2 <- paste0(getwd(), "/system5_m1/$var$/$var$_$sdate$.nc")
data3out <- Start(dat = list(list(name = 'system4_m1', path = repos),
list(name = 'system5_m1', path = repos2)),
var = c('tas', 'sfcWind'),
sdate = c('20160101', '20170101'),
ensemble = indices(1),
time = indices(1:2),
lat = indices(1:10),
lon = indices(1:10),
synonims = list(lat = c('lat', 'latitude'),
lon = c('lon', 'longitude')),
return_vars = list(time = 'sdate',
longitude = 'dat',
latitude = 'dat'),
metadata_dims = c('dat', 'var'),
retrieve = TRUE)
# Compare the re-read data with the original:
summary(data3out)
summary(data3)
dim(data3)
dim(data3out)
# (3.2) Two variables and two datasets in the same file
CST_SaveExp(data = cube3, ftime_dim = 'time', var_dim = 'var',
memb_dim = 'ensemble', dat_dim = 'dat',
single_file = TRUE)
# TODO: Read the output with Start
# TODO: Read the output with Start
# (3.3) Observations (from startR usecase)
repos_exp <- paste0('/esarchive/exp/ecearth/a1tr/cmorfiles/CMIP/EC-Earth-Consortium/',
'EC-Earth3/historical/r24i1p1f1/Amon/$var$/gr/v20190312/',
'$var$_Amon_EC-Earth3_historical_r24i1p1f1_gr_$sdate$01-$sdate$12.nc')
# Load the experiment metadata only (retrieve = FALSE) to get its dates:
exp <- Start(dat = repos_exp,
var = 'tas',
sdate = as.character(c(2005:2008)),
time = indices(1:3),
lat = 1:10,
lat_reorder = Sort(),
lon = 1:10,
lon_reorder = CircularSort(0, 360),
synonims = list(lat = c('lat', 'latitude'),
lon = c('lon', 'longitude')),
return_vars = list(lon = NULL,
lat = NULL,
time = 'sdate'),
retrieve = FALSE)
dates <- attr(exp, 'Variables')$common$time
repos_obs <- '/esarchive/recon/ecmwf/erainterim/monthly_mean/$var$_f6h/$var$_$date$.nc'
# Load the observations at the experiment dates:
obs <- Start(dat = repos_obs,
var = 'tas',
date = unique(format(dates, '%Y%m')),
time = values(dates), #dim: [sdate = 4, time = 3]
lat = 1:10,
lat_reorder = Sort(),
lon = 1:10,
lon_reorder = CircularSort(0, 360),
time_across = 'date',
merge_across_dims = TRUE,
split_multiselected_dims = TRUE,
synonims = list(lat = c('lat', 'latitude'),
lon = c('lon', 'longitude')),
return_vars = list(lon = NULL,
lat = NULL,
time = 'date'),
retrieve = TRUE)
obscube <- as.s2dv_cube(obs)
# Save both in a single file and in separated files:
CST_SaveExp(data = obscube, ftime_dim = 'time', var_dim = 'var',
memb_dim = NULL, dat_dim = 'dat',
single_file = TRUE, extra_string = 'obs_tas')
CST_SaveExp(data = obscube, ftime_dim = 'time', var_dim = 'var',
memb_dim = NULL, dat_dim = 'dat',
single_file = FALSE, extra_string = 'obs_tas')
#-----------------------------------------------------
# Example 4: Time bounds:
#-----------------------------------------------------
# example: /esarchive/exp/ncep/cfs-v2/weekly_mean/s2s/tas_f24h/tas_20231128.nc
library(CSIndicators)
exp <- CSTools::lonlat_prec_st
exp$attrs$Dates <- Reorder(exp$attrs$Dates, c(2,1))
# CST_PeriodAccumulation() collapses 'ftime' and stores time bounds in
# res$attrs$time_bounds:
res <- CST_PeriodAccumulation(data = exp, time_dim = 'ftime',
start = list(10, 03), end = list(20, 03))
# > dim(res$attrs$Dates)
# sdate
# 3
# (4.1) All data in a single file
CST_SaveExp(data = res, ftime_dim = NULL, var_dim = 'var',
memb_dim = 'member', dat_dim = 'dataset',
startdates = res$attrs$Dates, single_file = TRUE)
# (4.1.1) Same with SaveExp
SaveExp(data = res$data, coords = res$coords,
Dates = NULL, time_bounds = res$attrs$time_bounds,
ftime_dim = NULL, var_dim = 'var',
varname = res$attrs$Variable$varName,
metadata = res$attrs$Variable$metadata,
memb_dim = 'member', dat_dim = 'dataset',
startdates = res$attrs$Dates, single_file = TRUE)
# (4.2) All data in separated files
CST_SaveExp(data = res, ftime_dim = NULL, var_dim = 'var',
memb_dim = 'member', dat_dim = 'dataset',
startdates = res$attrs$Dates, single_file = FALSE)
# (4.2.1) Same with SaveExp
SaveExp(data = res$data, coords = res$coords,
Dates = res$attrs$Dates, time_bounds = res$attrs$time_bounds,
ftime_dim = NULL, var_dim = 'var',
varname = res$attrs$Variable$varName,
metadata = res$attrs$Variable$metadata,
memb_dim = 'member', dat_dim = 'dataset',
startdates = res$attrs$Dates, single_file = FALSE)
# (4.3) Numeric 'startdates' instead of the Dates values
CST_SaveExp(data = res, ftime_dim = NULL, var_dim = 'var',
memb_dim = 'member', dat_dim = 'dataset',
startdates = 1:4, single_file = FALSE)
# (4.4) We change the time dimensions to ftime and sdate_dim = NULL
# Rename the 'sdate' dimension to 'time' in data, Dates and time bounds:
dim(res$attrs$time_bounds[[1]]) <- c(time = 3)
dim(res$attrs$time_bounds[[2]]) <- c(time = 3)
dim(res$attrs$Dates) <- c(time = 3)
dim(res$data) <- c(dataset = 1, var = 1, member = 6, time = 3, lat = 4, lon = 4)
# (4.4.1) All data in a single file
CST_SaveExp(data = res, ftime_dim = 'time', var_dim = 'var',
memb_dim = 'member', dat_dim = 'dataset', sdate_dim = NULL,
startdates = res$attrs$Dates, single_file = TRUE)
# (4.4.2) All data in separated files
CST_SaveExp(data = res, ftime_dim = 'time', var_dim = 'var',
memb_dim = 'member', dat_dim = 'dataset', sdate_dim = NULL,
startdates = res$attrs$Dates, single_file = FALSE)
# (4.5) Forecast time units
CST_SaveExp(data = res, ftime_dim = 'time', var_dim = 'var',
memb_dim = 'member', dat_dim = 'dataset', sdate_dim = NULL,
startdates = res$attrs$Dates, single_file = TRUE,
units_hours_since = FALSE)
#-----------------------------------------------------
# Example 5: Read data with Load
#-----------------------------------------------------
data <- lonlat_temp$exp
# data <- lonlat_temp$obs
# data <- lonlat_prec
# Save in separated files so they can be read back with Load (see the
# commented CST_Load call below):
CST_SaveExp(data = data, ftime_dim = 'ftime',
var_dim = NULL, dat_dim = 'dataset', sdate_dim = 'sdate',
single_file = FALSE, units_hours_since = FALSE)
# Now we read the output with Load:
# startDates <- c('20001101', '20011101', '20021101',
# '20031101', '20041101', '20051101')
# infile <- list(path = paste0(getwd(),
# '/system5c3s/$VAR_NAME$/$VAR_NAME$_$START_DATE$.nc'))
# out_lonlat_temp <- CST_Load(var = 'tas', exp = list(infile), obs = NULL,
# sdates = startDates,
# nmember = 15,
# leadtimemax = 3,
# latmin = 27, latmax = 48,
# lonmin = -12, lonmax = 40,
# output = "lonlat")
# NOTE: This case hasn't been developed since the function to load data
# that will be maintained is CST_Start.
################################################################################
\ No newline at end of file
#*******************************************************************************
# Title: Script to modify the dimensions of the 's2dv_cube'
# Author: Eva Rifà Rovira
# Date: 18/01/2024
#*******************************************************************************
# In this example, we will explore different methods to modify the dimensions
# of the 's2dv_cube':
# (1) Changing dimension names
# (2) Adding new dimensions
# (3) Merge 2 dimensions
# (4) Split a dimension
# Needed packages:
library(CSTools)
################################################################################
#-----------------------------------------------------
# Example 1: Change dimension names with CST_ChangeDimNames
#-----------------------------------------------------
# With using this function, we can change the dimension names in all elements
# of the 's2dv_cube' object:
# (1) Check original dimensions and coordinates
lonlat_temp$exp$dims
names(lonlat_temp$exp$coords)
dim(lonlat_temp$exp$attrs$Dates)
# (2) Rename the dimensions; names are changed consistently in data, coords
# and Dates:
exp <- CST_ChangeDimNames(lonlat_temp$exp,
original_names = c("dataset", "ftime", "lon", "lat"),
new_names = c("dat", "time", "longitude", "latitude"))
# (3) Check new dimensions and coordinates
exp$dims
names(exp$coords)
dim(exp$attrs$Dates)
#-----------------------------------------------------
# Example 2: Insert a new dimension with CST_InsertDim
#-----------------------------------------------------
# With this function, we can add a dimension into the 's2dv_cube'.
# NOTE: When the dimension that we want to add has length greater than 1, the
# values of the data are repeated for that new dimension.
# (1) Check original dimensions and coordinates
lonlat_temp$exp$dims
names(lonlat_temp$exp$coords)
# (2) Add a 'variable' dimension of length 2 at position 2:
exp <- CST_InsertDim(lonlat_temp$exp,
posdim = 2,
lendim = 2,
name = "variable",
values = c("tas", "tos"))
# (3) Check new dimensions and coordinates
exp$dims
exp$coords$variable
# We see that the values will be repeated along the new dimension:
exp$data[, , 1, 1, 1, 1, 1]
#-----------------------------------------------------
# Example 3: Merge two dimensions with CST_MergeDims
#-----------------------------------------------------
# In this example, we will merge the dimensions corresponding to the latitude
# and the longitude of the data. The new dimension will be named 'grid'.
# (1) Call the function:
new_data <- CST_MergeDims(lonlat_temp$exp, merge_dims = c('lat', 'lon'),
rename_dim = 'grid')
# (2) Check the dimensions of the data:
dim(new_data$data)
# dataset member sdate ftime grid
# 1 15 6 3 1166
# (3) Check the names of the coordinates:
names(new_data$coords)
# [1] "dataset" "member" "sdate" "ftime" "grid"
# (4) Explore the object by printing it in the terminal:
new_data
# NOTE: Be aware that when we print the object, the 'grid' name in the
# "Coordinates" field appears without the asterisk (*) at its left. This means
# that the values of that coordinate are indices, not the actual values. We
# can also find this information with the attribute "indices":
attributes(new_data$coords$grid)
# $indices
# [1] TRUE
# (5) Now, we want to merge the time dimensions start date and forecast time:
new_data <- CST_MergeDims(data = lonlat_temp_st$exp, merge_dims = c('sdate', 'ftime'))
# In this case, the Dates dimensions will be merged too.
# (6) Check the dimensions of Dates:
dim(new_data$attrs$Dates)
# sdate
# 18
# NOTE: When we want to merge a temporal dimension with a dimension of a
# different nature, the Dates dimensions are kept as the original. In this
# case, the function returns a Warning Message, we must pay attention!
new_data <- CST_MergeDims(data = lonlat_temp$exp,
merge_dims = c('lat', 'ftime'),
rename_dim = 'newdim')
#-----------------------------------------------------
# Example 4: Split two dimensions with SplitDim and CST_SplitDim
#-----------------------------------------------------
# In this example, we will start working with the function SplitDim,
# which can be used to split dimensions of an array.
# NOTE: Take into account that time dimensions will be treated differently than
# other dimensions:
# (1) Decadal example: We define an array of consecutive days of different years:
dates <- seq(as.Date("01-01-2000", "%d-%m-%Y", tz = 'UTC'),
as.Date("31-12-2005","%d-%m-%Y", tz = 'UTC'), "day")
# 2192 days = 6 years (2000-2005), including two leap years (2000 and 2004):
dim(dates) <- c(time = 2192)
# (2) Now, we will split the array in a new 'year' dimension:
dates_year <- SplitDim(dates, indices = dates,
split_dim = 'time', freq = 'year')
dim(dates_year)
# time year
# 366 6
# (3) Now, we can try: freq = 'month' and 'day'
dates_month <- SplitDim(dates, indices = dates,
split_dim = 'time', freq = 'month')
dates_day <- SplitDim(dates, indices = dates,
split_dim = 'time', freq = 'day')
# (4) Finally, we need to convert them again from numeric to 'POSIXct'
# (values are days since epoch, hence the * 24 * 3600 to get seconds):
dates_year <- as.POSIXct(dates_year * 24 * 3600, origin = '1970-01-01', tz = 'UTC')
dates_month <- as.POSIXct(dates_month * 24 * 3600, origin = '1970-01-01', tz = 'UTC')
dates_day <- as.POSIXct(dates_day * 24 * 3600, origin = '1970-01-01', tz = 'UTC')
#-----------------------------------------------------
# In the following example, we will use the sample data of the package. We
# will use lonlat_prec_st because it is daily data:
# NOTE: By Jan 2024, a development is needed regarding updates in other fields
# of the 's2dv_cube'
# (1) Call the function CST_SplitDim with adding 'day' dimension:
data_day <- CST_SplitDim(lonlat_prec_st, indices = lonlat_prec_st$attrs$Dates[1, ],
split_dim = 'ftime', freq = 'day')
# (2) Explore the dimensions of the data array
dim(data_day$data)
# dataset var member sdate ftime lat lon day
# 1 1 6 3 1 4 4 31
# (3) Call the function CST_SplitDim with adding 'month' dimension:
data_month <- CST_SplitDim(lonlat_prec_st, indices = lonlat_prec_st$attrs$Dates[1,],
split_dim = 'ftime', freq = 'month')
dim(data_month$data)
# dataset var member sdate ftime lat lon month
# 1 1 6 3 31 4 4 1
# (4) Call the function CST_SplitDim with adding 'year' dimension
# (note the indices here run along 'sdate', the dimension being split):
data_year <- CST_SplitDim(lonlat_prec_st, indices = lonlat_prec_st$attrs$Dates[,1],
split_dim = 'sdate', freq = 'year')
dim(data_year$data)
# dataset var member sdate ftime lat lon year
# 1 1 6 1 31 4 4 3
################################################################################
\ No newline at end of file
#*******************************************************************************
# Title: Example script to subset any dimension of an 's2dv_cube'
# Author: Eva Rifà Rovira
# Date: 16/11/2024
#*******************************************************************************
# This example shows how to subset any dimension of an 's2dv_cube'. To do it,
# we will use the function CST_Subset. This function is the 's2dv_cube' method
# version of Subset from the package ClimProjDiags.
# (1) First we will see how Subset works.
# (2) Then, we will use CST_Subset with an 's2dv_cube'
# Needed packages:
library(CSTools)
library(ClimProjDiags)
################################################################################
#-----------------------------------------------------
# Example 1: Subset an example array
#-----------------------------------------------------
# This is a minimal use case about spatial coordinates subset.
# (1) We create the array and we print it:
dat <- array(1:100, dim = c(lat = 10, lon = 10))
dat
# [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8] [,9] [,10]
# [1,] 1 11 21 31 41 51 61 71 81 91
# [2,] 2 12 22 32 42 52 62 72 82 92
# [3,] 3 13 23 33 43 53 63 73 83 93
# [4,] 4 14 24 34 44 54 64 74 84 94
# [5,] 5 15 25 35 45 55 65 75 85 95
# [6,] 6 16 26 36 46 56 66 76 86 96
# [7,] 7 17 27 37 47 57 67 77 87 97
# [8,] 8 18 28 38 48 58 68 78 88 98
# [9,] 9 19 29 39 49 59 69 79 89 99
# [10,] 10 20 30 40 50 60 70 80 90 100
# (2) We call the function Subset from ClimProjDiags to keep the first 5
# latitudes and the first 7 longitudes, and we see the result:
dat_subset <- Subset(x = dat, along = c('lat', 'lon'), indices = list(1:5, 1:7),
drop = 'all')
dat_subset
# [,1] [,2] [,3] [,4] [,5] [,6] [,7]
# [1,] 1 11 21 31 41 51 61
# [2,] 2 12 22 32 42 52 62
# [3,] 3 13 23 33 43 53 63
# [4,] 4 14 24 34 44 54 64
# [5,] 5 15 25 35 45 55 65
#-----------------------------------------------------
# Example 2: Subset an 's2dv_cube' using sample data
#-----------------------------------------------------
# In this example we will not drop any dimension, we will select only the first
# member, the first and the second start dates, and also subset the longitude and
# keep only the values from [0, 21]:
# (1) Explore the sample data:
# (1) Inspect the sample experiment data shipped with CSTools:
exp_cube <- lonlat_temp_st$exp
print(exp_cube$dims)
# dataset var member sdate ftime lat lon
# 1 1 15 6 3 22 53
print(exp_cube)
# 's2dv_cube'
# Data [ 279.994110107422, 280.337463378906, 279.450866699219, ... ]
# Dimensions ( dataset = 1, var = 1, member = 15, sdate = 6, ftime = 3,
# lat = 22, lon = 53 )
# Coordinates
# * dataset : dat1
# * var : tas
# member : 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15
# * sdate : 20001101, 20011101, 20021101, 20031101, 20041101, 20051101
# ftime : 1, 2, 3
# * lat : 48, 47, 46, 45, 44, 43, 42, 41, 40, 39, 38, 37, 36, 35, 34, ...
# * lon : 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, ...
# Attributes
# Dates : 2000-11-01 2001-11-01 2002-11-01 2003-11-01 2004-11-01 ...
# varName : tas
# metadata :
# lat
# units : degrees_north
# long name : latitude
# lon
# units : degrees_east
# long name : longitude
# ftime
# units : hours since 2000-11-01 00:00:00
# tas
# units : K
# long name : 2 metre temperature
# Datasets : dat1
# when : 2023-10-02 10:11:06
# source_files : /monthly_mean/tas_f6h/tas_20001101.nc ...
# load_parameters :
# ( dat1 ) : dataset = dat1, var = tas, sdate = 20001101 ...
# ...
# (2) Subset the 's2dv_cube': keep only member 1, the first two start dates,
# and the first 22 longitudes (lon values 0 to 21). drop = 'none' preserves
# the resulting length-1 dimensions:
exp_subset <- CST_Subset(
  x = exp_cube,
  along = c('member', 'sdate', 'lon'),
  indices = list(1, 1:2, 1:22),
  drop = 'none'
)
# (3) Explore the subset 's2dv_cube':
print(exp_subset)
# 's2dv_cube'
# Data [ 279.994110107422, 277.161102294922, 278.825836181641, 276.8271484375, 276.052703857422, 276.950805664062, 280.677215576172, 277.285247802734 ... ]
# Dimensions ( dataset = 1, var = 1, member = 1, sdate = 2, ftime = 3, lat = 22, lon = 22 )
# Coordinates
# * dataset : dat1
# * var : tas
# member : 1
# * sdate : 20001101, 20011101
# ftime : 1, 2, 3
# * lat : 48, 47, 46, 45, 44, 43, 42, 41, 40, 39, 38, 37, 36, 35, 34, 33, 32, 31, 30, 29, 28, 27
# * lon : 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21
# Attributes
# Dates : 2000-11-01 2001-11-01 2000-12-01 2001-12-01 2001-01-01 ...
# varName : tas
# metadata :
# ftime
# units : hours since 2000-11-01 00:00:00
# other : ndims, size, standard_name, calendar
# lat
# units : degrees_north
# long name : latitude
# other : ndims, size, standard_name, axis
# lon
# units : degrees_east
# long name : longitude
# other : ndims, size, standard_name, axis
# tas
# units : K
# long name : 2 metre temperature
# other : prec, dim, unlim, make_missing_value, missval, hasAddOffset, hasScaleFact, code, table
# Datasets : dat1
# when : 2023-10-02 10:11:06
# source_files : /esarchive/exp/ecmwf/system5c3s/monthly_mean/tas_f6h/tas_20001101.nc ...
# load_parameters :
# ( dat1 ) : dataset = dat1, var = tas, sdate = 20001101 ...
# ...
################################################################################
\ No newline at end of file
......@@ -13,6 +13,7 @@ Analogs(
latL = NULL,
expVar = NULL,
obsVar = NULL,
sdate_dim = "sdate",
criteria = "Large_dist",
excludeTime = NULL,
lonVar = NULL,
......@@ -40,11 +41,13 @@ the same latitudinal and longitudinal dimensions as parameter 'expL' and a
single temporal dimension with the maximum number of available observations.}
\item{time_obsL}{A character string indicating the date of the observations
in the format "dd-mm-yyyy". Reference time to search for analogs.}
in the format "dd-mm-yyyy". Reference time to search for analogs. It must
have time dimensions.}
\item{time_expL}{An array of N named dimensions (coinciding with time
dimensions in expL) of character string(s) indicating the date(s) of the
experiment in the format "dd-mm-yyyy". Time(s) to find the analogs.}
experiment in the format "dd-mm-yyyy". Time(s) to find the analogs. If it
is not a scalar, it must have named dimensions.}
\item{lonL}{A vector containing the longitude of parameter 'expL'.}
......@@ -58,6 +61,9 @@ function will be the analog of parameter 'expVar'.}
\item{obsVar}{An array of N named dimensions containing the field of the
same variable as the passed in parameter 'expVar' for the same region.}
\item{sdate_dim}{A character string indicating the name of the start date
dimension. By default, it is set to 'sdate'.}
\item{criteria}{A character string indicating the criteria to be used for the
selection of analogs:
\itemize{\item{Large_dist} minimum Euclidean distance in the large scale pattern;
......@@ -137,7 +143,7 @@ the selection of the best analogs in a short number of posibilities, the best
ones. This function has no constraints on specific regions, variables to
downscale, or data to be used (seasonal forecast data, climate projections
data, reanalyses data). The regrid into a finer scale is done interpolating
with CST_Load. Then, this interpolation is corrected selecting the analogs in
with CST_Start. Then, this interpolation is corrected selecting the analogs in
the large and local scale based on the observations. The function is an
adapted version of the method of Yiou et al 2013.
}
......@@ -148,6 +154,7 @@ dim(expSLP) <- c(lat = 4, lon = 5)
obsSLP <- c(rnorm(1:180), expSLP * 1.2)
dim(obsSLP) <- c(time = 10, lat = 4, lon = 5)
time_obsSLP <- paste(rep("01", 10), rep("01", 10), 1994 : 2003, sep = "-")
dim(time_obsSLP) <- c(time = 10)
downscale_field <- Analogs(expL = expSLP, obsL = obsSLP,
time_obsL = time_obsSLP,time_expL = "01-01-1994")
......
......@@ -60,9 +60,9 @@ standard deviation and mean to that of the reference dataset.
}
\examples{
mod1 <- 1 : (1 * 3 * 4 * 5 * 6 * 7)
dim(mod1) <- c(dataset = 1, member = 3, sdate = 4, ftime = 5, lat = 6, lon = 7)
dim(mod1) <- c(dataset = 1, member = 3, sdate = 4, time = 5, lat = 6, lon = 7)
obs1 <- 1 : (1 * 1 * 4 * 5 * 6 * 7)
dim(obs1) <- c(dataset = 1, member = 1, sdate = 4, ftime = 5, lat = 6, lon = 7)
dim(obs1) <- c(dataset = 1, member = 1, sdate = 4, time = 5, lat = 6, lon = 7)
a <- BiasCorrection(exp = mod1, obs = obs1)
}
\references{
......
......@@ -9,6 +9,7 @@ CST_Analogs(
obsL,
expVar = NULL,
obsVar = NULL,
sdate_dim = "sdate",
region = NULL,
criteria = "Large_dist",
excludeTime = NULL,
......@@ -42,6 +43,9 @@ analog of parameter 'expVar'.}
\item{obsVar}{An 's2dv_cube' containing the field of the same variable as the
passed in parameter 'expVar' for the same region.}
\item{sdate_dim}{A character string indicating the name of the start date
dimension. By default, it is set to 'sdate'.}
\item{region}{A vector of length four indicating the minimum longitude,
the maximum longitude, the minimum latitude and the maximum latitude.}
......@@ -69,7 +73,8 @@ and dates are taken from element \code{$attrs$Dates} from expL.}
\item{time_obsL}{A character string indicating the date of the observations
in the date format (i.e. "yyyy-mm-dd"). By default it is NULL and dates are
taken from element \code{$attrs$Dates} from obsL.}
taken from element \code{$attrs$Dates} from obsL. It must have time
dimensions.}
\item{nAnalogs}{Number of Analogs to be selected to apply the criteria
'Local_dist' or 'Local_cor'. This is not necessarily the number of analogs
......@@ -119,7 +124,7 @@ analogs.
This function has no constraints on specific regions, variables to downscale,
or data to be used (seasonal forecast data, climate projections data,
reanalyses data). The regrid into a finer scale is done interpolating with
CST_Load. Then, this interpolation is corrected selecting the analogs in the
CST_Start. Then, this interpolation is corrected selecting the analogs in the
large and local scale in based of the observations. The function is an
adapted version of the method of Yiou et al 2013. For an advanced search of
Analogs (multiple Analogs, different criterias, further information from the
......@@ -135,6 +140,7 @@ time_obsL <- as.POSIXct(paste(rep("01", 10), rep("01", 10), 1994:2003, sep = "-"
format = "\%d-\%m-\%y")
dim(time_obsL) <- c(time = 10)
time_expL <- time_obsL[1]
dim(time_expL) <- c(time = 1)
lon <- seq(-1, 5, 1.5)
lat <- seq(30, 35, 1.5)
coords <- list(lon = seq(-1, 5, 1.5), lat = seq(30, 35, 1.5))
......@@ -155,8 +161,7 @@ from surface pressure using analogues. Clim. Dyn., 41, 1419-1437.
\email{pascal.yiou@lsce.ipsl.fr}
}
\seealso{
\code{\link{CST_Load}}, \code{\link[s2dv]{Load}} and
\code{\link[s2dv]{CDORemap}}
\code{\link{CST_Start}}, \code{\link[startR]{Start}}
}
\author{
M. Carmen Alvarez-Castro, \email{carmen.alvarez-castro@cmcc.it}
......
......@@ -19,11 +19,11 @@ CST_Anomaly(
)
}
\arguments{
\item{exp}{An object of class \code{s2dv_cube} as returned by \code{CST_Load}
\item{exp}{An object of class \code{s2dv_cube} as returned by \code{CST_Start}
function, containing the seasonal forecast experiment data in the element
named \code{$data}.}
\item{obs}{An object of class \code{s2dv_cube} as returned by \code{CST_Load}
\item{obs}{An object of class \code{s2dv_cube} as returned by \code{CST_Start}
function, containing the observed data in the element named \code{$data}.}
\item{dim_anom}{A character string indicating the name of the dimension
......@@ -86,7 +86,7 @@ anom <- CST_Anomaly(exp = exp, obs = obs, cross = FALSE, memb = TRUE)
}
\seealso{
\code{\link[s2dv]{Ano_CrossValid}}, \code{\link[s2dv]{Clim}} and
\code{\link{CST_Load}}
\code{\link{CST_Start}}
}
\author{
Perez-Zanon Nuria, \email{nuria.perez@bsc.es}
......
......@@ -16,16 +16,16 @@ CST_BiasCorrection(
)
}
\arguments{
\item{exp}{An object of class \code{s2dv_cube} as returned by \code{CST_Load}
\item{exp}{An object of class \code{s2dv_cube} as returned by \code{CST_Start}
function, containing the seasonal forecast experiment data in the element
named \code{$data} with at least time and member dimensions.}
\item{obs}{An object of class \code{s2dv_cube} as returned by \code{CST_Load}
\item{obs}{An object of class \code{s2dv_cube} as returned by \code{CST_Start}
function, containing the observed data in the element named \code{$data}
with at least time dimension.}
\item{exp_cor}{An object of class \code{s2dv_cube} as returned by
\code{CST_Load} function, containing the seasonal forecast experiment to be
\code{CST_Start} function, containing the seasonal forecast experiment to be
corrected with at least time dimension. If it is NULL, the 'exp' forecast
will be corrected. If there is only one corrected dataset, it should not
have dataset dimension. If there is a corresponding corrected dataset for
......@@ -63,9 +63,9 @@ standard deviation and mean to that of the reference dataset.
}
\examples{
mod1 <- 1 : (1 * 3 * 4 * 5 * 6 * 7)
dim(mod1) <- c(dataset = 1, member = 3, sdate = 4, ftime = 5, lat = 6, lon = 7)
dim(mod1) <- c(dataset = 1, member = 3, sdate = 4, time = 5, lat = 6, lon = 7)
obs1 <- 1 : (1 * 1 * 4 * 5 * 6 * 7)
dim(obs1) <- c(dataset = 1, member = 1, sdate = 4, ftime = 5, lat = 6, lon = 7)
dim(obs1) <- c(dataset = 1, member = 1, sdate = 4, time = 5, lat = 6, lon = 7)
lon <- seq(0, 30, 5)
lat <- seq(0, 25, 5)
coords <- list(lat = lat, lon = lon)
......
......@@ -22,18 +22,18 @@ CST_Calibration(
)
}
\arguments{
\item{exp}{An object of class \code{s2dv_cube} as returned by \code{CST_Load}
\item{exp}{An object of class \code{s2dv_cube} as returned by \code{CST_Start}
function with at least 'sdate' and 'member' dimensions, containing the
seasonal hindcast experiment data in the element named \code{data}. The
hindcast is used to calibrate the forecast in case the forecast is provided;
if not, the same hindcast will be calibrated instead.}
\item{obs}{An object of class \code{s2dv_cube} as returned by \code{CST_Load}
\item{obs}{An object of class \code{s2dv_cube} as returned by \code{CST_Start}
function with at least 'sdate' dimension, containing the observed data in
the element named \code{$data}.}
\item{exp_cor}{An optional object of class \code{s2dv_cube} as returned by
\code{CST_Load} function with at least 'sdate' and 'member' dimensions,
\code{CST_Start} function with at least 'sdate' and 'member' dimensions,
containing the seasonal forecast experiment data in the element named
\code{data}. If the forecast is provided, it will be calibrated using the
hindcast and observations; if not, the hindcast will be calibrated instead.
......@@ -181,7 +181,7 @@ Quarterly Journal of the Royal Meteorological Society, 141(688), 807-818.
\doi{10.1002/qj.2397}
}
\seealso{
\code{\link{CST_Load}}
\code{\link{CST_Start}}
}
\author{
Verónica Torralba, \email{veronica.torralba@bsc.es}
......
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/CST_ChangeDimNames.R
\name{CST_ChangeDimNames}
\alias{CST_ChangeDimNames}
\title{Change the name of one or more dimensions for an object of class s2dv_cube}
\usage{
CST_ChangeDimNames(data, original_names, new_names)
}
\arguments{
\item{data}{An object of class \code{s2dv_cube} whose dimension names
should be changed.}
\item{original_names}{A single character string or a vector indicating the
dimensions to be renamed.}
\item{new_names}{A single character string or a vector indicating the new
dimension names, in the same order as the dimensions in 'original_names'.}
}
\value{
An object of class \code{s2dv_cube} with similar data, coordinates and
attributes as the \code{data} input, but with modified dimension names.
}
\description{
Change the names of the dimensions specified in 'original_names' to the names
in 'new_names'. The coordinate names and the dimensions of any attributes
are also modified accordingly.
}
\examples{
# Example with sample data:
# Check original dimensions and coordinates
lonlat_temp$exp$dims
names(lonlat_temp$exp$coords)
dim(lonlat_temp$exp$attrs$Dates)
# Change 'dataset' to 'dat' and 'ftime' to 'time'
exp <- CST_ChangeDimNames(lonlat_temp$exp,
original_names = c("dataset", "ftime"),
new_names = c("dat", "time"))
# Check new dimensions and coordinates
exp$dims
names(exp$coords)
dim(exp$attrs$Dates)
}
\author{
Agudetse Roures Victoria, \email{victoria.agudetse@bsc.es}
}
This diff is collapsed.
......@@ -10,14 +10,17 @@ CST_SplitDim(
indices = NULL,
freq = "monthly",
new_dim_name = NULL,
insert_ftime = NULL
insert_ftime = NULL,
ftime_dim = "time",
sdate_dim = "sdate",
return_indices = FALSE
)
}
\arguments{
\item{data}{A 's2dv_cube' object}
\item{split_dim}{A character string indicating the name of the dimension to
split.}
split. It is set as 'time' by default.}
\item{indices}{A vector of numeric indices or dates. If left at NULL, the
dates provided in the s2dv_cube object (element Dates) will be used.}
......@@ -32,6 +35,15 @@ dimension.}
\item{insert_ftime}{An integer indicating the number of time steps to add at
the begining of the time series.}
\item{ftime_dim}{A character string indicating the name of the forecast time
dimension. It is set as 'time' by default.}
\item{sdate_dim}{A character string indicating the name of the start date
dimension. It is set as 'sdate' by default.}
\item{return_indices}{A logical value that if it is TRUE, the indices
used in splitting the dimension will be returned. It is FALSE by default.}
}
\description{
This function split a dimension in two. The user can select the
......
......@@ -21,7 +21,10 @@ multidimensional distributed data sets. Then, the output is transformed into
}
\details{
It receives any number of parameters (`...`) that are automatically forwarded
to the `startR::Start` function. See details in `?startR::Start`.
to the `startR::Start` function. See details in `?startR::Start`. The
auxiliary functions used to define dimensions need to be called within the
startR namespace (e.g. startR::indices(), startR::values(), startR::Sort(),
startR::CircularSort(), startR::CDORemapper(), ...).
}
\examples{
\dontrun{
......
......@@ -151,7 +151,7 @@ Quarterly Journal of the Royal Meteorological Society, 141(688), 807-818.
\doi{10.1002/qj.2397}
}
\seealso{
\code{\link{CST_Load}}
\code{\link{CST_Start}}
}
\author{
Verónica Torralba, \email{veronica.torralba@bsc.es}
......
......@@ -73,6 +73,36 @@ colour bar will be automatically interpolated to match the number of breaks.
Each item in this list can be named, and the name will be used as title for
the corresponding colour bar (equivalent to the parameter 'bar_titles').}
\item{bar_limits}{Parameter from s2dv::ColorBar. Vector of two numeric values
with the extremes of the range of values represented in the colour bar. If
'var_limits' go beyond this interval, the drawing of triangle extremes is
triggered at the corresponding sides, painted in 'col_inf' and 'col_sup'.
Either of them can be set as NA and will then take as value the
corresponding extreme in 'var_limits' (hence a triangle end won't be
triggered for these sides). Takes as default the extremes of 'brks' if
available, else the same values as 'var_limits'.}
\item{triangle_ends}{Parameter from s2dv::ColorBar. Vector of two logical
elements, indicating whether to force the drawing of triangle ends at each
of the extremes of the colour bar. This choice is automatically made from
the provided 'brks', 'bar_limits', 'var_limits', 'col_inf' and 'col_sup',
but the behaviour can be manually forced to draw or not to draw the triangle
ends with this parameter. If 'cols' is provided, 'col_inf' and 'col_sup'
will take priority over 'triangle_ends' when deciding whether to draw the
triangle ends or not.}
\item{col_inf}{Parameter from s2dv::ColorBar. Colour to fill the inferior
triangle end with. Useful if specifying colours manually with parameter
'cols', to specify the colour and to trigger the drawing of the lower
extreme triangle, or if 'cols' is not specified, to replace the colour
automatically generated by ColorBar().}
\item{col_sup}{Parameter from s2dv::ColorBar. Colour to fill the superior
triangle end with. Useful if specifying colours manually with parameter
'cols', to specify the colour and to trigger the drawing of the upper
extreme triangle, or if 'cols' is not specified, to replace the colour
automatically generated by ColorBar().}
\item{col_unknown_map}{Colour to use to paint the grid cells for which a map
is not possible to be chosen according to 'map_select_fun' or for those
values that go beyond 'display_range'. Takes the value 'white' by default.}
......@@ -106,6 +136,10 @@ following order: bottom, left, top, and right. If not specified, use the
default of par("mar"), c(5.1, 4.1, 4.1, 2.1). Used as 'margin_scale' in
s2dv::PlotEquiMap.}
\item{bar_extra_margin}{Parameter from s2dv::ColorBar. Extra margins to be
added around the colour bar, in the format c(y1, x1, y2, x2). The units are
margin lines. Takes rep(0, 4) by default.}
\item{fileout}{File where to save the plot. If not specified (default) a
graphics device will pop up. Extensions allowed: eps/ps, jpeg, png, pdf, bmp
and tiff}
......
......@@ -70,5 +70,5 @@ dim(pdf_2) <- c(statistic = 2)
PlotPDFsOLE(pdf_1, pdf_2)
}
\author{
Eroteida Sanchez-Garcia - AEMET, //email{esanchezg@aemet.es}
Eroteida Sanchez-Garcia - AEMET, \email{esanchezg@aemet.es}
}
This diff is collapsed.