diff --git a/DESCRIPTION b/DESCRIPTION index d393462c66b5762a6017af53646cc2ca16c9a689..bde7bca03c19468236c24e7a21984aff0eb7476e 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,19 +1,25 @@ Package: s2dverification Title: Set of Common Tools for Forecast Verification -Version: 2.7.3 +Version: 2.8.0 Authors@R: c( person("Virginie", "Guemas", , "virginie.guemas@bsc.es", role = "aut"), person("Nicolau", "Manubens", , "nicolau.manubens@bsc.es", role = c("aut", "cre")), person("Javier", "Garcia-Serrano", , "javier.garcia@bsc.es", role = "aut"), person("Neven", "Fuckar", , "neven.fuckar@bsc.es", role = "aut"), + person("Luis", "Rodrigues", , "lrodrigues@ic3.cat", role = "aut"), person("Louis-Philippe", "Caron", , "louis-philippe.caron@bsc.es", role = "aut"), person("Omar", "Bellprat", , "omar.bellprat@bsc.es", role = "aut"), person("Veronica", "Torralba", , "veronica.torralba@bsc.es", role = "aut"), + person("Alasdair", "Hunter", , "alasdair.hunter@bsc.es", role = "aut"), person("Chloe", "Prodhomme", , "chloe.prodhomme@bs.es", role = "aut"), + person("Constantin", "Ardilouze", , "constantin.ardilouze@meteo.fr", role = "aut"), + person("Lauriane", "Batte", , "lauriane.batte@meteo.fr", role = "aut"), person("Martin", "Menegoz", , "martin.menegoz@bsc.es", role = "aut"), person("Fabian", "Lienert", , "fabian.lienert@bsc.es", role = "aut"), + person("Nube", "Gonzalez", , "nube.gonzalez@bsc.es", role = "aut"), person("Ludovic", "Auger", , "ludovic.auger@meteo.fr", role = "aut"), - person("Isabel", "Andreu-Burillo", , "isabel.andreu.burillo@ic3.cat", role = "aut")) + person("Isabel", "Andreu-Burillo", , "isabel.andreu.burillo@ic3.cat", role = "aut"), + person("Ramiro", "Saurral", , "ramiro.saurral@ic3.cat", role = "aut")) Description: Set of tools to verify forecasts through the computation of typical prediction scores against one or more observational datasets or reanalyses (a reanalysis being a physical extrapolation of observations that relies on the equations from a model, not a pure observational dataset). Intended for seasonal to decadal climate forecasts although can be useful to verify other kinds of forecasts. The package can be helpful in climate sciences for other purposes than forecasting. Depends: R (>= 2.14.1), @@ -32,7 +38,7 @@ Imports: plyr Suggests: easyVerification -License: GPL-3 +License: LGPL-3 URL: https://earth.bsc.es/gitlab/es/s2dverification/wikis/home BugReports: https://earth.bsc.es/gitlab/es/s2dverification/issues LazyData: true diff --git a/R/AnimateMap.R b/R/AnimateMap.R index 86bacf35fd13322d1df124499e912eb757f85ff8..86ca0bffe84e764ac25a47d1b475dac67d05462a 100644 --- a/R/AnimateMap.R +++ b/R/AnimateMap.R @@ -1,10 +1,12 @@ -AnimateMap <- function(var, lon, lat, toptitle = c("", "", "", - "", "", "", "", "", "", "", ""), sizetit = 1, units = "", - monini = 1, freq = 12, msk95lev = FALSE, brks = NULL, cols = NULL, - filled.continents = FALSE, lonmin = 0, lonmax = 360, latmin = -90, - latmax = 90, intlon = 20, intlat = 30, drawleg = TRUE, subsampleg = 1, - colNA = "white", equi = TRUE, fileout = c("output1_animvsltime.gif", - "output2_animvsltime.gif", "output3_animvsltime.gif"), ...) 
{ +AnimateMap <- function(var, lon, lat, toptitle = rep("", 11), sizetit = 1, + units = "", monini = 1, freq = 12, msk95lev = FALSE, + brks = NULL, cols = NULL, filled.continents = FALSE, + lonmin = 0, lonmax = 360, latmin = -90, latmax = 90, + intlon = 20, intlat = 30, drawleg = TRUE, + subsampleg = 1, colNA = "white", equi = TRUE, + fileout = c("output1_animvsltime.gif", + "output2_animvsltime.gif", + "output3_animvsltime.gif"), ...) { # Process the user graphical parameters that may be passed in the call ## Graphical parameters to exclude excludedArgs <- c("bg", "col", "fin", "lab", "lend", "new", "pin", "ps") diff --git a/R/PlotAno.R b/R/PlotAno.R index b9a71534bea40d8ec8ebcef33ab13aaf88e05fd4..a302e00132566a301ab2c8a870967cd31b3da60d 100644 --- a/R/PlotAno.R +++ b/R/PlotAno.R @@ -1,14 +1,9 @@ -PlotAno <- function(exp_ano, obs_ano = NULL, sdates, - toptitle = c('', '', '', '', '', '', '', '', '', '', '', '', - '', '', ''), - ytitle = c('', '', '', '', '', '', '', '', '', '', '', '', - '', '', ''), - limits = NULL, legends = NULL, freq = 12, biglab = FALSE, - fill = TRUE, memb = TRUE, ensmean = TRUE, linezero = FALSE, - points = FALSE, vlines = NULL, sizetit = 1, - fileout = c('output1_plotano.eps', - 'output2_plotano.eps', 'output3_plotano.eps', - 'output4_plotano.eps', 'output5_plotano.eps'), +PlotAno <- function(exp_ano, obs_ano = NULL, sdates, toptitle = rep('', 15), + ytitle = rep('', 15), limits = NULL, legends = NULL, + freq = 12, biglab = FALSE, fill = TRUE, memb = TRUE, + ensmean = TRUE, linezero = FALSE, points = FALSE, + vlines = NULL, sizetit = 1, + fileout = paste0('output', 1:5, '_plotano.eps'), width = 8, height = 5, size_units = 'in', res = 100, ...) { # Process the user graphical parameters that may be passed in the call ## Graphical parameters to exclude diff --git a/R/Utils.R b/R/Utils.R index f958ecd10a854d4676a4b145ba4b040fa57f7d23..d5e3b1495521875527f58d21c1ad074f799f4811 100644 --- a/R/Utils.R +++ b/R/Utils.R @@ -330,13 +330,24 @@ } cat(paste0("! Warning: the dataset with index ", tail(work_piece[['indices']], 1), " in '", - work_piece[['dataset_type']], "' doesn't start at longitude 0 and will be re-interpolated in order to align its longitudes with the standard CDO grids definable with the names 'tgrid' or 'rx', which are by definition starting at the longitude 0.")) + work_piece[['dataset_type']], "' doesn't start at longitude 0 and will be re-interpolated in order to align its longitudes with the standard CDO grids definable with the names 'tgrid' or 'rx', which are by definition starting at the longitude 0.\n")) if (!is.null(mask)) { cat(paste0("! Warning: a mask was provided for the dataset with index ", tail(work_piece[['indices']], 1), " in '", - work_piece[['dataset_type']], "'. This dataset has been re-interpolated to align its longitudes to start at 0. You must re-interpolate the corresponding mask to align its longitudes to start at 0 as well, if you haven't done so yet. Running cdo remapcon,", common_grid_name, " original_mask_file.nc new_mask_file.nc will fix it.")) + work_piece[['dataset_type']], "'. This dataset has been re-interpolated to align its longitudes to start at 0. You must re-interpolate the corresponding mask to align its longitudes to start at 0 as well, if you haven't done so yet. 
Running cdo remapcon,", common_grid_name, " original_mask_file.nc new_mask_file.nc will fix it.\n")) } } + if (remap_needed && (grid_lons < common_grid_lons || grid_lats < common_grid_lats)) { + if (!is.null(work_piece[['progress_amount']])) { + cat("\n") + } + cat(paste0("! Warning: the dataset with index ", tail(work_piece[['indices']], 1), + " in '", work_piece[['dataset_type']], "' is originally on ", + "a grid coarser than the common grid and it has been ", + "extrapolated. Check the results carefully. It is ", + "recommended to specify as common grid the coarsest grid ", + "among all requested datasets via the parameter 'grid'.\n")) + } # Now calculate if the user requests for a lonlat subset or for the # entire field lonmin <- work_piece[['lon_limits']][1] diff --git a/man/AnimateMap.Rd b/man/AnimateMap.Rd index ed30e0190eb509dd5a24d45fdadc1386ecd53cd6..3376a68521e14f85ac2ef3e7f8817bc521b72f36 100644 --- a/man/AnimateMap.Rd +++ b/man/AnimateMap.Rd @@ -10,9 +10,8 @@ along the forecast time (lead-time) for all input experiments and input observational datasets. } \usage{ -AnimateMap(var, lon, lat, toptitle = c("", "", "", "", "", "", "", "", "", - "", ""), sizetit = 1, units = "", monini = 1, freq = 12, - msk95lev = FALSE, brks = NULL, cols = NULL, +AnimateMap(var, lon, lat, toptitle = rep("", 11), sizetit = 1, units = "", + monini = 1, freq = 12, msk95lev = FALSE, brks = NULL, cols = NULL, filled.continents = FALSE, lonmin = 0, lonmax = 360, latmin = -90, latmax = 90, intlon = 20, intlat = 30, drawleg = TRUE, subsampleg = 1, colNA='white', equi = TRUE, @@ -122,7 +121,6 @@ For more information about the parameters see `par`. } \details{ Examples of input: ------------------- 1- Outputs from clim (exp, obs, memb = FALSE): (nmod, nltime, nlat, nlon) @@ -208,9 +206,9 @@ AnimateMap(rms, sampleData$lon, sampleData$lat, toptitle = fileout = 'rmse_dec.gif') } \author{ -History: -1.0 - 2012-04 (V. Guemas, \email{virginie.guemas at bsc.es}) - Original code -1.1 - 2014-04 (N. Manubens, \email{nicolau.manubens at bsc.es}) - Formatting to CRAN -1.2 - 2015-05 (V. Guemas, \email{virginie.guemas at bsc.es}) - Use of PlotEquiMap and PlotStereoMap +History:\cr +1.0 - 2012-04 (V. Guemas, \email{virginie.guemas at bsc.es}) - Original code\cr +1.1 - 2014-04 (N. Manubens, \email{nicolau.manubens at bsc.es}) - Formatting to CRAN\cr +1.2 - 2015-05 (V. Guemas, \email{virginie.guemas at bsc.es}) - Use of PlotEquiMap and PlotStereoMap\cr } \keyword{dynamic} diff --git a/man/ArrayToNetCDF.Rd b/man/ArrayToNetCDF.Rd index 9e0188b46c67ad080faa8baad633b61d12788bb0..b857eaf2396b05aa9f0dcd26eea5dbc249bf520e 100644 --- a/man/ArrayToNetCDF.Rd +++ b/man/ArrayToNetCDF.Rd @@ -4,9 +4,14 @@ \description{This function takes as input one or a list of multidimensional R arrays and stores them in a NetCDF file, using the \code{ncdf4} package. The full path and name of the resulting file must be specified. 
Metadata can be attached to the arrays and propagated into the NetCDF file in 3 possible ways:\cr \itemize{ \item{Via the list names if a list of arrays is provided:}{Each name in the input list, corresponding to one multidimensional array, will be interpreted as the name of the variable it contains.\cr -E.g: \code{ArrayToNetCDF(arrays = list(temperature = array(1:9, c(3, 3))), file_path = 'example.nc')}} +E.g:\cr + \code{ +ArrayToNetCDF(arrays = list(temperature = array(1:9, c(3, 3))), + file_path = 'example.nc') + } + } \item{Via the dimension names of each provided array:}{The dimension names of each of the provided arrays will be interpreted as names for the dimensions of the NetCDF files. Read further for special dimension names that will trigger special behaviours, such as 'time' and 'var'.\cr -E.g: +E.g:\cr \code{ temperature <- array(rnorm(10 * 50 * 100), dim = c(10, 50, 100)) names(dim(temperature)) <- c('time', 'latitude', 'longitude') @@ -14,7 +19,7 @@ ArrayToNetCDF(list(temperature = temperature), file_path = 'example.nc') } } \item{Via the attribute 'variables' of each provided array:}{The arrays can be provided with metadata in an attribute named 'variables', which is expected to be a named list of named lists, where the names of the container list are the names of the variables present in the provided array, and where each sub-list contains metadata for each of the variables. The attribute names and values supported in the sub-lists must follow the same format the package \code{ncdf4} uses to represent the NetCDF file headers.\cr -E.g: +E.g:\cr \code{ a <- array(1:400, dim = c(5, 10, 4, 2)) metadata <- list( diff --git a/man/CDORemap.Rd b/man/CDORemap.Rd index ef6e812ab0a9738739bebd52641b22b339c87a6c..7960d7bbf5395f06d921f9465f55c9e7aeb59365 100644 --- a/man/CDORemap.Rd +++ b/man/CDORemap.Rd @@ -61,7 +61,8 @@ tas2 <- CDORemap(tas, lon, lat, 't170grid', 'bil', TRUE) # Arrays of any number of dimensions in any order can be provided. num_lats <- 25 num_lons <- 50 -tas <- array(1:(10*num_lats*10*num_lons*10), dim = c(10, num_lats, 10, num_lons, 10)) +tas <- array(1:(10*num_lats*10*num_lons*10), + dim = c(10, num_lats, 10, num_lons, 10)) names(dim(tas)) <- c('a', 'lat', 'b', 'lon', 'c') lon <- seq(0, 360 - 360/num_lons, length.out = num_lons) metadata <- list(lon = list(units = 'degrees_east')) @@ -88,7 +89,8 @@ tas2 <- CDORemap(tas, lon, lat, 't17grid', 'bil', FALSE) # even if avoid_wrties = FALSE. 
num_lats <- 25 num_lons <- 50 -tas <- array(1:(10*num_lats*10*num_lons*10), dim = c(10, num_lats, 10, num_lons)) +tas <- array(1:(10*num_lats*10*num_lons*10), + dim = c(10, num_lats, 10, num_lons)) names(dim(tas)) <- c('a', 'lat', 'b', 'lon') lon <- seq(0, 360 - 360/num_lons, length.out = num_lons) metadata <- list(lon = list(units = 'degrees_east')) @@ -112,11 +114,13 @@ num_lats <- 25 num_lons <- 50 tas <- array(1:(1*num_lats*num_lons), dim = c(num_lats, num_lons)) names(dim(tas)) <- c('y', 'x') -lon <- array(seq(0, 360 - 360/num_lons, length.out = num_lons), dim = c(num_lons, num_lats)) +lon <- array(seq(0, 360 - 360/num_lons, length.out = num_lons), + dim = c(num_lons, num_lats)) metadata <- list(lon = list(units = 'degrees_east')) names(dim(lon)) <- c('x', 'y') attr(lon, 'variables') <- metadata -lat <- t(array(seq(-90, 90, length.out = num_lats), dim = c(num_lats, num_lons))) +lat <- t(array(seq(-90, 90, length.out = num_lats), + dim = c(num_lats, num_lons))) metadata <- list(lat = list(units = 'degrees_north')) names(dim(lat)) <- c('x', 'y') attr(lat, 'variables') <- metadata @@ -125,13 +129,16 @@ tas2 <- CDORemap(tas, lon, lat, 'r100x50', 'bil') # An example of an interpolation from an irregular grid onto a gaussian grid num_lats <- 25 num_lons <- 50 -tas <- array(1:(10*num_lats*10*num_lons*10), dim = c(10, num_lats, 10, num_lons)) +tas <- array(1:(10*num_lats*10*num_lons*10), + dim = c(10, num_lats, 10, num_lons)) names(dim(tas)) <- c('a', 'j', 'b', 'i') -lon <- array(seq(0, 360 - 360/num_lons, length.out = num_lons), dim = c(num_lons, num_lats)) +lon <- array(seq(0, 360 - 360/num_lons, length.out = num_lons), + dim = c(num_lons, num_lats)) metadata <- list(lon = list(units = 'degrees_east')) names(dim(lon)) <- c('i', 'j') attr(lon, 'variables') <- metadata -lat <- t(array(seq(-90, 90, length.out = num_lats), dim = c(num_lats, num_lons))) +lat <- t(array(seq(-90, 90, length.out = num_lats), + dim = c(num_lats, num_lons))) metadata <- list(lat = list(units = 'degrees_north')) names(dim(lat)) <- c('i', 'j') attr(lat, 'variables') <- metadata @@ -140,11 +147,14 @@ tas2 <- CDORemap(tas, lon, lat, 't17grid', 'bil') # Again, the dimensions can be in any order num_lats <- 25 num_lons <- 50 -tas <- array(1:(10*num_lats*10*num_lons), dim = c(10, num_lats, 10, num_lons)) +tas <- array(1:(10*num_lats*10*num_lons), + dim = c(10, num_lats, 10, num_lons)) names(dim(tas)) <- c('a', 'j', 'b', 'i') -lon <- array(seq(0, 360 - 360/num_lons, length.out = num_lons), dim = c(num_lons, num_lats)) +lon <- array(seq(0, 360 - 360/num_lons, length.out = num_lons), + dim = c(num_lons, num_lats)) names(dim(lon)) <- c('i', 'j') -lat <- t(array(seq(-90, 90, length.out = num_lats), dim = c(num_lats, num_lons))) +lat <- t(array(seq(-90, 90, length.out = num_lats), + dim = c(num_lats, num_lons))) names(dim(lat)) <- c('i', 'j') tas2 <- CDORemap(tas, lon, lat, 't17grid', 'bil') tas2 <- CDORemap(tas, lon, lat, 't17grid', 'bil', FALSE) @@ -152,6 +162,7 @@ tas2 <- CDORemap(tas, lon, lat, 't17grid', 'bil', FALSE) tas2 <- CDORemap(tas, lon, lat, 'external_file.nc', 'bil') } } +\note{This function was tested with CDO v.1.6.3.} \author{ History:\cr 0.0 - 2017-01 (N. Manubens, \email{nicolau.manubens at bsc.es}) - Original code. 
diff --git a/man/Clim.Rd b/man/Clim.Rd index a4225653bd63a7ab384c5f95d24beb03607bcb20..fba9d492d79879d715fcd8981e00259af2356e94 100644 --- a/man/Clim.Rd +++ b/man/Clim.Rd @@ -3,9 +3,10 @@ \title{Computes Bias Corrected Climatologies} \description{ This function computes only per-pair climatologies from the experimental and observational matrices output from \code{Load()}. -To compute plain climatologies from only experimental or observational data from \code{Load()}, the following code can be used: +To compute plain climatologies from only experimental or observational data from \code{Load()}, the following code can be used:\cr \code{ -clim <- array(apply(obs_data, c(1, 4, 5, 6), mean), dim = dim(obs_datta)[-c(2, 3)]) +clim <- array(apply(obs_data, c(1, 4, 5, 6), mean), + dim = dim(obs_data)[-c(2, 3)]) } The function \code{Clim()} computes per-pair climatologies using one of the following methods: \itemize{ diff --git a/man/Load.Rd b/man/Load.Rd index dfd61331609d7ec7f582db192efa2c82a4496aed..32e02ada994f8116c5a46f52869fdd73af9d0ad4 100644 --- a/man/Load.Rd +++ b/man/Load.Rd @@ -340,9 +340,12 @@ The default value is 2. In some cases, when specifying a path pattern (either in the parameters 'exp'/'obs' or in a configuration file) one can specify path patterns that contain shell globbing expressions. Too much freedom in putting globbing expressions in the path patterns can be dangerous and make \code{Load()} find a file in the file system for a start date for a dataset that really does not belong to that dataset. For example, if the file system contains two directories for two different experiments that share a part of their path and the path pattern contains globbing expressions: /experiments/model1/expA/monthly_mean/tos/tos_19901101.nc /experiments/model2/expA/monthly_mean/tos/tos_19951101.nc -And the path pattern is used as in the example right below to load data of only the experiment 'expA' of the model 'model1' for the starting dates '19901101' and '19951101', \code{Load()} will undesiredly yield data for both starting dates, even if in fact there is data only for the first one: - expA <- list(path = '/experiments/*/expA/monthly_mean/$VAR_NAME$/$VAR_NAME$_$START_DATE$.nc') - data <- Load('tos', list(expA), NULL, c('19901101', '19951101')) +And the path pattern is used as in the example right below to load data of only the experiment 'expA' of the model 'model1' for the starting dates '19901101' and '19951101', \code{Load()} will undesirably yield data for both starting dates, even if in fact there is data only for the first one:\cr + \code{ +expA <- list(path = file.path('/experiments/*/expA/monthly_mean/$VAR_NAME$', + '$VAR_NAME$_$START_DATE$.nc')) +data <- Load('tos', list(expA), NULL, c('19901101', '19951101')) + } To avoid these situations, the parameter \code{path_glob_permissive} is set by default to \code{'partial'}, which forces \code{Load()} to replace all the globbing expressions of a path pattern of a data set by fixed values taken from the path of the first found file for each data set, up to the folder right before the final files (globbing expressions in the file name will not be replaced, only those in the path to the file). Replacement of globbing expressions in the file name can also be triggered by setting \code{path_glob_permissive} to \code{FALSE} or \code{'no'}. If needed to keep all globbing expressions, \code{path_glob_permissive} can be set to \code{TRUE} or \code{'yes'}.
} } @@ -411,10 +414,10 @@ And the following attributes: } \author{ History:\cr -0.1 - 2011-03 (V. Guemas, \email{virginie.guemas at ic3.cat}) - Original code\cr -1.0 - 2013-09 (N. Manubens, \email{nicolau.manubens at ic3.cat}) - Formatting to CRAN\cr -1.2 - 2015-02 (N. Manubens, \email{nicolau.manubens at ic3.cat}) - Generalisation + parallelisation\cr -1.3 - 2015-07 (N. Manubens, \email{nicolau.manubens at ic3.cat}) - Improvements related to configuration file mechanism\cr +0.1 - 2011-03 (V. Guemas, \email{virginie.guemas at bsc.es}) - Original code\cr +1.0 - 2013-09 (N. Manubens, \email{nicolau.manubens at bsc.es}) - Formatting to CRAN\cr +1.2 - 2015-02 (N. Manubens, \email{nicolau.manubens at bsc.es}) - Generalisation + parallelisation\cr +1.3 - 2015-07 (N. Manubens, \email{nicolau.manubens at bsc.es}) - Improvements related to configuration file mechanism\cr 1.4 - 2016-01 (N. Manubens, \email{nicolau.manubens at bsc.es}) - Added subsetting capabilities\cr } \examples{ diff --git a/man/PlotAno.Rd b/man/PlotAno.Rd index c0185e41b0b9c4fec168adf252041cb6d883c5bb..f36ee3bba1a93cd5ee55107899d5e024c7d575e2 100644 --- a/man/PlotAno.Rd +++ b/man/PlotAno.Rd @@ -7,13 +7,11 @@ Plot Raw Or Smoothed Anomalies Plots timeseries of raw or smoothed anomalies of any variable output from \code{Load()} or \code{Ano()} or or \code{Ano_CrossValid()} or \code{Smoothing()}. } \usage{ -PlotAno(exp_ano, obs_ano = NULL, sdates, toptitle = c("", "", "", "", "", "", - "", "", "", "", "", "", "", "", ""), ytitle = c("", "", "", "", "", - "", "", "", "", "", "", "", "", "", ""), limits = NULL, - legends = NULL, freq = 12, biglab = FALSE, fill = TRUE, memb = TRUE, - ensmean = TRUE, linezero = FALSE, points = FALSE, vlines = NULL, - sizetit = 1, fileout = c("output1_plotano.eps", "output2_plotano.eps", - "output3_plotano.eps", "output4_plotano.eps", "output5_plotano.eps"), +PlotAno(exp_ano, obs_ano = NULL, sdates, toptitle = rep("", 15), + ytitle = rep("", 15), limits = NULL, legends = NULL, freq = 12, + biglab = FALSE, fill = TRUE, memb = TRUE, ensmean = TRUE, + linezero = FALSE, points = FALSE, vlines = NULL, sizetit = 1, + fileout = paste0("output", 1:5, "_plotano.eps"), width = 8, height = 5, size_units = 'in', res = 100, ...) } \arguments{ diff --git a/man/PlotLayout.Rd b/man/PlotLayout.Rd index c36e7fdb041462de23bf289cf18c821ded8ad264..9e0c2cb7937db03122e02a89a83365f2cc0bb54d 100644 --- a/man/PlotLayout.Rd +++ b/man/PlotLayout.Rd @@ -167,7 +167,8 @@ sampleData <- s2dverification:::.LoadSampleData('tos', c('experiment'), latmin = 27, latmax = 48, lonmin = -12, lonmax = 40) } -PlotLayout(PlotEquiMap, c('lat', 'lon'), sampleData$mod[1, , 1, 1, , ], sampleData$lon, sampleData$lat, +PlotLayout(PlotEquiMap, c('lat', 'lon'), sampleData$mod[1, , 1, 1, , ], + sampleData$lon, sampleData$lat, toptitle = 'Predicted tos for Nov 1960 from 1st Nov', titles = paste('Member', 1:15)) } diff --git a/man/RMS.Rd b/man/RMS.Rd index 34980b658275f642a6ac7576a15175992e4c3034..874fb969b8bda396f2b363242b5e4a9d22fb24e4 100644 --- a/man/RMS.Rd +++ b/man/RMS.Rd @@ -14,7 +14,8 @@ The confidence interval relies on a chi2 distribution.\cr .RMS provides the same functionality but taking a matrix of ensemble members as input (exp). 
} \usage{ -RMS(var_exp, var_obs, posloop = 1, posRMS = 2, compROW = NULL, limits = NULL, siglev = 0.95, conf = TRUE) +RMS(var_exp, var_obs, posloop = 1, posRMS = 2, compROW = NULL, + limits = NULL, siglev = 0.95, conf = TRUE) .RMS(exp, obs, siglev = 0.95, conf = TRUE) } @@ -78,7 +79,8 @@ dim_to_smooth <- 4 # Smooth along lead-times smooth_ano_exp <- Smoothing(ano_exp, runmean_months, dim_to_smooth) smooth_ano_obs <- Smoothing(ano_obs, runmean_months, dim_to_smooth) dim_to_mean <- 2 # Mean along members -required_complete_row <- 3 # Discard start-dates for which some leadtimes are missing +# Discard start-dates for which some leadtimes are missing +required_complete_row <- 3 leadtimes_per_startdate <- 60 rms <- RMS(Mean1Dim(smooth_ano_exp, dim_to_mean), Mean1Dim(smooth_ano_obs, dim_to_mean), diff --git a/man/SVD.Rd b/man/SVD.Rd index 7685c483297db498084c3363f9c69bc877cd4100..3c157e22313012de34b05a583b131b5de53b99eb 100644 --- a/man/SVD.Rd +++ b/man/SVD.Rd @@ -106,7 +106,8 @@ sampleData <- s2dverification:::.LoadSampleData('tos', c('experiment'), # one that explains the greatest amount of variability. The example data is # very low resolution so it does not make a lot of sense. ano <- Ano_CrossValid(sampleData$mod, sampleData$obs) -mca <- SVD(Mean1Dim(ano$ano_exp, 2)[1, , 1, , ], Mean1Dim(ano$ano_obs, 2)[1, , 1, , ], +mca <- SVD(Mean1Dim(ano$ano_exp, 2)[1, , 1, , ], + Mean1Dim(ano$ano_obs, 2)[1, , 1, , ], sampleData$lat, sampleData$lat) PlotEquiMap(mca$MCAs_U[1, , ], sampleData$lon, sampleData$lat) plot(mca$ECs_U[1, ]) diff --git a/man/StatSeasAtlHurr.Rd b/man/StatSeasAtlHurr.Rd index 3acb0edb02e82bc817a42eda236909e10299df97..e2ad65a712a011450d1538658230a60f9ab7dcdf 100644 --- a/man/StatSeasAtlHurr.Rd +++ b/man/StatSeasAtlHurr.Rd @@ -54,8 +54,10 @@ TropAno <- matrix(c(-0.22, -.13, 0.07, -0.16, -0.15, nrow = 5, ncol = 5) # The seasonal average of hurricanes for each of the five forecasted years, # for each forecast, would then be given by -hr_count <- StatSeasAtlHurr(atlano = AtlAno, tropano = TropAno, hrvar = 'HR')$mean -print(hr_count) +hr_count <- StatSeasAtlHurr(atlano = AtlAno, + tropano = TropAno, + hrvar = 'HR') +print(hr_count$mean) } \references{ Villarini et al. (2010) Mon Wea Rev, 138, 2681-2705.\cr diff --git a/man/Subset.Rd b/man/Subset.Rd index 6e165f6a0a2ada6e89b5ec4bfe8432acef9befa0..2c883ffa771e0330d97dfa3a47dc65a7ac1c3411 100644 --- a/man/Subset.Rd +++ b/man/Subset.Rd @@ -40,7 +40,10 @@ FALSE, 'selected', and 'non-selected'. } } \examples{ -subset <- Subset(sampleMap$mod, c('dataset', 'sdate', 'ftime'), list(1, 1, 1), drop = 'selected') -PlotLayout(PlotEquiMap, c('lat', 'lon'), subset, sampleMap$lon, sampleMap$lat, titles = paste('Member', 1:3)) +subset <- Subset(sampleMap$mod, c('dataset', 'sdate', 'ftime'), + list(1, 1, 1), drop = 'selected') +PlotLayout(PlotEquiMap, c('lat', 'lon'), subset, + sampleMap$lon, sampleMap$lat, + titles = paste('Member', 1:3)) } \keyword{dplot} diff --git a/man/ToyModel.Rd b/man/ToyModel.Rd index b009ef0fdb6d4c772d01775c912d32b41b26c988..bfbd179e3c8b01748c07def255e076479dfca5e3 100644 --- a/man/ToyModel.Rd +++ b/man/ToyModel.Rd @@ -19,8 +19,9 @@ The forecast can be computed for real observations or observations generated artifically. 
} \usage{ -ToyModel(alpha = 0.1, beta = 0.4, gamma = 1, sig = 1, trend = 0, nstartd = 30, - nleadt = 4, nmemb = 10, obsini = NULL, fxerr = NULL) +ToyModel(alpha = 0.1, beta = 0.4, gamma = 1, sig = 1, trend = 0, + nstartd = 30, nleadt = 4, nmemb = 10, obsini = NULL, + fxerr = NULL) } \arguments{ \item{alpha}{ diff --git a/man/UltimateBrier.Rd b/man/UltimateBrier.Rd index 59d17090ea49827b2bab87e83381b0de9bb0f245..6f2cb28da234f72c78d397d2ad6686eef825cddc 100644 --- a/man/UltimateBrier.Rd +++ b/man/UltimateBrier.Rd @@ -95,7 +95,11 @@ bs <- UltimateBrier(ano_exp, ano_obs) bss <- UltimateBrier(ano_exp, ano_obs, type = 'BSS') } \author{ -History: -0.1 - 2015-05 (V. Guemas \email{virginie.guemas@ic3.cat}, C. Prodhomme \email{chloe.prodhomme@ic3.cat}, O. Bellprat \email{omar.bellprat@ic3.cat}, V. Torralba \email{veronica.torralba@ic3.cat}, N. Manubens, \email{nicolau.manubens@ic3.cat}) - First version +History:\cr +0.1 - 2015-05 (V. Guemas \email{virginie.guemas at bsc.es},\cr + C. Prodhomme \email{chloe.prodhomme at bsc.es},\cr + O. Bellprat \email{omar.bellprat at bsc.es},\cr + V. Torralba \email{veronica.torralba at bsc.es},\cr + N. Manubens, \email{nicolau.manubens at bsc.es}) - First version } \keyword{datagen} diff --git a/man/s2dverification.Rd b/man/s2dverification.Rd index c1dcfa4f816f3af4f6747d5083d094743c7f315f..32a6b9453e9bbfa4e38c07cec0050b6d8d13d0af 100644 --- a/man/s2dverification.Rd +++ b/man/s2dverification.Rd @@ -10,9 +10,9 @@ Set of tools to verify forecasts through the computation of typical prediction s \tabular{ll}{ Package: \tab s2dverification\cr Type: \tab Package\cr -Version: \tab 2.7.3\cr -Date: \tab 2016-11-09\cr -License: \tab GPLv3\cr +Version: \tab 2.8.0\cr +Date: \tab 2017-02-10\cr +License: \tab LGPLv3\cr } Check an overview of the package functionalities and its modules at \url{https://earth.bsc.es/gitlab/es/s2dverification/wikis/home}. For more information load the package and check the help for each function or the documentation attached to the package. diff --git a/s2dverification-manual.pdf b/s2dverification-manual.pdf index 306d35db29781eaf4e7430c5a67a58682413e516..2d4441ff264369411a34568f19a7c8090e5e28a1 100644 Binary files a/s2dverification-manual.pdf and b/s2dverification-manual.pdf differ