From e3e77ecd9309c2788c5d535c44d7c64bf40f32b0 Mon Sep 17 00:00:00 2001 From: nperez Date: Fri, 4 Aug 2023 17:07:42 +0200 Subject: [PATCH 01/22] first attempt region plot size --- modules/Visualization/R/plot_skill_metrics.R | 6 ++++-- modules/Visualization/output_size.yml | 12 ++++++++++++ recipe_ecvs_seasonal_oper.yml | 2 +- 3 files changed, 17 insertions(+), 3 deletions(-) create mode 100644 modules/Visualization/output_size.yml diff --git a/modules/Visualization/R/plot_skill_metrics.R b/modules/Visualization/R/plot_skill_metrics.R index 76dae774..2957f6f4 100644 --- a/modules/Visualization/R/plot_skill_metrics.R +++ b/modules/Visualization/R/plot_skill_metrics.R @@ -169,7 +169,9 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, bar_extra_margin = rep(0.9, 4), extra_margin = rep(1, 4), bar_label_scale = 1.5, - axes_label_scale = 1.3) + axes_label_scale = 1.3, + width = 7,#default i + height = 5) ) } else { # Define function and parameters depending on projection @@ -181,7 +183,7 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, filled.continents = F, brks = brks, cols = cols, col_inf = col_inf, col_sup = col_sup, bar_label_digits = 3, bar_label_scale = 1.5, - axes_label_scale = 1) + axes_label_scale = 1, width = 7, hight = 5) } else { fun <- PlotRobinson common_projections <- c("robinson", "stereographic", "lambert_europe") diff --git a/modules/Visualization/output_size.yml b/modules/Visualization/output_size.yml new file mode 100644 index 00000000..0ed855a3 --- /dev/null +++ b/modules/Visualization/output_size.yml @@ -0,0 +1,12 @@ +region: #units inches + EU: #latmin: 20, latmax: 80, lonmin: -20, lonmax: 40 + PlotEquiMap: + skill_metrics: {width: 6, height: 6} + forecast_ensemble_mean: {width: 6, height: 6} + most_likely_terciles: {width: 7, height: 5} + Projection: + skill_metrics: {width: 8, height: 5} + NA-EU: #Norht Atlantic European region + Mediterranean: + Global: +#... 
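Note on the new configuration file: output_size.yml is a per-region lookup table of plotting arguments (figure size and label scales) keyed by region name, plotting function and plot type. The short R sketch below is not part of the patch series; it only illustrates, under the assumption that the yaml package and the EU entry shown above are available, how such an entry can override the hard-coded defaults of a plotting call. This is the same pattern the later patches in this series adopt inside plot_skill_metrics.R via base_args[names(output_conf)] <- output_conf; the default values used here are illustrative, not the exact ones in the module.

library(yaml)

# Read the region table and pick the entry for the EU domain
output_conf <- read_yaml("modules/Visualization/output_size.yml")$region
region_conf <- output_conf[["EU"]]$PlotEquiMap$skill_metrics

# Hard-coded defaults as a plotting function would define them
# (illustrative values, not the exact defaults in plot_skill_metrics.R)
base_args <- list(width = 7, height = 5,
                  bar_label_scale = 1.5, axes_label_scale = 1.3)

# Any field present in the YAML entry replaces the matching default;
# fields missing from the YAML keep their hard-coded value
base_args[names(region_conf)] <- region_conf
str(base_args)

Keeping the defaults inside the plotting function and overriding only the named fields means regions without an entry in output_size.yml still produce plots with sensible sizes.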
diff --git a/recipe_ecvs_seasonal_oper.yml b/recipe_ecvs_seasonal_oper.yml index d47fd159..f6a7bbe7 100644 --- a/recipe_ecvs_seasonal_oper.yml +++ b/recipe_ecvs_seasonal_oper.yml @@ -47,7 +47,7 @@ Analysis: Visualization: plots: skill_metrics forecast_ensemble_mean most_likely_terciles multi_panel: no - projection: lambert_europe + dots: both ncores: 4 # Optional, int: number of cores, defaults to 1 remove_NAs: # Optional, bool: Whether NAs are removed, defaults to FALSE Output_format: scorecards -- GitLab From 4755551c544be823b5d3885d80e4b76e505dd611 Mon Sep 17 00:00:00 2001 From: nperez Date: Mon, 21 Aug 2023 17:49:56 +0200 Subject: [PATCH 02/22] Problem with second metric --- modules/Visualization/R/plot_skill_metrics.R | 10 +++++++--- modules/Visualization/Visualization.R | 17 ++++++++++++++--- modules/Visualization/output_size.yml | 5 +++-- recipe_ecvs_seasonal_oper.yml | 2 +- recipe_tas_seasonal_oper.yml | 5 +---- 5 files changed, 26 insertions(+), 13 deletions(-) diff --git a/modules/Visualization/R/plot_skill_metrics.R b/modules/Visualization/R/plot_skill_metrics.R index 2957f6f4..e01099d7 100644 --- a/modules/Visualization/R/plot_skill_metrics.R +++ b/modules/Visualization/R/plot_skill_metrics.R @@ -1,7 +1,7 @@ library(stringr) plot_skill_metrics <- function(recipe, data_cube, skill_metrics, - outdir, significance = F) { + outdir, significance = F, output_conf) { # recipe: Auto-S2S recipe # archive: Auto-S2S archive # data_cube: s2dv_cube object with the corresponding hindcast data @@ -44,7 +44,6 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, diverging_palette <- "bluered" sequential_palette <- "Reds" } - # Group different metrics by type skill_scores <- c("rpss", "bss90", "bss10", "frpss", "crpss", "mean_bias_ss", "enscorr", "rpss_specs", "bss90_specs", "bss10_specs", @@ -177,13 +176,16 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, # Define function and parameters depending on projection if (projection == 'cylindrical_equidistant') { fun <- PlotEquiMap + output_conf <- output_conf$PlotEquiMap$skill_metric base_args <- list(var = NULL, dots = NULL, lon = longitude, lat = latitude, dot_symbol = 20, title_scale = 0.6, filled.continents = F, brks = brks, cols = cols, col_inf = col_inf, col_sup = col_sup, bar_label_digits = 3, bar_label_scale = 1.5, - axes_label_scale = 1, width = 7, hight = 5) + axes_label_scale = 1, width = 7, height = 5) + base_args[names(output_conf)] <- output_conf +print(name) } else { fun <- PlotRobinson common_projections <- c("robinson", "stereographic", "lambert_europe") @@ -231,6 +233,8 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, } fileout <- paste0(outfile, "_ft", forecast_time, ".png") # Plot + print(name) + do.call(fun, args = c(base_args, list(toptitle = toptitle, diff --git a/modules/Visualization/Visualization.R b/modules/Visualization/Visualization.R index 5fb3b946..aacb9ef9 100644 --- a/modules/Visualization/Visualization.R +++ b/modules/Visualization/Visualization.R @@ -15,7 +15,8 @@ plot_data <- function(recipe, data, skill_metrics = NULL, probabilities = NULL, - significance = F) { + significance = F, + output_conf = NULL) { # Try to produce and save several basic plots. # recipe: the auto-s2s recipe as read by read_yaml() # data: list containing the hcst, obs and (optional) fcst s2dv_cube objects @@ -23,7 +24,17 @@ plot_data <- function(recipe, # s2dv_cube objects # skill_metrics: list of arrays containing the computed skill metrics # significance: Bool. 
Whether to include significance dots where applicable - + if (is.null(output_conf)) { + output_conf <- read_yaml("modules/Visualization/output_size.yml")$region + output_conf <- output_conf[[recipe$Analysis$Region[[1]]$name]] + if (is.null(output_conf)) { + stop("Region not found in the ouput_conf file") # warning? + } + } else{ + if (is.list(output_conf)) { + warning("Shape of the list of configuration?") + } + } plots <- strsplit(recipe$Analysis$Workflow$Visualization$plots, ", | |,")[[1]] recipe$Run$output_dir <- paste0(recipe$Run$output_dir, "/plots/") ## TODO: Sort this out @@ -47,7 +58,7 @@ plot_data <- function(recipe, if ("skill_metrics" %in% plots) { if (!is.null(skill_metrics)) { plot_skill_metrics(recipe, data$hcst, skill_metrics, outdir, - significance) + significance, output_conf) } else { error(recipe$Run$logger, paste0("The skill metric plots have been requested, but the ", diff --git a/modules/Visualization/output_size.yml b/modules/Visualization/output_size.yml index 0ed855a3..920ecdf0 100644 --- a/modules/Visualization/output_size.yml +++ b/modules/Visualization/output_size.yml @@ -1,12 +1,13 @@ region: #units inches EU: #latmin: 20, latmax: 80, lonmin: -20, lonmax: 40 PlotEquiMap: - skill_metrics: {width: 6, height: 6} + skill_metrics: {width: 6, height: 6, axes_label_scale: 0.8, bar_label_scale: 1} forecast_ensemble_mean: {width: 6, height: 6} most_likely_terciles: {width: 7, height: 5} + Multipanel: Projection: skill_metrics: {width: 8, height: 5} NA-EU: #Norht Atlantic European region Mediterranean: Global: -#... +# Add other regions diff --git a/recipe_ecvs_seasonal_oper.yml b/recipe_ecvs_seasonal_oper.yml index f6a7bbe7..898356ce 100644 --- a/recipe_ecvs_seasonal_oper.yml +++ b/recipe_ecvs_seasonal_oper.yml @@ -21,7 +21,7 @@ Analysis: ftime_min: 1 # Mandatory, int: First leadtime time step in months ftime_max: 6 # Mandatory, int: Last leadtime time step in months Region: - - {name: "UE", latmin: 20, latmax: 80, lonmin: -20, lonmax: 40} + - {name: "EU", latmin: 20, latmax: 80, lonmin: -20, lonmax: 40} Regrid: method: bilinear # Mandatory, str: Interpolation method. See docu. type: "to_system" diff --git a/recipe_tas_seasonal_oper.yml b/recipe_tas_seasonal_oper.yml index 75918265..8bfca789 100644 --- a/recipe_tas_seasonal_oper.yml +++ b/recipe_tas_seasonal_oper.yml @@ -21,10 +21,7 @@ Analysis: ftime_min: 1 # Mandatory, int: First leadtime time step in months ftime_max: 6 # Mandatory, int: Last leadtime time step in months Region: - latmin: 20 # Mandatory, int: minimum latitude - latmax: 80 # Mandatory, int: maximum latitude - lonmin: -20 # Mandatory, int: minimum longitude - lonmax: 40 # Mandatory, int: maximum longitude + - {name: "EU", latmin: 20, latmax: 80, lonmin: -20, lonmax: 40} Regrid: method: bilinear # Mandatory, str: Interpolation method. See docu. 
type: "to_system" -- GitLab From ca60c1e3db0b858a7f605a35d8ae618f3d7c8afe Mon Sep 17 00:00:00 2001 From: nperez Date: Tue, 22 Aug 2023 18:18:28 +0200 Subject: [PATCH 03/22] first working version configuration --- .../R/plot_most_likely_terciles_map.R | 44 +++++++++++-------- modules/Visualization/R/plot_skill_metrics.R | 7 ++- modules/Visualization/Visualization.R | 11 ++--- modules/Visualization/output_size.yml | 12 +++-- recipe_prlr_seasonal_oper.yml | 5 +-- recipe_tas_seasonal_oper.yml | 6 ++- 6 files changed, 50 insertions(+), 35 deletions(-) diff --git a/modules/Visualization/R/plot_most_likely_terciles_map.R b/modules/Visualization/R/plot_most_likely_terciles_map.R index 4952a90e..803882d7 100644 --- a/modules/Visualization/R/plot_most_likely_terciles_map.R +++ b/modules/Visualization/R/plot_most_likely_terciles_map.R @@ -9,7 +9,8 @@ plot_most_likely_terciles <- function(recipe, probabilities, mask, dots, - outdir) { + outdir, + output_conf) { ## TODO: Add 'anomaly' to plot title # Abort if frequency is daily @@ -126,6 +127,23 @@ plot_most_likely_terciles <- function(recipe, triangle_ends = c(F, F)) # , width = 11, height = 8) ) } else { + output_configuration <- output_conf$PlotEquiMap$most_likely_terciles + base_args <- list(cat_dim = 'bin', + probs = NULL, + lon = longitude, lat = latitude, + coast_width = 1.5, + mask = NULL, + dots = NULL, + col_mask = 'antiquewhite', + cols = cols, + title_scale = 1, + legend_scale = 0.8, + cex_bar_titles = 0.9, + bar_label_digits = 2, + bar_label_scale = 0.7, + axes_label_scale = 1.1, + triangle_ends = c(F, F) , width = 10, height = 8) + base_args[names(output_configuration)] <- output_configuration for (i in 1:length(months)) { # Get forecast time label forecast_time <- match(months[i], month.name) - init_month + 1 @@ -155,23 +173,13 @@ plot_most_likely_terciles <- function(recipe, "%d-%m-%Y")) # Plot fileout <- paste0(outfile, "_ft", forecast_time, ".png") - PlotMostLikelyQuantileMap(cat_dim = 'bin', - probs = i_var_probs[i, , , ], - lon = longitude, lat = latitude, - coast_width = 1.5, - mask = mask_i, - dots = dots_i, - col_mask = 'antiquewhite', - cols = cols, - title_scale = 1, - legend_scale = 0.8, - cex_bar_titles = 0.9, - toptitle = toptitle, - fileout = fileout, - bar_label_digits = 2, - bar_label_scale = 0.7, - axes_label_scale = 1.1, - triangle_ends = c(F, F) , width = 10, height = 8) + base_args$probs <- i_var_probs[i, , , ] + base_args$mask <- mask_i + base_args$dots <- dots_i + do.call(PlotMostLikelyQuantileMap, + args = c(base_args, + list(toptitle = toptitle, + fileout = fileout))) } } } diff --git a/modules/Visualization/R/plot_skill_metrics.R b/modules/Visualization/R/plot_skill_metrics.R index e01099d7..3261dee1 100644 --- a/modules/Visualization/R/plot_skill_metrics.R +++ b/modules/Visualization/R/plot_skill_metrics.R @@ -176,7 +176,7 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, # Define function and parameters depending on projection if (projection == 'cylindrical_equidistant') { fun <- PlotEquiMap - output_conf <- output_conf$PlotEquiMap$skill_metric + output_configuration <- output_conf$PlotEquiMap$skill_metric base_args <- list(var = NULL, dots = NULL, lon = longitude, lat = latitude, dot_symbol = 20, title_scale = 0.6, @@ -184,8 +184,7 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, col_inf = col_inf, col_sup = col_sup, bar_label_digits = 3, bar_label_scale = 1.5, axes_label_scale = 1, width = 7, height = 5) - base_args[names(output_conf)] <- output_conf -print(name) + 
base_args[names(output_configuration)] <- output_configuration } else { fun <- PlotRobinson common_projections <- c("robinson", "stereographic", "lambert_europe") @@ -194,6 +193,7 @@ print(name) } else { target_proj <- projection } + ## TODO: handle output_conf base_args <- list(data = NULL, mask = NULL, lon = longitude, lat = latitude, lon_dim = 'longitude', lat_dim = 'latitude', @@ -233,7 +233,6 @@ print(name) } fileout <- paste0(outfile, "_ft", forecast_time, ".png") # Plot - print(name) do.call(fun, args = c(base_args, diff --git a/modules/Visualization/Visualization.R b/modules/Visualization/Visualization.R index aacb9ef9..f427afc7 100644 --- a/modules/Visualization/Visualization.R +++ b/modules/Visualization/Visualization.R @@ -25,8 +25,9 @@ plot_data <- function(recipe, # skill_metrics: list of arrays containing the computed skill metrics # significance: Bool. Whether to include significance dots where applicable if (is.null(output_conf)) { - output_conf <- read_yaml("modules/Visualization/output_size.yml")$region - output_conf <- output_conf[[recipe$Analysis$Region[[1]]$name]] + output_conf <- read_yaml("modules/Visualization/output_size.yml", + eval.exp = TRUE)$region + output_conf <- output_conf[[recipe$Analysis$Region$name]] if (is.null(output_conf)) { stop("Region not found in the ouput_conf file") # warning? } @@ -96,7 +97,7 @@ plot_data <- function(recipe, probabilities, mask = NULL, dots = NULL, - outdir) + outdir, output_conf = output_conf) } # Plots with masked terciles if (recipe$Analysis$Workflow$Visualization$mask_terciles %in% @@ -114,7 +115,7 @@ plot_data <- function(recipe, probabilities, mask = skill_metrics$rpss, dots = NULL, - outdir) + outdir, output_conf = output_conf) } } # Plots with dotted terciles @@ -132,7 +133,7 @@ plot_data <- function(recipe, probabilities, mask = NULL, dots = skill_metrics$rpss, - outdir) + outdir, output_conf = output_conf) } } } else { diff --git a/modules/Visualization/output_size.yml b/modules/Visualization/output_size.yml index 920ecdf0..5c1a08e6 100644 --- a/modules/Visualization/output_size.yml +++ b/modules/Visualization/output_size.yml @@ -1,9 +1,15 @@ region: #units inches EU: #latmin: 20, latmax: 80, lonmin: -20, lonmax: 40 PlotEquiMap: - skill_metrics: {width: 6, height: 6, axes_label_scale: 0.8, bar_label_scale: 1} - forecast_ensemble_mean: {width: 6, height: 6} - most_likely_terciles: {width: 7, height: 5} + skill_metrics: + width: 9 + height: 8.5 + axes_label_scale: 0.8 + bar_label_scale: 1.2 + bar_extra_margin: !expr c(2,1,0.5,1) + forecast_ensemble_mean: {width: 9, height: 8.5, + axes_label_scale: 0.8, bar_label_scale: 1} + most_likely_terciles: {width: 9, height: 9} Multipanel: Projection: skill_metrics: {width: 8, height: 5} diff --git a/recipe_prlr_seasonal_oper.yml b/recipe_prlr_seasonal_oper.yml index fb6d1a1c..ae7acedb 100644 --- a/recipe_prlr_seasonal_oper.yml +++ b/recipe_prlr_seasonal_oper.yml @@ -21,10 +21,7 @@ Analysis: ftime_min: 1 # Mandatory, int: First leadtime time step in months ftime_max: 6 # Mandatory, int: Last leadtime time step in months Region: - latmin: 20 # Mandatory, int: minimum latitude - latmax: 80 # Mandatory, int: maximum latitude - lonmin: -20 # Mandatory, int: minimum longitude - lonmax: 40 # Mandatory, int: maximum longitude + - {name: "EU", latmin: 20, latmax: 80, lonmin: -20, lonmax: 40} Regrid: method: bilinear # Mandatory, str: Interpolation method. See docu. 
type: "to_system" diff --git a/recipe_tas_seasonal_oper.yml b/recipe_tas_seasonal_oper.yml index 8bfca789..13b6a0b9 100644 --- a/recipe_tas_seasonal_oper.yml +++ b/recipe_tas_seasonal_oper.yml @@ -21,7 +21,11 @@ Analysis: ftime_min: 1 # Mandatory, int: First leadtime time step in months ftime_max: 6 # Mandatory, int: Last leadtime time step in months Region: - - {name: "EU", latmin: 20, latmax: 80, lonmin: -20, lonmax: 40} + name: "EU" + latmin: 20 + latmax: 80 + lonmin: -20 + lonmax: 40 Regrid: method: bilinear # Mandatory, str: Interpolation method. See docu. type: "to_system" -- GitLab From ca9e2a808e14962595d7b719512c58e9216f8210 Mon Sep 17 00:00:00 2001 From: nperez Date: Wed, 23 Aug 2023 15:59:32 +0200 Subject: [PATCH 04/22] conf structure --- modules/Visualization/output_size.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/Visualization/output_size.yml b/modules/Visualization/output_size.yml index 5c1a08e6..c36b2d60 100644 --- a/modules/Visualization/output_size.yml +++ b/modules/Visualization/output_size.yml @@ -11,8 +11,8 @@ region: #units inches axes_label_scale: 0.8, bar_label_scale: 1} most_likely_terciles: {width: 9, height: 9} Multipanel: - Projection: - skill_metrics: {width: 8, height: 5} + Robinson: + skill_metrics: {width: 8, height: 5} NA-EU: #Norht Atlantic European region Mediterranean: Global: -- GitLab From 731451f340fe2266dff380cadce8ce05f7c22d5b Mon Sep 17 00:00:00 2001 From: nperez Date: Wed, 23 Aug 2023 17:45:46 +0200 Subject: [PATCH 05/22] ensemble mean adjusted --- modules/Visualization/R/plot_ensemble_mean.R | 8 ++++---- modules/Visualization/R/plot_most_likely_terciles_map.R | 2 ++ modules/Visualization/R/plot_skill_metrics.R | 3 ++- modules/Visualization/Visualization.R | 4 ++-- modules/Visualization/output_size.yml | 8 +++++--- recipe_prlr_seasonal_oper.yml | 6 +++++- 6 files changed, 20 insertions(+), 11 deletions(-) diff --git a/modules/Visualization/R/plot_ensemble_mean.R b/modules/Visualization/R/plot_ensemble_mean.R index 07e35706..301fd2a5 100644 --- a/modules/Visualization/R/plot_ensemble_mean.R +++ b/modules/Visualization/R/plot_ensemble_mean.R @@ -1,5 +1,4 @@ -plot_ensemble_mean <- function(recipe, fcst, outdir) { - +plot_ensemble_mean <- function(recipe, fcst, outdir, output_conf) { ## TODO: Add 'anomaly' to plot title # Abort if frequency is daily if (recipe$Analysis$Variables$freq == "daily_mean") { @@ -19,7 +18,6 @@ plot_ensemble_mean <- function(recipe, fcst, outdir) { } else { projection <- "cylindrical_equidistant" } - # Compute ensemble mean ensemble_mean <- s2dv::MeanDims(fcst$data, 'ensemble') # Loop over variable dimension @@ -94,12 +92,14 @@ plot_ensemble_mean <- function(recipe, fcst, outdir) { # Define function and parameters depending on projection if (projection == 'cylindrical_equidistant') { fun <- PlotEquiMap + output_configuration <- output_conf$PlotEquiMap$forecast_ensemble_mean base_args <- list(var = NULL, dots = NULL, lon = longitude, lat = latitude, dot_symbol = 20, title_scale = 0.6, filled.continents = F, brks = brks, cols = cols, bar_label_digits = 4, bar_label_scale = 1.5, axes_label_scale = 1) + base_args[names(output_configuration)] <- output_configuration } else { fun <- PlotRobinson common_projections <- c("robinson", "stereographic", "lambert_europe") @@ -147,5 +147,5 @@ plot_ensemble_mean <- function(recipe, fcst, outdir) { } } info(recipe$Run$logger, - "##### SKILL METRIC PLOTS SAVED TO OUTPUT DIRECTORY #####") + "##### FORECAST ENSEMBLE MEAN PLOTS SAVED TO OUTPUT DIRECTORY 
#####") } diff --git a/modules/Visualization/R/plot_most_likely_terciles_map.R b/modules/Visualization/R/plot_most_likely_terciles_map.R index 803882d7..33a19765 100644 --- a/modules/Visualization/R/plot_most_likely_terciles_map.R +++ b/modules/Visualization/R/plot_most_likely_terciles_map.R @@ -134,6 +134,8 @@ plot_most_likely_terciles <- function(recipe, coast_width = 1.5, mask = NULL, dots = NULL, + dot_symbol = 4, + dot_size = 1, col_mask = 'antiquewhite', cols = cols, title_scale = 1, diff --git a/modules/Visualization/R/plot_skill_metrics.R b/modules/Visualization/R/plot_skill_metrics.R index 3261dee1..a2986944 100644 --- a/modules/Visualization/R/plot_skill_metrics.R +++ b/modules/Visualization/R/plot_skill_metrics.R @@ -179,7 +179,8 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, output_configuration <- output_conf$PlotEquiMap$skill_metric base_args <- list(var = NULL, dots = NULL, lon = longitude, lat = latitude, - dot_symbol = 20, title_scale = 0.6, + dot_symbol = 20, dot_size = 1, + title_scale = 0.6, filled.continents = F, brks = brks, cols = cols, col_inf = col_inf, col_sup = col_sup, bar_label_digits = 3, bar_label_scale = 1.5, diff --git a/modules/Visualization/Visualization.R b/modules/Visualization/Visualization.R index f427afc7..5d4e4080 100644 --- a/modules/Visualization/Visualization.R +++ b/modules/Visualization/Visualization.R @@ -59,7 +59,7 @@ plot_data <- function(recipe, if ("skill_metrics" %in% plots) { if (!is.null(skill_metrics)) { plot_skill_metrics(recipe, data$hcst, skill_metrics, outdir, - significance, output_conf) + significance, output_conf = output_conf) } else { error(recipe$Run$logger, paste0("The skill metric plots have been requested, but the ", @@ -70,7 +70,7 @@ plot_data <- function(recipe, # Plot forecast ensemble mean if ("forecast_ensemble_mean" %in% plots) { if (!is.null(data$fcst)) { - plot_ensemble_mean(recipe, data$fcst, outdir) + plot_ensemble_mean(recipe, data$fcst, outdir, output_conf = output_conf) } else { error(recipe$Run$logger, paste0("The forecast ensemble mean plot has been requested, but ", diff --git a/modules/Visualization/output_size.yml b/modules/Visualization/output_size.yml index c36b2d60..62816167 100644 --- a/modules/Visualization/output_size.yml +++ b/modules/Visualization/output_size.yml @@ -2,14 +2,16 @@ region: #units inches EU: #latmin: 20, latmax: 80, lonmin: -20, lonmax: 40 PlotEquiMap: skill_metrics: - width: 9 + width: 8.5 height: 8.5 axes_label_scale: 0.8 bar_label_scale: 1.2 bar_extra_margin: !expr c(2,1,0.5,1) - forecast_ensemble_mean: {width: 9, height: 8.5, + dot_size: 1.7 + dot_symbol: 4 + forecast_ensemble_mean: {width: 8.5, height: 8.5, axes_label_scale: 0.8, bar_label_scale: 1} - most_likely_terciles: {width: 9, height: 9} + most_likely_terciles: {width: 8.5, height: 9, dot_size: 1.7} Multipanel: Robinson: skill_metrics: {width: 8, height: 5} diff --git a/recipe_prlr_seasonal_oper.yml b/recipe_prlr_seasonal_oper.yml index ae7acedb..64dc7fe8 100644 --- a/recipe_prlr_seasonal_oper.yml +++ b/recipe_prlr_seasonal_oper.yml @@ -21,7 +21,11 @@ Analysis: ftime_min: 1 # Mandatory, int: First leadtime time step in months ftime_max: 6 # Mandatory, int: Last leadtime time step in months Region: - - {name: "EU", latmin: 20, latmax: 80, lonmin: -20, lonmax: 40} + name: "EU" + latmin: 20 + latmax: 80 + lonmin: -20 + lonmax: 40 Regrid: method: bilinear # Mandatory, str: Interpolation method. See docu. 
type: "to_system" -- GitLab From 91f1fd338fa3d7602df3c1c7d2c295d662e2d0fd Mon Sep 17 00:00:00 2001 From: nperez Date: Wed, 23 Aug 2023 18:02:05 +0200 Subject: [PATCH 06/22] adjustments forecast ensemble mean --- modules/Visualization/R/plot_ensemble_mean.R | 11 ++++++++--- modules/Visualization/output_size.yml | 10 +++++++--- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/modules/Visualization/R/plot_ensemble_mean.R b/modules/Visualization/R/plot_ensemble_mean.R index 301fd2a5..d49d8902 100644 --- a/modules/Visualization/R/plot_ensemble_mean.R +++ b/modules/Visualization/R/plot_ensemble_mean.R @@ -67,6 +67,7 @@ plot_ensemble_mean <- function(recipe, fcst, outdir, output_conf) { "\n", "Forecast Ensemble Mean / ", "Init.: ", i_syear) months <- lubridate::month(fcst$attrs$Dates[1, 1, which(start_date == i_syear), ], label = T, abb = F) + years <- lubridate::year(fcst$attrs$Dates[1, 1, which(start_date == i_syear), ]) if (recipe$Analysis$Workflow$Visualization$multi_panel) { # Define name of output file and titles @@ -123,9 +124,13 @@ plot_ensemble_mean <- function(recipe, fcst, outdir, output_conf) { } forecast_time <- sprintf("%02d", forecast_time) # Define plot title - toptitle <- paste(system_name, "/", - str_to_title(var_long_name), - "\n", "Forecast Ensemble Mean /", months[i]) + toptitle <- paste0(system_name, " / ", + str_to_title(var_long_name), + "\n", "Ensemble Mean / ", + months[i], " ", years[i], + " / Start date: ", + format(as.Date(i_syear, format="%Y%m%d"), + "%d-%m-%Y")) # Define caption if (identical(fun, PlotRobinson)) { ## TODO: Customize technical details diff --git a/modules/Visualization/output_size.yml b/modules/Visualization/output_size.yml index 62816167..fcad0175 100644 --- a/modules/Visualization/output_size.yml +++ b/modules/Visualization/output_size.yml @@ -9,9 +9,13 @@ region: #units inches bar_extra_margin: !expr c(2,1,0.5,1) dot_size: 1.7 dot_symbol: 4 - forecast_ensemble_mean: {width: 8.5, height: 8.5, - axes_label_scale: 0.8, bar_label_scale: 1} - most_likely_terciles: {width: 8.5, height: 9, dot_size: 1.7} + forecast_ensemble_mean: + width: 8.5 + height: 8.5 + axes_label_scale: 0.8 + bar_label_scale: 1.2 + bar_extra_margin: !expr c(2,1,0.5,1) + most_likely_terciles: {width: 8.5, height: 9, dot_size: 2} Multipanel: Robinson: skill_metrics: {width: 8, height: 5} -- GitLab From 7492f08cd189a5f37d75b8d361d365f0391ee41d Mon Sep 17 00:00:00 2001 From: nperez Date: Thu, 24 Aug 2023 11:21:50 +0200 Subject: [PATCH 07/22] extra adjustment PlotCombined hardcoded --- modules/Visualization/R/plot_most_likely_terciles_map.R | 1 + modules/Visualization/R/tmp/PlotCombinedMap.R | 2 +- modules/Visualization/output_size.yml | 6 +++++- recipe_ecvs_seasonal_oper.yml | 2 +- 4 files changed, 8 insertions(+), 3 deletions(-) diff --git a/modules/Visualization/R/plot_most_likely_terciles_map.R b/modules/Visualization/R/plot_most_likely_terciles_map.R index 33a19765..ed5f3635 100644 --- a/modules/Visualization/R/plot_most_likely_terciles_map.R +++ b/modules/Visualization/R/plot_most_likely_terciles_map.R @@ -144,6 +144,7 @@ plot_most_likely_terciles <- function(recipe, bar_label_digits = 2, bar_label_scale = 0.7, axes_label_scale = 1.1, + plot_margin = c(5.1, 4.1, 4.1, 2.1), triangle_ends = c(F, F) , width = 10, height = 8) base_args[names(output_configuration)] <- output_configuration for (i in 1:length(months)) { diff --git a/modules/Visualization/R/tmp/PlotCombinedMap.R b/modules/Visualization/R/tmp/PlotCombinedMap.R index 4575f897..9c7a6ede 100644 --- 
a/modules/Visualization/R/tmp/PlotCombinedMap.R +++ b/modules/Visualization/R/tmp/PlotCombinedMap.R @@ -435,7 +435,7 @@ PlotCombinedMap <- function(maps, lon, lat, subsampleg = NULL, bar_limits = display_range, var_limits = NULL, triangle_ends = NULL, plot = TRUE, draw_separators = TRUE, bar_titles = bar_titles, title_scale = cex_bar_titles, label_scale = legend_scale * 1.5, - extra_margin = c(2, 0, 2, 0)) + extra_margin = c(2, 0, 1.5, 0)) } # If the graphic was saved to file, close the connection with the device diff --git a/modules/Visualization/output_size.yml b/modules/Visualization/output_size.yml index fcad0175..6d82b039 100644 --- a/modules/Visualization/output_size.yml +++ b/modules/Visualization/output_size.yml @@ -15,7 +15,11 @@ region: #units inches axes_label_scale: 0.8 bar_label_scale: 1.2 bar_extra_margin: !expr c(2,1,0.5,1) - most_likely_terciles: {width: 8.5, height: 9, dot_size: 2} + most_likely_terciles: + width: 8.5 + height: 8.5 + dot_size: 2 + plot_margin: !expr c(0, 4.1, 4.1, 2.1) Multipanel: Robinson: skill_metrics: {width: 8, height: 5} diff --git a/recipe_ecvs_seasonal_oper.yml b/recipe_ecvs_seasonal_oper.yml index 898356ce..7b8969bf 100644 --- a/recipe_ecvs_seasonal_oper.yml +++ b/recipe_ecvs_seasonal_oper.yml @@ -14,7 +14,7 @@ Analysis: Reference: - {name: ERA5} # Mandatory, str: Reference codename. See docu. Time: - sdate: '0701' ## MMDD + sdate: '0801' ## MMDD fcst_year: '2023' # Optional, int: Forecast year 'YYYY' hcst_start: '1993' # Mandatory, int: Hindcast start year 'YYYY' hcst_end: '2016' # Mandatory, int: Hindcast end year 'YYYY' -- GitLab From 411cf893b684630c92a3bb3ef0f0ed487fc6548c Mon Sep 17 00:00:00 2001 From: nperez Date: Thu, 24 Aug 2023 17:45:56 +0200 Subject: [PATCH 08/22] units in plot mean ensemble forecast --- exec_ecvs_seasonal_oper.R | 3 +++ modules/Visualization/R/plot_ensemble_mean.R | 3 +-- modules/Visualization/Visualization.R | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/exec_ecvs_seasonal_oper.R b/exec_ecvs_seasonal_oper.R index 18f2e493..650d2caf 100644 --- a/exec_ecvs_seasonal_oper.R +++ b/exec_ecvs_seasonal_oper.R @@ -9,6 +9,7 @@ source("modules/Skill/Skill.R") source("modules/Saving/Saving.R") source("modules/Visualization/Visualization.R") source("tools/prepare_outputs.R") +source("modules/Units/Units.R") # Read recipe args = commandArgs(trailingOnly = TRUE) @@ -20,6 +21,8 @@ recipe <- read_atomic_recipe(recipe_file) # Load datasets data <- load_datasets(recipe) +# Change units +data <- Units(recipe, data) # Calibrate datasets data <- calibrate_datasets(recipe, data) # Compute skill metrics diff --git a/modules/Visualization/R/plot_ensemble_mean.R b/modules/Visualization/R/plot_ensemble_mean.R index d49d8902..3a11b727 100644 --- a/modules/Visualization/R/plot_ensemble_mean.R +++ b/modules/Visualization/R/plot_ensemble_mean.R @@ -4,7 +4,6 @@ plot_ensemble_mean <- function(recipe, fcst, outdir, output_conf) { if (recipe$Analysis$Variables$freq == "daily_mean") { stop("Visualization functions not yet implemented for daily data.") } - latitude <- fcst$coords$lat longitude <- fcst$coords$lon archive <- get_archive(recipe) @@ -99,7 +98,7 @@ plot_ensemble_mean <- function(recipe, fcst, outdir, output_conf) { dot_symbol = 20, title_scale = 0.6, filled.continents = F, brks = brks, cols = cols, bar_label_digits = 4, bar_label_scale = 1.5, - axes_label_scale = 1) + axes_label_scale = 1, units = units) base_args[names(output_configuration)] <- output_configuration } else { fun <- PlotRobinson diff --git 
a/modules/Visualization/Visualization.R b/modules/Visualization/Visualization.R index 5d4e4080..1ba015b1 100644 --- a/modules/Visualization/Visualization.R +++ b/modules/Visualization/Visualization.R @@ -24,7 +24,7 @@ plot_data <- function(recipe, # s2dv_cube objects # skill_metrics: list of arrays containing the computed skill metrics # significance: Bool. Whether to include significance dots where applicable - if (is.null(output_conf)) { + if (is.null(output_conf) && !is.null(recipe$Analysis$Region$name)) { output_conf <- read_yaml("modules/Visualization/output_size.yml", eval.exp = TRUE)$region output_conf <- output_conf[[recipe$Analysis$Region$name]] -- GitLab From 02644c2a87efb5b9efea2e2900b9e45699eceb12 Mon Sep 17 00:00:00 2001 From: nperez Date: Thu, 24 Aug 2023 17:54:19 +0200 Subject: [PATCH 09/22] units plot mean bias --- modules/Visualization/R/plot_skill_metrics.R | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/modules/Visualization/R/plot_skill_metrics.R b/modules/Visualization/R/plot_skill_metrics.R index a2986944..0ecd9d64 100644 --- a/modules/Visualization/R/plot_skill_metrics.R +++ b/modules/Visualization/R/plot_skill_metrics.R @@ -57,6 +57,7 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, drop = 'selected')}) for (name in c(skill_scores, scores, "mean_bias", "enssprerr")) { if (name %in% names(skill_metrics)) { + units <- NULL # Define plot characteristics and metric name to display in plot if (name %in% c("rpss", "bss90", "bss10", "frpss", "crpss", "rpss_specs", "bss90_specs", "bss10_specs", @@ -109,6 +110,7 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, cols <- colorbar[2:(length(colorbar) - 1)] col_inf <- colorbar[1] col_sup <- colorbar[length(colorbar)] + units <- data_cube$attrs$Variable$metadata[[var_name]]$units } # Reorder dimensions skill <- Reorder(skill, c("time", "longitude", "latitude")) @@ -182,7 +184,8 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, dot_symbol = 20, dot_size = 1, title_scale = 0.6, filled.continents = F, brks = brks, cols = cols, - col_inf = col_inf, col_sup = col_sup, + col_inf = col_inf, col_sup = col_sup, + units = units, bar_label_digits = 3, bar_label_scale = 1.5, axes_label_scale = 1, width = 7, height = 5) base_args[names(output_configuration)] <- output_configuration -- GitLab From dd6f16648e66623510e29e0b5f923226a0ad812e Mon Sep 17 00:00:00 2001 From: nperez Date: Mon, 28 Aug 2023 17:16:10 +0200 Subject: [PATCH 10/22] mask and dots in Ens Forecast --- modules/Visualization/R/plot_ensemble_mean.R | 55 ++++++++++++++++++-- modules/Visualization/Visualization.R | 52 +++++++++++++++++- modules/Visualization/output_size.yml | 2 + recipe_ecvs_seasonal_oper.yml | 4 +- recipe_tas_seasonal_oper.yml | 3 ++ 5 files changed, 109 insertions(+), 7 deletions(-) diff --git a/modules/Visualization/R/plot_ensemble_mean.R b/modules/Visualization/R/plot_ensemble_mean.R index 3a11b727..8fbe1bfd 100644 --- a/modules/Visualization/R/plot_ensemble_mean.R +++ b/modules/Visualization/R/plot_ensemble_mean.R @@ -1,4 +1,4 @@ -plot_ensemble_mean <- function(recipe, fcst, outdir, output_conf) { +plot_ensemble_mean <- function(recipe, fcst, mask = NULL, dots = NULL, outdir, output_conf) { ## TODO: Add 'anomaly' to plot title # Abort if frequency is daily if (recipe$Analysis$Variables$freq == "daily_mean") { @@ -49,10 +49,19 @@ plot_ensemble_mean <- function(recipe, fcst, outdir, output_conf) { max_value <- max(abs(var_ens_mean)) ugly_intervals <- seq(-max_value, max_value, 
max_value/20) brks <- pretty(ugly_intervals, n = 12, min.n = 8) + cols <- grDevices::hcl.colors(length(brks) - 1, palette, rev = rev) } else { + if (variable == 'prlr') { + cols <- c("#FFAB38", "white", "#41CBC9") + col_fun <- colorRampPalette(cols) + } else { + cols <- c("#33BFD1", "white", "#FF764D") + col_fun <- colorRampPalette(cols) + } brks <- pretty(range(var_ens_mean, na.rm = T), n = 15, min.n = 8) + cols <- col_fun(length(brks)-1) + } - cols <- grDevices::hcl.colors(length(brks) - 1, palette, rev = rev) for (i_syear in start_date) { if (length(start_date) == 1) { @@ -62,6 +71,28 @@ plot_ensemble_mean <- function(recipe, fcst, outdir, output_conf) { i_var_ens_mean <- var_ens_mean[which(start_date == i_syear), , , ] outfile <- paste0(outdir[[var]], "forecast_ensemble_mean-", i_syear) } + # Mask + if (!is.null(mask)) { + outfile <- paste0(outfile, "_enscormask") + var_mask <- ClimProjDiags::Subset(mask, + along = c("var"), + indices = var, + drop = 'selected') + dim_mask <- dim(var_mask) + var_mask <- as.numeric(var_mask <= 0) + dim(var_mask) <- dim_mask + } + # Dots + if (!is.null(dots)) { + outfile <- paste0(outfile, "_enscordots") + var_dots <- ClimProjDiags::Subset(dots, + along = c("var"), + indices = var, + drop = 'selected') + dim_dots <- dim(var_dots) + var_dots <- as.numeric(var_dots <= 0) + dim(var_dots) <- dim_dots + } toptitle <- paste0(system_name, " / ", str_to_title(var_long_name), "\n", "Forecast Ensemble Mean / ", "Init.: ", i_syear) months <- lubridate::month(fcst$attrs$Dates[1, 1, which(start_date == i_syear), ], @@ -74,6 +105,8 @@ plot_ensemble_mean <- function(recipe, fcst, outdir, output_conf) { # Plots PlotLayout(PlotEquiMap, c('longitude', 'latitude'), i_var_ens_mean, longitude, latitude, + mask = mask, + dots = dots, filled.continents = F, toptitle = toptitle, title_scale = 0.7, @@ -93,7 +126,7 @@ plot_ensemble_mean <- function(recipe, fcst, outdir, output_conf) { if (projection == 'cylindrical_equidistant') { fun <- PlotEquiMap output_configuration <- output_conf$PlotEquiMap$forecast_ensemble_mean - base_args <- list(var = NULL, dots = NULL, + base_args <- list(var = NULL, dots = NULL, mask = NULL, lon = longitude, lat = latitude, dot_symbol = 20, title_scale = 0.6, filled.continents = F, brks = brks, cols = cols, @@ -108,7 +141,7 @@ plot_ensemble_mean <- function(recipe, fcst, outdir, output_conf) { } else { target_proj <- projection } - base_args <- list(data = NULL, mask = NULL, + base_args <- list(data = NULL, mask = NULL, dots = NULL, lon = longitude, lat = latitude, lon_dim = 'longitude', lat_dim = 'latitude', target_proj = target_proj, legend = 's2dv', @@ -122,6 +155,18 @@ plot_ensemble_mean <- function(recipe, fcst, outdir, output_conf) { forecast_time <- forecast_time + 12 } forecast_time <- sprintf("%02d", forecast_time) + # Get mask subset + if (!is.null(mask)) { + mask_i <- Subset(var_mask, along = 'time', indices = i, drop = TRUE) + } else { + mask_i <- NULL + } + # Get dots subset + if (!is.null(dots)) { + dots_i <- Subset(var_dots, along = 'time', indices = i, drop = TRUE) + } else { + dots_i <- NULL + } # Define plot title toptitle <- paste0(system_name, " / ", str_to_title(var_long_name), @@ -141,6 +186,8 @@ plot_ensemble_mean <- function(recipe, fcst, outdir, output_conf) { # Modify base arguments base_args[[1]] <- i_var_ens_mean[i, , ] fileout <- paste0(outfile, "_ft", sprintf("%02d", i), ".png") + base_args$mask <- mask_i + base_args$dots <- dots_i # Plot do.call(fun, args = c(base_args, diff --git a/modules/Visualization/Visualization.R 
b/modules/Visualization/Visualization.R index 1ba015b1..1104e4e2 100644 --- a/modules/Visualization/Visualization.R +++ b/modules/Visualization/Visualization.R @@ -70,7 +70,57 @@ plot_data <- function(recipe, # Plot forecast ensemble mean if ("forecast_ensemble_mean" %in% plots) { if (!is.null(data$fcst)) { - plot_ensemble_mean(recipe, data$fcst, outdir, output_conf = output_conf) + if (is.null(recipe$Analysis$Workflow$Visualization$mask_ens)) { + recipe$Analysis$Workflow$Visualization$mask <- FALSE + } + if (is.null(recipe$Analysis$Workflow$Visualization$dots)) { + recipe$Analysis$Worklow$Visualization$dots <- FALSE + } + # Plot without mask or dots + if ((recipe$Analysis$Workflow$Visualization$mask_ens + %in% c('both', FALSE)) || + (recipe$Analysis$Workflow$Visualization$dots + %in% c('both', FALSE))) { + plot_ensemble_mean(recipe, data$fcst, outdir, + mask = NULL, dots = NULL, + output_conf = output_conf) + } + # Plots with masked + if (recipe$Analysis$Workflow$Visualization$mask_ens %in% + c('both', TRUE)) { + if (is.null(skill_metrics)) { + error(recipe$Run$logger, + paste0("For the forecast ensemble mean plot, skill_metrics ", + "need to be provided to be masked.")) + } else if (!('enscorr' %in% names(skill_metrics))) { + error(recipe$Run$logger, + paste0("For the forecast ensemble mean plot, enscor metric ", + "need to be provided to be masked")) + } else { + plot_ensemble_mean(recipe, data$fcst, + mask = skill_metrics$enscorr, + dots = NULL, + outdir, output_conf = output_conf) + } + } + # Plots with dotted negative correlated in ens-mean-fcst + if (recipe$Analysis$Workflow$Visualization$dots %in% c('both', TRUE)) { + if (is.null(skill_metrics)) { + error(recipe$Run$logger, + paste0("For the forecast ensemble mean plot, skill_metrics ", + "need to be provided for the dots")) + } else if (!('enscorr' %in% names(skill_metrics))) { + error(recipe$Run$logger, + paste0("For the forecast ensemble mean plot, enscor metric ", + "needs to be provided for the dots")) + } else { + plot_ensemble_mean(recipe, data$fcst, + mask = NULL, + dots = skill_metrics$enscorr, + outdir, output_conf = output_conf) + } + } + } else { error(recipe$Run$logger, paste0("The forecast ensemble mean plot has been requested, but ", diff --git a/modules/Visualization/output_size.yml b/modules/Visualization/output_size.yml index 6d82b039..f5edfd7a 100644 --- a/modules/Visualization/output_size.yml +++ b/modules/Visualization/output_size.yml @@ -15,6 +15,8 @@ region: #units inches axes_label_scale: 0.8 bar_label_scale: 1.2 bar_extra_margin: !expr c(2,1,0.5,1) + dot_symbol: 4 + dot_size: 1.7 most_likely_terciles: width: 8.5 height: 8.5 diff --git a/recipe_ecvs_seasonal_oper.yml b/recipe_ecvs_seasonal_oper.yml index 7b8969bf..832f36d5 100644 --- a/recipe_ecvs_seasonal_oper.yml +++ b/recipe_ecvs_seasonal_oper.yml @@ -5,8 +5,8 @@ Description: Analysis: Horizon: seasonal # Mandatory, str: either subseasonal, seasonal, or decadal Variables: - - {name: tas, freq: monthly_mean} - - {name: prlr, freq: monthly_mean} + - {name: tas, freq: monthly_mean, units: C} + - {name: prlr, freq: monthly_mean, units: mm, flux: no} Datasets: System: - {name: ECMWF-SEAS5.1} # system21_m1 system35c3s diff --git a/recipe_tas_seasonal_oper.yml b/recipe_tas_seasonal_oper.yml index 13b6a0b9..f1150d3f 100644 --- a/recipe_tas_seasonal_oper.yml +++ b/recipe_tas_seasonal_oper.yml @@ -7,6 +7,8 @@ Analysis: Variables: name: tas freq: monthly_mean + units: C + flux: FALSE Datasets: System: name: ECMWF-SEAS5.1 # Mandatory, str: system5c3s system21_m1 
system35c3s @@ -52,6 +54,7 @@ Analysis: plots: skill_metrics forecast_ensemble_mean most_likely_terciles multi_panel: no mask_terciles: both + dots: both ncores: 4 # Optional, int: number of cores, defaults to 1 remove_NAs: # Optional, bool: Whether NAs are removed, defaults to FALSE Output_format: scorecards -- GitLab From 4bba2103714ae3a3e9f3a54f15239f10767f2978 Mon Sep 17 00:00:00 2001 From: nperez Date: Mon, 28 Aug 2023 17:43:37 +0200 Subject: [PATCH 11/22] fix check --- modules/Visualization/Visualization.R | 2 +- recipe_tas_seasonal_oper.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/Visualization/Visualization.R b/modules/Visualization/Visualization.R index 1104e4e2..251e835a 100644 --- a/modules/Visualization/Visualization.R +++ b/modules/Visualization/Visualization.R @@ -71,7 +71,7 @@ plot_data <- function(recipe, if ("forecast_ensemble_mean" %in% plots) { if (!is.null(data$fcst)) { if (is.null(recipe$Analysis$Workflow$Visualization$mask_ens)) { - recipe$Analysis$Workflow$Visualization$mask <- FALSE + recipe$Analysis$Workflow$Visualization$mask_ens <- FALSE } if (is.null(recipe$Analysis$Workflow$Visualization$dots)) { recipe$Analysis$Worklow$Visualization$dots <- FALSE diff --git a/recipe_tas_seasonal_oper.yml b/recipe_tas_seasonal_oper.yml index f1150d3f..c5e6482f 100644 --- a/recipe_tas_seasonal_oper.yml +++ b/recipe_tas_seasonal_oper.yml @@ -53,8 +53,8 @@ Analysis: Visualization: plots: skill_metrics forecast_ensemble_mean most_likely_terciles multi_panel: no - mask_terciles: both - dots: both + mask_terciles: no + dots: true ncores: 4 # Optional, int: number of cores, defaults to 1 remove_NAs: # Optional, bool: Whether NAs are removed, defaults to FALSE Output_format: scorecards -- GitLab From 20bc382995d0e6cb7100765e5164c83f7be89d05 Mon Sep 17 00:00:00 2001 From: nperez Date: Tue, 29 Aug 2023 16:00:36 +0200 Subject: [PATCH 12/22] Add logo to plots --- exec_ecvs_seasonal_oper.R | 16 +++++++++++++++- tools/BSC_logo_95.jpg | Bin 0 -> 5997 bytes 2 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 tools/BSC_logo_95.jpg diff --git a/exec_ecvs_seasonal_oper.R b/exec_ecvs_seasonal_oper.R index 650d2caf..cfd5bb2d 100644 --- a/exec_ecvs_seasonal_oper.R +++ b/exec_ecvs_seasonal_oper.R @@ -30,9 +30,23 @@ skill_metrics <- compute_skill_metrics(recipe, data) # Compute percentiles and probability bins probabilities <- compute_probabilities(recipe, data) # Export all data to netCDF -## TODO: Fix plotting # save_data(recipe, data, skill_metrics, probabilities) # Plot data plot_data(recipe, data, skill_metrics, probabilities, significance = T) +## Add logo +logo <- "/esarchive/scratch/nperez/git/auto-s2s/tools/BSC_logo_95.jpg" +system <- list.files(paste0(recipe$Run$output_dir, "/plots")) +## This line may lead to add to logos: +# variables <- list.files(paste0(recipe$Run$output_dir, "/plots/", system)) +variable <- recipe$Analysis$Variable$name +files <- lapply(variable, function(x) { + f <- list.files(paste0(recipe$Run$output_dir, "/plots/", + system, "/", x)) + full_path <- paste0(recipe$Run$output_dir, "/plots/", + system, "/", x,"/", f)})[[1]] +dim(files) <- c(file = length(files)) +Apply(list(files), target_dims = NULL, function(x) { + system(paste("composite -gravity southeast -geometry +10+10", + logo, x, x))}, ncores = recipe$Analysis$ncores) diff --git a/tools/BSC_logo_95.jpg b/tools/BSC_logo_95.jpg new file mode 100644 index 0000000000000000000000000000000000000000..f961c0409bdd5c973e8521570d96e4ff341e5fcc GIT binary patch literal 
[base85-encoded binary payload of tools/BSC_logo_95.jpg omitted]
zMqPkF+;KEk|ACd^Uu&OS%mFX&{x~B=Pal5H*TM^xxIGm|Bi>4W@g0(n01kwl)?{7x zqM>Iykqo}2rR&2U`3tyYr$ z1P$XA<~F>#HaP-QyY3@s;2V=)#muI#gqz2JzVowTQ=$F>jB;cHqpB6n#I&(IRd7}u z*fHv*nU{C4trXHb^~XiBf&RQy6kY}c0tNkNIt?J;t70j2I8!(m54Ex zSJUBq+pl%{!8AsuAFtt~4MiF0=$rKHZ-&qz%Jj^qdqH3T^<^3Dgv*Cz2C{+QNTTms zwvJ_DSuW{HjXMk9mkx!H1S+VKQ5*Nna_%hdXchW=JNNlleNta-X{?WUt!mpv-ot7^ zhY#)I>HfOi>p4(1G%4w)E`!%UG}C7XAOdp}Wk$6o#=9aig^pM@TXyz|cFI1Skz?ct z8XIt?Vo<`y%(wdR!>YcR(`LVasmbA({L12lCqLqem1{6+#M;2BdX~_F4yc!@qMrW_ zZA7>262f*fYj7X`>F?}ji;qf{kt}SDs2AfeD1j9QQpPV$DlcHte+{75)RkBewJLk- zcry1jOk!%6au|kKI^6&Hv$syT#?HDgM(U5pd4N*aDJW^4Vqy*TS!B%nr`n$MY)Lsb z4_JpkO3{4H*&!-4%}a#j5g-mV-_G}*@kn?BRYEa4outZYd1yv96fTj zq*Ot8hxvp9f|N5doVyYToM)WTvGx8YXE?Zlv|uHXpVNE<6r$*}PeGXu`?z26`^PZH z!s>U=xx)?$(sjL6Meq667)i(?WJHgxOzI^3Q{W7+X`hrSW~(3FtTHSbjn=M0bD4As z&GoLtNwxK>tAzkx2od_GU8{?>%ZdOH!Sh10g#KHF(MI*z@g z%xed{D7CTDuGe56>n;kM*XsBHEJbAMUKfS?*Vl?PdJEC(O86!**7f?+`^{#mb5C6Th&@iYH_ev4Z(=Rme+MiMkdO|_nS29Ox%a_h? z+s|Gm{k&2`7EbB;!dgJJV$mD%`t>T&^GrKo3XFr2GGJQ+TH9wb^a9w<^x=gyJGYFG zndYT!MD#)=EM~at?)eilslxn9bsmD-R&P$QoS98TI!XDfl!jkvZw)VP1Gkluco=hk zB;AlBBcuEO%hxBbGZ! zoi%cvlYUH-3^es0RAC|?WL4Vlh|gtvDk@~H#`v|9hZpgZYrcK9#7bP-;j!cDHf(d7 zqC0E$PWsk8p%6neC&~(B>PtVx=Or0f_Z$Z2#gBlh-HoJlD}T}LYe^=wg$-vP3+SMO zPUUMwkOV-Gh9(fb{n2ubMg1L3WyE;^vvv2$y7Wjd)J$VN_5p!vbyGaA&d>jAxG#xI zqkTo8nzZODDkXcL3%9=zuRB8D=C%E~VdT$uQFH`9C{e#53Y#9Kry*vQC{!!7t}n|^ zjIo2TKxov-ernRx3MMzCXw1#xac)r_+2nifzelOi{fIf)c6>PbB#a+sRp=S%(Fl&> z%`dsOsPEQcN(Oh|y^JcH%ofwWvi2Ogay25lPdRcqS`@sx`}b!PB;BH=*0h(Rw1kX` zkhyA&!nyh2D1*3uuy93bQ!nE}Tc?*-LV;^s&}TI?&v7uqsaO z1nXDB%Miv`<_@v|0P*tizw#R(3)t~vdIUti$bAIBWOatFZ=_`Zj-yru!GEX%fRT=Q z-hu-k0v0q@+3&~uEQQt8t>}|kYvP>jSn(5-Xr!>P=~-xEtf4#S0UP@$aSl75M?lMv a-6Oyz1vR6Oo20Kg_8Eunb_XE&i2i?zdqyb$ literal 0 HcmV?d00001 -- GitLab From 82c2132880f78aa8a3843f96fb827bcca7fa829c Mon Sep 17 00:00:00 2001 From: nperez Date: Wed, 30 Aug 2023 18:24:23 +0200 Subject: [PATCH 13/22] Normal category dev in mostlikelyterciles --- .../R/plot_most_likely_terciles_map.R | 57 +- modules/Visualization/R/tmp/ColorBar.R | 593 ++++++ .../R/tmp/GradientCatsColorBar.R | 110 +- modules/Visualization/R/tmp/PlotCombinedMap.R | 138 +- modules/Visualization/R/tmp/PlotEquiMap.R | 1267 ++++++++++++ modules/Visualization/R/tmp/Utils.R | 1779 +++++++++++++++++ modules/Visualization/R/tmp/clim.palette.R | 70 + modules/Visualization/R/tmp/zzz.R | 256 +++ 8 files changed, 4197 insertions(+), 73 deletions(-) create mode 100644 modules/Visualization/R/tmp/ColorBar.R create mode 100644 modules/Visualization/R/tmp/PlotEquiMap.R create mode 100644 modules/Visualization/R/tmp/Utils.R create mode 100644 modules/Visualization/R/tmp/clim.palette.R create mode 100644 modules/Visualization/R/tmp/zzz.R diff --git a/modules/Visualization/R/plot_most_likely_terciles_map.R b/modules/Visualization/R/plot_most_likely_terciles_map.R index ed5f3635..cfc0635e 100644 --- a/modules/Visualization/R/plot_most_likely_terciles_map.R +++ b/modules/Visualization/R/plot_most_likely_terciles_map.R @@ -1,8 +1,14 @@ +## Functions required for normal cat and triangles end. 
+## See https://earth.bsc.es/gitlab/external/cstools/-/issues/125 source("modules/Visualization/R/tmp/PlotMostLikelyQuantileMap.R") source("modules/Visualization/R/tmp/PlotCombinedMap.R") +source("modules/Visualization/R/tmp/ColorBar.R") +source("modules/Visualization/R/tmp/clim.palette.R") +source("modules/Visualization/R/tmp/Utils.R") +source("modules/Visualization/R/tmp/PlotEquiMap.R") +source("/esarchive/scratch/aho/tmp/ColorBar_onebox.R") source("modules/Visualization/R/tmp/GradientCatsColorBar.R") - ## TODO: Change name plot_most_likely_terciles <- function(recipe, fcst, @@ -49,17 +55,15 @@ plot_most_likely_terciles <- function(recipe, var_long_name <- fcst$attrs$Variable$metadata[[variable]]$long_name # Choose colors depending on the variable if (variable %in% c('prlr')) { ## add others - cols <- list(c("#FFC473", #FFAB38", - "darkorange1"), - c("#b5b5b5", "black"), - c("#A0E5E4",#41CBC9", - "deepskyblue3")) + cols <- list(c("#FFC473", "#FFAB38"), + c("grey"), + c("#A0E5E4","#41CBC9")) + col_sup <- list("darkorange1", "grey", "deepskyblue3") } else { - cols <- list(c("#A0E5E4",#33BFD1", - "deepskyblue3"), - c("#b5b5b5", "black"), - c("#FFB19A", #FF764D", - "indianred3")) + cols <- list(c("#A0E5E4","#33BFD1"), + c("grey"), + c("#FFB19A", "#FF764D")) + col_sup <- list("deepskyblue3", "grey", "indianred3") } var_probs <- ClimProjDiags::Subset(probs_fcst, along = c("var"), @@ -138,14 +142,18 @@ plot_most_likely_terciles <- function(recipe, dot_size = 1, col_mask = 'antiquewhite', cols = cols, + col_sup = col_sup, title_scale = 1, legend_scale = 0.8, cex_bar_titles = 0.9, bar_label_digits = 2, bar_label_scale = 0.7, + bar_limits = list(c(40, 85), c(40, 85), c(40, 85)), + brks = list(4,2,4), axes_label_scale = 1.1, plot_margin = c(5.1, 4.1, 4.1, 2.1), - triangle_ends = c(F, F) , width = 10, height = 8) + return_leg = T, + triangle_ends = c(F, T) , width = 10, height = 8) base_args[names(output_configuration)] <- output_configuration for (i in 1:length(months)) { # Get forecast time label @@ -179,10 +187,33 @@ plot_most_likely_terciles <- function(recipe, base_args$probs <- i_var_probs[i, , , ] base_args$mask <- mask_i base_args$dots <- dots_i - do.call(PlotMostLikelyQuantileMap, + cb_info <- do.call(PlotMostLikelyQuantileMap, args = c(base_args, list(toptitle = toptitle, fileout = fileout))) + # Add color bars with 1 range for normal category: + for (i_bar in 1:cb_info$nmap) { + tmp <- cb_info + tmp$brks <- tmp$brks[[i_bar]] + tmp$cols <- tmp$cols[[i_bar]] + tmp$bar_limits <- tmp$bar_limits[[i_bar]] + tmp$col_sup <- tmp$col_sup[[i_bar]] + tmp$title <- tmp$bar_titles[i_bar] + tmp$bar_titles <- NULL + tmp$nmap <- NULL + tmp$var_limits <- NULL + if (length(cb_info$brks[[i_bar]]) > 2) { + # plot colorbar as normal + do.call(ColorBar, tmp) + } else { + # plot a square + tmp$brks <- 4 + tmp$draw_ticks <- F + tmp$box_label <- "40+" + do.call(ColorBar_onebox, tmp) + } + } + dev.off() } } } diff --git a/modules/Visualization/R/tmp/ColorBar.R b/modules/Visualization/R/tmp/ColorBar.R new file mode 100644 index 00000000..22261c16 --- /dev/null +++ b/modules/Visualization/R/tmp/ColorBar.R @@ -0,0 +1,593 @@ +#'Draws a Color Bar +#' +#'Generates a color bar to use as colouring function for map plots and +#'optionally draws it (horizontally or vertically) to be added to map +#'multipanels or plots. It is possible to draw triangles at the ends of the +#'colour bar to represent values that go beyond the range of interest. 
A +#'number of options is provided to adjust the colours and the position and +#'size of the components. The drawn colour bar spans a whole figure region +#'and is compatible with figure layouts.\cr\cr +#'The generated colour bar consists of a set of breaks that define the +#'length(brks) - 1 intervals to classify each of the values in each of the +#'grid cells of a two-dimensional field. The corresponding grid cell of a +#'given value of the field will be coloured in function of the interval it +#'belongs to.\cr\cr +#'The only mandatory parameters are 'var_limits' or 'brks' (in its second +#'format, see below). +#' +#'@param brks Can be provided in two formats: +#'\itemize{ +#' \item{A single value with the number of breaks to be generated +#' automatically, between the minimum and maximum specified in 'var_limits' +#' (both inclusive). Hence the parameter 'var_limits' is mandatory if 'brks' +#' is provided with this format. If 'bar_limits' is additionally provided, +#' values only between 'bar_limits' will be generated. The higher the value +#' of 'brks', the smoother the plot will look.} +#' \item{A vector with the actual values of the desired breaks. Values will +#' be reordered by force to ascending order. If provided in this format, no +#' other parameters are required to generate/plot the colour bar.} +#'} +#' This parameter is optional if 'var_limits' is specified. If 'brks' not +#' specified but 'cols' is specified, it will take as value length(cols) + 1. +#' If 'cols' is not specified either, 'brks' will take 21 as value. +#'@param cols Vector of length(brks) - 1 valid colour identifiers, for each +#' interval defined by the breaks. This parameter is optional and will be +#' filled in with a vector of length(brks) - 1 colours generated with the +#' function provided in 'color_fun' (\code{clim.colors} by default).\cr 'cols' +#' can have one additional colour at the beginning and/or at the end with the +#' aim to colour field values beyond the range of interest represented in the +#' colour bar. If any of these extra colours is provided, parameter +#' 'triangle_ends' becomes mandatory in order to disambiguate which of the +#' ends the colours have been provided for. +#'@param vertical TRUE/FALSE for vertical/horizontal colour bar +#' (disregarded if plot = FALSE). +#'@param subsampleg The first of each subsampleg breaks will be ticked on the +#' colorbar. Takes by default an approximation of a value that yields a +#' readable tick arrangement (extreme breaks always ticked). If set to 0 or +#' lower, no labels are drawn. See the code of the function for details or +#' use 'extra_labels' for customized tick arrangements. +#'@param bar_limits Vector of two numeric values with the extremes of the +#' range of values represented in the colour bar. If 'var_limits' go beyond +#' this interval, the drawing of triangle extremes is triggered at the +#' corresponding sides, painted in 'col_inf' and 'col_sup'. Either of them +#' can be set as NA and will then take as value the corresponding extreme in +#' 'var_limits' (hence a triangle end won't be triggered for these sides). +#' Takes as default the extremes of 'brks' if available, else the same values +#' as 'var_limits'. +#'@param var_limits Vector of two numeric values with the minimum and maximum +#' values of the field to represent. These are used to know whether to draw +#' triangle ends at the extremes of the colour bar and what colour to fill +#' them in with. If not specified, take the same value as the extremes of +#' 'brks'. 
Hence the parameter 'brks' is mandatory if 'var_limits' is not +#' specified. +#'@param triangle_ends Vector of two logical elements, indicating whether to +#' force the drawing of triangle ends at each of the extremes of the colour +#' bar. This choice is automatically made from the provided 'brks', +#' 'bar_limits', 'var_limits', 'col_inf' and 'col_sup', but the behaviour +#' can be manually forced to draw or not to draw the triangle ends with this +#' parameter. If 'cols' is provided, 'col_inf' and 'col_sup' will take +#' priority over 'triangle_ends' when deciding whether to draw the triangle +#' ends or not. +#'@param col_inf Colour to fill the inferior triangle end with. Useful if +#' specifying colours manually with parameter 'cols', to specify the colour +#' and to trigger the drawing of the lower extreme triangle, or if 'cols' is +#' not specified, to replace the colour automatically generated by ColorBar(). +#'@param col_sup Colour to fill the superior triangle end with. Useful if +#' specifying colours manually with parameter 'cols', to specify the colour +#' and to trigger the drawing of the upper extreme triangle, or if 'cols' is +#' not specified, to replace the colour automatically generated by ColorBar(). +#'@param color_fun Function to generate the colours of the color bar. Must +#' take an integer and must return as many colours. The returned colour vector +#' can have the attribute 'na_color', with a colour to draw NA values. This +#' parameter is set by default to clim.palette(). +#'@param plot Logical value indicating whether to only compute its breaks and +#' colours (FALSE) or to also draw it on the current device (TRUE). +#'@param draw_ticks Whether to draw ticks for the labels along the colour bar +#' (TRUE) or not (FALSE). TRUE by default. Disregarded if 'plot = FALSE'. +#'@param draw_separators Whether to draw black lines in the borders of each of +#' the colour rectancles of the colour bar (TRUE) or not (FALSE). FALSE by +#' default. Disregarded if 'plot = FALSE'. +#'@param triangle_ends_scale Scale factor for the drawn triangle ends of the +#' colour bar, if drawn at all. Takes 1 by default (rectangle triangle +#' proportional to the thickness of the colour bar). Disregarded if +#' 'plot = FALSE'. +#'@param extra_labels Numeric vector of extra labels to draw along axis of +#' the colour bar. The number of provided decimals will be conserved. +#' Disregarded if 'plot = FALSE'. +#'@param title Title to draw on top of the colour bar, most commonly with the +#' units of the represented field in the neighbour figures. Empty by default. +#'@param title_scale Scale factor for the 'title' of the colour bar. +#' Takes 1 by default. +#'@param label_scale Scale factor for the labels of the colour bar. +#' Takes 1 by default. +#'@param tick_scale Scale factor for the length of the ticks of the labels +#' along the colour bar. Takes 1 by default. +#'@param extra_margin Extra margins to be added around the colour bar, +#' in the format c(y1, x1, y2, x2). The units are margin lines. Takes +#' rep(0, 4) by default. +#'@param label_digits Number of significant digits to be displayed in the +#' labels of the colour bar, usually to avoid too many decimal digits +#' overflowing the figure region. This does not have effect over the labels +#' provided in 'extra_labels'. Takes 4 by default. +#'@param ... Arguments to be passed to the method. 
Only accepts the following +#' graphical parameters:\cr adj ann ask bg bty cex.lab cex.main cex.sub cin +#' col.axis col.lab col.main col.sub cra crt csi cxy err family fg fig fin +#' font font.axis font.lab font.main font.sub lend lheight ljoin lmitre lty +#' lwd mai mex mfcol mfrow mfg mkh oma omd omi page pch pin plt pty smo srt +#' tck tcl usr xaxp xaxs xaxt xlog xpd yaxp yaxs yaxt ylbias ylog.\cr For more +#' information about the parameters see `par`. +#' +#'@return +#'\item{brks}{ +#' Breaks used for splitting the range in intervals. +#'} +#'\item{cols}{ +#' Colours generated for each of the length(brks) - 1 intervals. +#' Always of length length(brks) - 1. +#'} +#'\item{col_inf}{ +#' Colour used to draw the lower triangle end in the colour +#' bar (NULL if not drawn at all). +#'} +#'\item{col_sup}{ +#' Colour used to draw the upper triangle end in the colour +#' bar (NULL if not drawn at all). +#'} +#' +#'@examples +#'cols <- c("dodgerblue4", "dodgerblue1", "forestgreen", "yellowgreen", "white", +#' "white", "yellow", "orange", "red", "saddlebrown") +#'lims <- seq(-1, 1, 0.2) +#'ColorBar(lims, cols) +#'@importFrom grDevices col2rgb rgb +#'@export +ColorBar <- function(brks = NULL, cols = NULL, vertical = TRUE, + subsampleg = NULL, bar_limits = NULL, var_limits = NULL, + triangle_ends = NULL, col_inf = NULL, col_sup = NULL, + color_fun = clim.palette(), plot = TRUE, + draw_ticks = TRUE, draw_separators = FALSE, + triangle_ends_scale = 1, extra_labels = NULL, + title = NULL, title_scale = 1, + label_scale = 1, tick_scale = 1, + extra_margin = rep(0, 4), label_digits = 4, ...) { + # Required checks + if ((is.null(brks) || length(brks) < 2) && is.null(bar_limits) && is.null(var_limits)) { + stop("At least one of 'brks' with the desired breaks, 'bar_limits' or ", + "'var_limits' must be provided to generate the colour bar.") + } + + # Check brks + if (!is.null(brks)) { + if (!is.numeric(brks)) { + stop("Parameter 'brks' must be numeric if specified.") + } else if (length(brks) > 1) { + reorder <- sort(brks, index.return = TRUE) + if (!is.null(cols)) { + cols <- cols[reorder$ix[which(reorder$ix <= length(cols))]] + } + brks <- reorder$x + } + } + + # Check bar_limits + if (!is.null(bar_limits)) { + if (!(all(is.na(bar_limits) | is.numeric(bar_limits)) && (length(bar_limits) == 2))) { + stop("Parameter 'bar_limits' must be a vector of two numeric elements or NAs.") + } + } + + # Check var_limits + if (!is.null(var_limits)) { + if (!(is.numeric(var_limits) && (length(var_limits) == 2))) { + stop("Parameter 'var_limits' must be a numeric vector of length 2.") + } else if (anyNA(var_limits)) { + stop("Parameter 'var_limits' must not contain NA values.") + } else if (any(is.infinite(var_limits))) { + stop("Parameter 'var_limits' must not contain infinite values.") + } + } + + # Check cols + if (!is.null(cols)) { + if (!is.character(cols)) { + stop("Parameter 'cols' must be a vector of character strings.") + } else if (any(!sapply(cols, .IsColor))) { + stop("Parameter 'cols' must contain valid colour identifiers.") + } + } + + # Check color_fun + if (!is.function(color_fun)) { + stop("Parameter 'color_fun' must be a colour-generator function.") + } + + # Check integrity among brks, bar_limits and var_limits + if (is.null(brks) || (length(brks) < 2)) { + if (is.null(brks)) { + if (is.null(cols)) { + brks <- 21 + } else { + brks <- length(cols) + 1 + } + } + if (is.null(bar_limits) || anyNA(bar_limits)) { + # var_limits is defined + if (is.null(bar_limits)) { + bar_limits <- c(NA, NA) + } + 
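+      # 'brks' is a number of breaks at this point; any NA entry in
+      # 'bar_limits' is filled from 'var_limits' extended by 'half_width',
+      # so the variable extremes fall inside the end colour boxes rather
+      # than exactly on the outermost breaks.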
half_width <- 0.5 * (var_limits[2] - var_limits[1]) / (brks - 1) + bar_limits[which(is.na(bar_limits))] <- c(var_limits[1] - half_width, var_limits[2] + half_width)[which(is.na(bar_limits))] + brks <- seq(bar_limits[1], bar_limits[2], length.out = brks) + } else if (is.null(var_limits)) { + # bar_limits is defined + var_limits <- bar_limits + half_width <- 0.5 * (var_limits[2] - var_limits[1]) / (brks - 1) + brks <- seq(bar_limits[1], bar_limits[2], length.out = brks) + var_limits[1] <- var_limits[1] + half_width / 50 + } else { + # both bar_limits and var_limits are defined + brks <- seq(bar_limits[1], bar_limits[2], length.out = brks) + } + } else if (is.null(bar_limits)) { + if (is.null(var_limits)) { + # brks is defined + bar_limits <- c(head(brks, 1), tail(brks, 1)) + var_limits <- bar_limits + half_width <- 0.5 * (var_limits[2] - var_limits[1]) / (length(brks) - 1) + var_limits[1] <- var_limits[1] + half_width / 50 + } else { + # brks and var_limits are defined + bar_limits <- c(head(brks, 1), tail(brks, 1)) + } + } else { + # brks and bar_limits are defined + # or + # brks, bar_limits and var_limits are defined + if (head(brks, 1) != bar_limits[1] || tail(brks, 1) != bar_limits[2]) { + stop("Parameters 'brks' and 'bar_limits' are inconsistent.") + } + } + + # Check col_inf + if (!is.null(col_inf)) { + if (!.IsColor(col_inf)) { + stop("Parameter 'col_inf' must be a valid colour identifier.") + } + } + + # Check col_sup + if (!is.null(col_sup)) { + if (!.IsColor(col_sup)) { + stop("Parameter 'col_sup' must be a valid colour identifier.") + } + } + + # Check triangle_ends + if (!is.null(triangle_ends) && (!is.logical(triangle_ends) || length(triangle_ends) != 2)) { + stop("Parameter 'triangle_ends' must be a logical vector with two elements.") + } + teflc <- triangle_ends_from_limit_cols <- c(!is.null(col_inf), !is.null(col_sup)) + if (is.null(triangle_ends) && is.null(col_inf) && is.null(col_sup)) { + triangle_ends <- c(FALSE, FALSE) + if (bar_limits[1] >= var_limits[1]) { + triangle_ends[1] <- TRUE + } + if (bar_limits[2] < var_limits[2]) { + triangle_ends[2] <- TRUE + } + } else if (!is.null(triangle_ends) && is.null(col_inf) && is.null(col_sup)) { + triangle_ends <- triangle_ends + } else if (is.null(triangle_ends) && (!is.null(col_inf) || !is.null(col_sup))) { + triangle_ends <- teflc + } else if (any(teflc != triangle_ends)) { + if (!is.null(brks) && length(brks) > 1 && !is.null(cols) && length(cols) >= length(brks)) { + triangle_ends <- teflc + } else if (!is.null(cols)) { + triangle_ends <- teflc + } else { + triangle_ends <- triangle_ends + } + } + if (plot && !is.null(var_limits)) { + if ((bar_limits[1] >= var_limits[1]) && !triangle_ends[1]) { + .warning("There are variable values smaller or equal to the lower limit ", + "of the colour bar and the lower triangle end has been ", + "disabled. These will be painted in the colour for NA values.") + } + if ((bar_limits[2] < var_limits[2]) && !triangle_ends[2]) { + .warning("There are variable values greater than the higher limit ", + "of the colour bar and the higher triangle end has been ", + "disabled. 
These will be painted in the colour for NA values.") + } + } + + # Generate colours if needed + if (is.null(cols)) { + cols <- color_fun(length(brks) - 1 + sum(triangle_ends)) + attr_bk <- attributes(cols) + if (triangle_ends[1]) { + if (is.null(col_inf)) col_inf <- head(cols, 1) + cols <- cols[-1] + } + if (triangle_ends[2]) { + if (is.null(col_sup)) col_sup <- tail(cols, 1) + cols <- cols[-length(cols)] + } + attributes(cols) <- attr_bk + } else if ((length(cols) != (length(brks) - 1))) { + stop("Incorrect number of 'brks' and 'cols'. There must be one more break than the number of colours.") + } + + # Check vertical + if (!is.logical(vertical)) { + stop("Parameter 'vertical' must be TRUE or FALSE.") + } + + # Check extra_labels + if (is.null(extra_labels)) { + extra_labels <- numeric(0) + } + if (!is.numeric(extra_labels)) { + stop("Parameter 'extra_labels' must be numeric.") + } else { + if (any(extra_labels > bar_limits[2]) || any(extra_labels < bar_limits[1])) { + stop("Parameter 'extra_labels' must not contain ticks beyond the color bar limits.") + } + } + extra_labels <- sort(extra_labels) + + # Check subsampleg + primes <- function(x) { + # Courtesy of Chase. See http://stackoverflow.com/questions/6424856/r-function-for-returning-all-factors + x <- as.integer(x) + div <- seq_len(abs(x)) + factors <- div[x %% div == 0L] + factors <- list(neg = -factors, pos = factors) + return(factors) + } + remove_final_tick <- FALSE + added_final_tick <- TRUE + if (is.null(subsampleg)) { + subsampleg <- 1 + while (length(brks) / subsampleg > 15 - 1) { + next_factor <- primes((length(brks) - 1) / subsampleg)$pos + next_factor <- next_factor[length(next_factor) - ifelse(length(next_factor) > 2, 1, 0)] + subsampleg <- subsampleg * next_factor + } + if (subsampleg > (length(brks) - 1) / 4) { + subsampleg <- max(1, round(length(brks) / 4)) + extra_labels <- c(extra_labels, bar_limits[2]) + added_final_tick <- TRUE + if ((length(brks) - 1) %% subsampleg < (length(brks) - 1) / 4 / 2) { + remove_final_tick <- TRUE + } + } + } else if (!is.numeric(subsampleg)) { + stop("Parameter 'subsampleg' must be numeric.") + } + subsampleg <- round(subsampleg) + draw_labels <- TRUE + if ((subsampleg) < 1) { + draw_labels <- FALSE + } + + # Check plot + if (!is.logical(plot)) { + stop("Parameter 'plot' must be logical.") + } + + # Check draw_separators + if (!is.logical(draw_separators)) { + stop("Parameter 'draw_separators' must be logical.") + } + + # Check triangle_ends_scale + if (!is.numeric(triangle_ends_scale)) { + stop("Parameter 'triangle_ends_scale' must be numeric.") + } + + # Check draw_ticks + if (!is.logical(draw_ticks)) { + stop("Parameter 'draw_ticks' must be logical.") + } + + # Check title + if (is.null(title)) { + title <- '' + } + if (!is.character(title)) { + stop("Parameter 'title' must be a character string.") + } + + # Check title_scale + if (!is.numeric(title_scale)) { + stop("Parameter 'title_scale' must be numeric.") + } + + # Check label_scale + if (!is.numeric(label_scale)) { + stop("Parameter 'label_scale' must be numeric.") + } + + # Check tick_scale + if (!is.numeric(tick_scale)) { + stop("Parameter 'tick_scale' must be numeric.") + } + + # Check extra_margin + if (!is.numeric(extra_margin) || length(extra_margin) != 4) { + stop("Parameter 'extra_margin' must be a numeric vector of length 4.") + } + + # Check label_digits + if (!is.numeric(label_digits)) { + stop("Parameter 'label_digits' must be numeric.") + } + label_digits <- round(label_digits) + + # Process the user graphical 
parameters that may be passed in the call + ## Graphical parameters to exclude + excludedArgs <- c("cex", "cex.axis", "col", "lab", "las", "mar", "mgp", "new", "ps") + userArgs <- .FilterUserGraphicArgs(excludedArgs, ...) + + # + # Plotting colorbar + # ~~~~~~~~~~~~~~~~~~~ + # + if (plot) { + pars_to_save <- c('mar', 'cex', names(userArgs), 'mai', 'mgp', 'las', 'xpd') + saved_pars <- par(pars_to_save) + par(mar = c(0, 0, 0, 0), cex = 1) + image(1, 1, t(t(1)), col = rgb(0, 0, 0, 0), axes = FALSE, xlab = '', ylab = '') + # Get the availale space + figure_size <- par('fin') + cs <- par('csi') + # This allows us to assume we always want to plot horizontally + if (vertical) { + figure_size <- rev(figure_size) + } +# pannel_to_redraw <- par('mfg') +# .SwitchToFigure(pannel_to_redraw[1], pannel_to_redraw[2]) + # Load the user parameters + par(new = TRUE) + par(userArgs) + # Set up color bar plot region + margins <- c(0.0, 0, 0.0, 0) + cex_title <- 1 * title_scale + cex_labels <- 0.9 * label_scale + cex_ticks <- -0.3 * tick_scale + spaceticklab <- max(-cex_ticks, 0) + if (vertical) { + margins[1] <- margins[1] + (1.2 * cex_labels * 3 + spaceticklab) * cs + margins <- margins + extra_margin[c(4, 1:3)] * cs + } else { + margins[1] <- margins[1] + (1.2 * cex_labels * 1 + spaceticklab) * cs + margins <- margins + extra_margin * cs + } + if (title != '') { + margins[3] <- margins[3] + (1.0 * cex_title) * cs + } + margins[3] <- margins[3] + sqrt(figure_size[2] / (margins[1] + margins[3])) * + figure_size[2] / 6 * ifelse(title != '', 0.5, 0.8) + # Set side margins + margins[2] <- margins[2] + figure_size[1] / 16 + margins[4] <- margins[4] + figure_size[1] / 16 + triangle_ends_prop <- 1 / 32 * triangle_ends_scale + triangle_ends_cex <- triangle_ends_prop * figure_size[2] + if (triangle_ends[1]) { + margins[2] <- margins[2] + triangle_ends_cex + } + if (triangle_ends[2]) { + margins[4] <- margins[4] + triangle_ends_cex + } + ncols <- length(cols) + # Set up the points of triangles + # Compute the proportion of horiz. 
space occupied by one plot unit + prop_unit <- (1 - (margins[2] + margins[4]) / figure_size[1]) / ncols + # Convert triangle height to plot inits + triangle_height <- triangle_ends_prop / prop_unit + left_triangle <- list(x = c(1, 1 - triangle_height, 1) - 0.5, + y = c(1.4, 1, 0.6)) + right_triangle <- list(x = c(ncols, ncols + triangle_height, ncols) + 0.5, + y = c(1.4, 1, 0.6)) + # Draw the color squares and title + if (vertical) { + par(mai = c(margins[2:4], margins[1]), + mgp = c(0, spaceticklab + 0.2, 0), las = 1) + d <- 4 + image(1, 1:ncols, t(1:ncols), axes = FALSE, col = cols, + xlab = '', ylab = '') + title(ylab = title, line = cex_title * (0.2 + 0.1), cex.lab = cex_title) + # Draw top and bottom border lines + lines(c(0.6, 0.6), c(1 - 0.5, ncols + 0.5)) + lines(c(1.4, 1.4), c(1 - 0.5, ncols + 0.5)) + # Rotate triangles + names(left_triangle) <- rev(names(left_triangle)) + names(right_triangle) <- rev(names(right_triangle)) + } else { + # The term - cex_labels / 4 * (3 / cex_labels - 1) was found by + # try and error + par(mai = margins, + mgp = c(0, cex_labels / 2 + spaceticklab + - cex_labels / 4 * (3 / cex_labels - 1), 0), + las = 1) + d <- 1 + image(1:ncols, 1, t(t(1:ncols)), axes = FALSE, col = cols, + xlab = '', ylab = '') + title(title, line = cex_title * (0.2 + 0.1), cex.main = cex_title) + # Draw top and bottom border lines + lines(c(1 - 0.5, ncols + 0.5), c(0.6, 0.6)) + lines(c(1 - 0.5, ncols + 0.5), c(1.4, 1.4)) + tick_length <- -0.4 + } + # Draw the triangles + par(xpd = TRUE) + if (triangle_ends[1]) { + # Draw left triangle + polygon(left_triangle$x, left_triangle$y, col = col_inf, border = NA) + lines(left_triangle$x, left_triangle$y) + } + if (triangle_ends[2]) { + # Draw right triangle + polygon(right_triangle$x, right_triangle$y, col = col_sup, border = NA) + lines(right_triangle$x, right_triangle$y) + } + par(xpd = FALSE) + + # Put the separators + if (vertical) { + if (draw_separators) { + for (i in 1:(ncols - 1)) { + lines(c(0.6, 1.4), c(i, i) + 0.5) + } + } + if (draw_separators || is.null(col_inf)) { + lines(c(0.6, 1.4), c(0.5, 0.5)) + } + if (draw_separators || is.null(col_sup)) { + lines(c(0.6, 1.4), c(ncols + 0.5, ncols + 0.5)) + } + } else { + if (draw_separators) { + for (i in 1:(ncols - 1)) { + lines(c(i, i) + 0.5, c(0.6, 1.4)) + } + } + if (draw_separators || is.null(col_inf)) { + lines(c(0.5, 0.5), c(0.6, 1.4)) + } + if (draw_separators || is.null(col_sup)) { + lines(c(ncols + 0.5, ncols + 0.5), c(0.6, 1.4)) + } + } + # Put the ticks + plot_range <- length(brks) - 1 + var_range <- tail(brks, 1) - head(brks, 1) + extra_labels_at <- ((extra_labels - head(brks, 1)) / var_range) * plot_range + 0.5 + at <- seq(1, length(brks), subsampleg) + labels <- brks[at] + # Getting rid of next-to-last tick if too close to last one + if (remove_final_tick) { + at <- at[-length(at)] + labels <- labels[-length(labels)] + } + labels <- signif(labels, label_digits) + if (added_final_tick) { + extra_labels[length(extra_labels)] <- signif(tail(extra_labels, 1), label_digits) + } + at <- at - 0.5 + at <- c(at, extra_labels_at) + labels <- c(labels, extra_labels) + tick_reorder <- sort(at, index.return = TRUE) + at <- tick_reorder$x + if (draw_labels) { + labels <- labels[tick_reorder$ix] + } else { + labels <- FALSE + } + axis(d, at = at, tick = draw_ticks, labels = labels, cex.axis = cex_labels, tcl = cex_ticks) + par(saved_pars) + } + invisible(list(brks = brks, cols = cols, col_inf = col_inf, col_sup = col_sup)) +} + diff --git 
a/modules/Visualization/R/tmp/GradientCatsColorBar.R b/modules/Visualization/R/tmp/GradientCatsColorBar.R index 4cdeb0af..00c89b1d 100644 --- a/modules/Visualization/R/tmp/GradientCatsColorBar.R +++ b/modules/Visualization/R/tmp/GradientCatsColorBar.R @@ -1,28 +1,45 @@ -#Draws Color Bars for Categories -#A wrapper of s2dv::ColorBar to generate multiple color bars for different -#categories, and each category has different color set. GradientCatsColorBar <- function(nmap, brks = NULL, cols = NULL, vertical = TRUE, subsampleg = NULL, bar_limits, var_limits = NULL, - triangle_ends = NULL, plot = TRUE, + triangle_ends = NULL, col_inf = NULL, col_sup = NULL, plot = TRUE, draw_separators = FALSE, bar_titles = NULL, title_scale = 1, label_scale = 1, extra_margin = rep(0, 4), ...) { - # bar_limits - if (!is.numeric(bar_limits) || length(bar_limits) != 2) { - stop("Parameter 'bar_limits' must be a numeric vector of length 2.") - } + # bar_limits: a vector of 2 or a list + if (!is.list(bar_limits)) { + if (!is.numeric(bar_limits) || length(bar_limits) != 2) { + stop("Parameter 'bar_limits' must be a numeric vector of length 2 or a list containing that.") + } + # turn into list + bar_limits <- rep(list(bar_limits), nmap) + } else { + if (any(!sapply(bar_limits, is.numeric)) || any(sapply(bar_limits, length) != 2)) { + stop("Parameter 'bar_limits' must be a numeric vector of length 2 or a list containing that.") + } + if (length(bar_limits) != nmap) { + stop("Parameter 'bar_limits' must have the length of 'nmap'.") + } + } # Check brks - if (is.null(brks) || (is.numeric(brks) && length(brks) == 1)) { - num_brks <- 5 - if (is.numeric(brks)) { - num_brks <- brks + if (!is.list(brks)) { + if (is.null(brks)) { + brks <- 5 + } else if (!is.numeric(brks)) { + stop("Parameter 'brks' must be a numeric vector.") + } + # Turn it into list + brks <- rep(list(brks), nmap) + } else { + if (length(brks) != nmap) { + stop("Parameter 'brks' must have the length of 'nmap'.") } - brks <- seq(from = bar_limits[1], to = bar_limits[2], length.out = num_brks) } - if (!is.numeric(brks)) { - stop("Parameter 'brks' must be a numeric vector.") + for (i_map in 1:nmap) { + if (length(brks[[i_map]]) == 1) { + brks[[i_map]] <- seq(from = bar_limits[[i_map]][1], to = bar_limits[[i_map]][2], length.out = brks[[i_map]]) + } } + # Check cols col_sets <- list(c("#A1D99B", "#74C476", "#41AB5D", "#238B45"), c("#6BAED6FF", "#4292C6FF", "#2171B5FF", "#08519CFF"), @@ -37,6 +54,44 @@ GradientCatsColorBar <- function(nmap, brks = NULL, cols = NULL, vertical = TRUE chosen_sets <- array(1:length(col_sets), nmap) } cols <- col_sets[chosen_sets] + + # Set triangle_ends, col_sup, col_inf + #NOTE: The "col" input of ColorBar() later is not NULL (since we determine it here) + # so ColorBar() cannot decide these parameters for us. + #NOTE: Here, col_inf and col_sup are prior to triangle_ends, which is consistent with ColorBar(). 
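+  #NOTE: Illustrative use of the per-category interface (hypothetical values):
+  #      GradientCatsColorBar(nmap = 3, brks = list(4, 2, 4),
+  #                           bar_limits = rep(list(c(40, 85)), 3),
+  #                           vertical = FALSE, plot = FALSE)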
+ #TODO: Make triangle_ends a list + if (is.null(triangle_ends)) { + if (!is.null(var_limits)) { + triangle_ends <- c(FALSE, FALSE) + #TODO: bar_limits is a list + if (bar_limits[1] >= var_limits[1] | !is.null(col_inf)) { + triangle_ends[1] <- TRUE + if (is.null(col_inf)) { + col_inf <- lapply(cols, head, 1) + cols <- lapply(cols, '[', -1) + } + } + if (bar_limits[2] < var_limits[2] | !is.null(col_sup)) { + triangle_ends[2] <- TRUE + if (is.null(col_sup)) { + col_sup <- lapply(cols, tail, 1) + cols <- lapply(cols, '[', -length(cols[[1]])) + } + } + } else { + triangle_ends <- c(!is.null(col_inf), !is.null(col_sup)) + } + } else { # triangle_ends has values + if (triangle_ends[1] & is.null(col_inf)) { + col_inf <- lapply(cols, head, 1) + cols <- lapply(cols, '[', -1) + } + if (triangle_ends[2] & is.null(col_sup)) { + col_sup <- lapply(cols, tail, 1) + cols <- lapply(cols, '[', -length(cols[[1]])) + } + } + } else { if (!is.list(cols)) { stop("Parameter 'cols' must be a list of character vectors.") @@ -45,13 +100,12 @@ GradientCatsColorBar <- function(nmap, brks = NULL, cols = NULL, vertical = TRUE stop("Parameter 'cols' must be a list of character vectors.") } if (length(cols) != nmap) { - stop("Parameter 'cols' must be a list of the same length as the number of ", - "maps in 'maps'.") + stop("Parameter 'cols' must be a list of the same length as 'nmap'.") } } - for (i in 1:length(cols)) { - if (length(cols[[i]]) != (length(brks) - 1)) { - cols[[i]] <- grDevices::colorRampPalette(cols[[i]])(length(brks) - 1) + for (i_map in 1:length(cols)) { + if (length(cols[[i_map]]) != (length(brks[[i_map]]) - 1)) { + cols[[i_map]] <- grDevices::colorRampPalette(cols[[i_map]])(length(brks[[i_map]]) - 1) } } @@ -68,23 +122,17 @@ GradientCatsColorBar <- function(nmap, brks = NULL, cols = NULL, vertical = TRUE } if (plot) { -brks <- brks[-length(brks)] for (k in 1:nmap) { -triangle_ends <- c(F,T) -col_sup <- cols[[k]][length(cols[[k]])] -cols[[k]] <- cols[[k]][-length(cols[[k]])] - s2dv::ColorBar(brks = brks, cols = cols[[k]], vertical = FALSE, - subsampleg = subsampleg, -# bar_limits = bar_limits, var_limits = var_limits, - triangle_ends = triangle_ends, plot = TRUE, - col_sup = col_sup, +#TODO: Add s2dv:: + ColorBar(brks = brks[[k]], cols = cols[[k]], vertical = FALSE, subsampleg = subsampleg, + bar_limits = bar_limits[[k]], #var_limits = var_limits, + triangle_ends = triangle_ends, col_inf = col_inf[[k]], col_sup = col_sup[[k]], plot = TRUE, draw_separators = draw_separators, title = bar_titles[[k]], title_scale = title_scale, label_scale = label_scale, extra_margin = extra_margin) } } else { - #TODO: col_inf and col_sup - return(list(brks = brks, cols = cols)) + return(list(brks = brks, cols = cols, col_inf = col_inf, col_sup = col_sup)) } } diff --git a/modules/Visualization/R/tmp/PlotCombinedMap.R b/modules/Visualization/R/tmp/PlotCombinedMap.R index 9c7a6ede..e4d6d7f3 100644 --- a/modules/Visualization/R/tmp/PlotCombinedMap.R +++ b/modules/Visualization/R/tmp/PlotCombinedMap.R @@ -78,6 +78,12 @@ #' 'down', 'd', 'D', 'bottom', 'b', 'B', 'south', 's', 'S' (default)\cr #' 'right', 'r', 'R', 'east', 'e', 'E'\cr #' 'left', 'l', 'L', 'west', 'w', 'W' +#'@param return_leg A logical value indicating if the color bars information +#' should be returned by the function. If TRUE, the function doesn't plot the +#' color bars but still creates the layout with color bar areas, and the +#' arguments for GradientCatsColorBar() or ColorBar() will be returned. 
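+#' For example, \code{cb <- PlotCombinedMap(..., return_leg = TRUE)} can be
+#' followed by one \code{ColorBar(brks = cb$brks[[k]], cols = cb$cols[[k]], ...)}
+#' call for each of the \code{cb$nmap} categories.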
It is +#' convenient for users to adjust the color bars manually. The default is +#' FALSE, the color bars will be plotted directly. #'@param ... Additional parameters to be passed on to \code{PlotEquiMap}. #' #'@examples @@ -121,16 +127,18 @@ PlotCombinedMap <- function(maps, lon, lat, map_select_fun, display_range, map_dim = 'map', brks = NULL, cols = NULL, + bar_limits = NULL, triangle_ends = c(F, F), col_inf = NULL, col_sup = NULL, col_unknown_map = 'white', mask = NULL, col_mask = 'grey', dots = NULL, bar_titles = NULL, legend_scale = 1, cex_bar_titles = 1.5, - plot_margin = NULL, + plot_margin = NULL, bar_extra_margin = c(2, 0, 2, 0), fileout = NULL, width = 8, height = 5, - size_units = 'in', res = 100, drawleg = T, + size_units = 'in', res = 100, drawleg = T, return_leg = FALSE, ...) { args <- list(...) + # If there is any filenames to store the graphics, process them # to select the right device if (!is.null(fileout)) { @@ -254,12 +262,16 @@ PlotCombinedMap <- function(maps, lon, lat, # Generate the desired brks and cols. Only nmap, brks, cols, bar_limits, and # bar_titles matter here because plot = F. - colorbar <- GradientCatsColorBar(nmap = dim(maps)[map_dim], + var_limits_maps <- range(maps, na.rm = TRUE) + if (is.null(bar_limits)) bar_limits <- display_range + nmap <- dim(maps)[map_dim] + colorbar <- GradientCatsColorBar(nmap = nmap, brks = brks, cols = cols, vertical = FALSE, - subsampleg = NULL, bar_limits = display_range, var_limits = NULL, - triangle_ends = NULL, plot = FALSE, draw_separators = TRUE, + subsampleg = NULL, bar_limits = bar_limits, + var_limits = var_limits_maps, + triangle_ends = triangle_ends, col_inf = col_inf, col_sup = col_sup, plot = FALSE, draw_separators = TRUE, bar_titles = bar_titles, title_scale = cex_bar_titles, label_scale = legend_scale * 1.5, - extra_margin = c(2, 0, 2, 0)) + extra_margin = bar_extra_margin) # Check legend_scale if (!is.numeric(legend_scale)) { @@ -303,9 +315,52 @@ PlotCombinedMap <- function(maps, lon, lat, #---------------------- # Identify the most likely map #---------------------- - brks_norm <- seq(0, 1, length.out = length(colorbar$brks)) - if (is.function(map_select_fun)) { - range_width <- display_range[2] - display_range[1] + if (!is.null(colorbar$col_sup[[1]])) { + + brks_norm <- vector('list', length = nmap) + range_width <- vector('list', length = nmap) + slightly_tune_val <- vector('list', length = nmap) + for (ii in 1:nmap) { + brks_norm[[ii]] <- seq(0, 1, length.out = length(colorbar$brks[[ii]]) + 1) # add one break for col_sup + slightly_tune_val[[ii]] <- brks_norm[[ii]][2] / (length(brks_norm[[ii]]) * 2) + range_width[[ii]] <- diff(range(colorbar$brks[[ii]])) + } + ml_map <- apply(maps, c(lat_dim, lon_dim), function(x) { + if (any(is.na(x))) { + res <- NA + } else { + res <- which(x == map_select_fun(x)) + if (length(res) > 0) { + res <- res_ind <- res[1] + if (map_select_fun(x) < display_range[1] || map_select_fun(x) > display_range[2]) { + res <- -0.5 + } else { + if (map_select_fun(x) > tail(colorbar$brks[[res_ind]], 1)) { # col_sup + res <- res + 1 - slightly_tune_val[[res_ind]] + } else { + res <- res + ((map_select_fun(x) - colorbar$brks[[res_ind]][1]) / + range_width[[res_ind]] * ((length(brks_norm[[res_ind]]) - 2) / (length(brks_norm[[res_ind]]) - 1))) + if (map_select_fun(x) == colorbar$brks[[res_ind]][1]) { + res <- res + slightly_tune_val[[res_ind]] + } + } + } + } else { + res <- -0.5 + } + } + res + }) + + } else { + + brks_norm <- vector('list', length = nmap) + range_width <- display_range[2] - 
display_range[1] #vector('list', length = nmap) + slightly_tune_val <- vector('list', length = nmap) + for (ii in 1:nmap) { + brks_norm[[ii]] <- seq(0, 1, length.out = length(colorbar$brks[[ii]])) + slightly_tune_val[[ii]] <- brks_norm[[ii]][2] / (length(brks_norm[[ii]]) * 2) + } ml_map <- apply(maps, c(lat_dim, lon_dim), function(x) { if (any(is.na(x))) { res <- NA @@ -313,13 +368,13 @@ PlotCombinedMap <- function(maps, lon, lat, res <- which(x == map_select_fun(x)) if (length(res) > 0) { res <- res[1] - if (map_select_fun(x) < display_range[1] || - map_select_fun(x) > display_range[2]) { + if (map_select_fun(x) < display_range[1] || + map_select_fun(x) > display_range[2]) { res <- -0.5 } else { res <- res + (map_select_fun(x) - display_range[1]) / range_width if (map_select_fun(x) == display_range[1]) { - res <- res + brks_norm[2] / (length(brks_norm) * 2) + res <- res + slightly_tune_val } } } else { @@ -328,18 +383,15 @@ PlotCombinedMap <- function(maps, lon, lat, } res }) - } else { - stop("Providing 'map_select_fun' as array not implemented yet.") - ml_map <- map_select_fun } - nmap <- dim(maps)[map_dim] + nlat <- length(lat) nlon <- length(lon) #---------------------- # Set latitudes from minimum to maximum #---------------------- - if (lat[1] > lat[nlat]){ + if (lat[1] > lat[nlat]) { lat <- lat[nlat:1] indices <- list(nlat:1, TRUE) ml_map <- do.call("[", c(list(x = ml_map), indices)) @@ -372,11 +424,21 @@ PlotCombinedMap <- function(maps, lon, lat, #---------------------- # Set colors and breaks and then PlotEquiMap #---------------------- - tcols <- c(col_unknown_map, colorbar$cols[[1]]) - for (k in 2:nmap) { - tcols <- append(tcols, c(col_unknown_map, colorbar$cols[[k]])) + if (!is.null(colorbar$col_sup[[1]])) { + tcols <- c(col_unknown_map, colorbar$cols[[1]], colorbar$col_sup[[1]]) + tbrks <- c(-1, brks_norm[[1]] + rep(1, each = length(brks_norm[[1]]))) + for (k in 2:nmap) { + tcols <- append(tcols, c(col_unknown_map, colorbar$cols[[k]], colorbar$col_sup[[k]])) + tbrks <- c(tbrks, brks_norm[[k]] + rep(k, each = length(brks_norm[[k]]))) + } + } else { # original code + tcols <- c(col_unknown_map, colorbar$cols[[1]]) + tbrks <- c(-1, brks_norm[[1]] + rep(1, each = length(brks_norm[[1]]))) + for (k in 2:nmap) { + tcols <- append(tcols, c(col_unknown_map, colorbar$cols[[k]])) + tbrks <- c(tbrks, brks_norm[[k]] + rep(k, each = length(brks_norm[[k]]))) + } } - tbrks <- c(-1, brks_norm + rep(1:nmap, each = length(brks_norm))) if (is.null(plot_margin)) { plot_margin <- c(5, 4, 4, 2) + 0.1 # default of par()$mar @@ -429,17 +491,35 @@ PlotCombinedMap <- function(maps, lon, lat, par(mar = old_mar) } - if (drawleg) { - GradientCatsColorBar(nmap = dim(maps)[map_dim], + if (drawleg & !return_leg) { + GradientCatsColorBar(nmap = dim(maps)[map_dim], brks = colorbar$brks, cols = colorbar$cols, vertical = FALSE, - subsampleg = NULL, bar_limits = display_range, var_limits = NULL, - triangle_ends = NULL, plot = TRUE, draw_separators = TRUE, + subsampleg = NULL, bar_limits = bar_limits, + var_limits = var_limits_maps, + triangle_ends = triangle_ends, col_inf = colorbar$col_inf, col_sup = colorbar$col_sup, + plot = TRUE, draw_separators = TRUE, bar_titles = bar_titles, title_scale = cex_bar_titles, label_scale = legend_scale * 1.5, - extra_margin = c(2, 0, 1.5, 0)) + extra_margin = bar_extra_margin) } - - # If the graphic was saved to file, close the connection with the device - if (!is.null(fileout)) dev.off() + + if (!return_leg) { + # If the graphic was saved to file, close the connection with 
the device + if (!is.null(fileout)) dev.off() + } + + if (return_leg) { + tmp <- list(nmap = dim(maps)[map_dim], + brks = colorbar$brks, cols = colorbar$cols, vertical = FALSE, + subsampleg = NULL, bar_limits = bar_limits, + var_limits = var_limits_maps, + triangle_ends = triangle_ends, col_inf = colorbar$col_inf, col_sup = colorbar$col_sup, + plot = TRUE, draw_separators = TRUE, + bar_titles = bar_titles, title_scale = cex_bar_titles, label_scale = legend_scale * 1.5, + extra_margin = bar_extra_margin) + return(tmp) + #NOTE: The device is not off! Can keep plotting the color bars. + } + } diff --git a/modules/Visualization/R/tmp/PlotEquiMap.R b/modules/Visualization/R/tmp/PlotEquiMap.R new file mode 100644 index 00000000..78025a02 --- /dev/null +++ b/modules/Visualization/R/tmp/PlotEquiMap.R @@ -0,0 +1,1267 @@ +#'Maps A Two-Dimensional Variable On A Cylindrical Equidistant Projection +#' +#'Map longitude-latitude array (on a regular rectangular or gaussian grid) +#'on a cylindrical equidistant latitude and longitude projection with coloured +#'grid cells. Only the region for which data has been provided is displayed. +#'A colour bar (legend) can be plotted and adjusted. It is possible to draw +#'superimposed arrows, dots, symbols, contour lines and boxes. A number of +#'options is provided to adjust the position, size and colour of the +#'components. Some parameters are provided to add and adjust the masks that +#'include continents, oceans, and lakes. This plot function is compatible with +#'figure layouts if colour bar is disabled. +#' +#'@param var Array with the values at each cell of a grid on a regular +#' rectangular or gaussian grid. The array is expected to have two +#' dimensions: c(latitude, longitude). Longitudes can be in ascending or +#' descending order and latitudes in any order. It can contain NA values +#' (coloured with 'colNA'). Arrays with dimensions c(longitude, latitude) +#' will also be accepted but 'lon' and 'lat' will be used to disambiguate so +#' this alternative is not appropriate for square arrays. It is allowed that +#' the positions of the longitudinal and latitudinal coordinate dimensions +#' are interchanged. +#'@param lon Numeric vector of longitude locations of the cell centers of the +#' grid of 'var', in ascending or descending order (same as 'var'). Expected +#' to be regularly spaced, within either of the ranges [-180, 180] or +#' [0, 360]. Data for two adjacent regions split by the limits of the +#' longitude range can also be provided, e.g. \code{lon = c(0:50, 300:360)} +#' ('var' must be provided consitently). +#'@param lat Numeric vector of latitude locations of the cell centers of the +#' grid of 'var', in any order (same as 'var'). Expected to be from a regular +#' rectangular or gaussian grid, within the range [-90, 90]. +#'@param varu Array of the zonal component of wind/current/other field with +#' the same dimensions as 'var'. It is allowed that the positions of the +#' longitudinal and latitudinal coordinate dimensions are interchanged. +#'@param varv Array of the meridional component of wind/current/other field +#' with the same dimensions as 'var'. It is allowed that the positions of the +#' longitudinal and latitudinal coordinate dimensions are interchanged. +#'@param toptitle Top title of the figure, scalable with parameter +#' 'title_scale'. +#'@param sizetit Scale factor for the figure top title provided in parameter +#' 'toptitle'. Deprecated. Use 'title_scale' instead. 
+#'@param units Title at the top of the colour bar, most commonly the units of +#' the variable provided in parameter 'var'. +#'@param brks,cols,bar_limits,triangle_ends Usually only providing 'brks' is +#' enough to generate the desired colour bar. These parameters allow to +#' define n breaks that define n - 1 intervals to classify each of the values +#' in 'var'. The corresponding grid cell of a given value in 'var' will be +#' coloured in function of the interval it belongs to. These parameters are +#' sent to \code{ColorBar()} to generate the breaks and colours. Additional +#' colours for values beyond the limits of the colour bar are also generated +#' and applied to the plot if 'bar_limits' or 'brks' and 'triangle_ends' are +#' properly provided to do so. See ?ColorBar for a full explanation. +#'@param col_inf,col_sup,colNA Colour identifiers to colour the values in +#' 'var' that go beyond the extremes of the colour bar and to colour NA +#' values, respectively. 'colNA' takes attr(cols, 'na_color') if available by +#' default, where cols is the parameter 'cols' if provided or the vector of +#' colors returned by 'color_fun'. If not available, it takes 'pink' by +#' default. 'col_inf' and 'col_sup' will take the value of 'colNA' if not +#' specified. See ?ColorBar for a full explanation on 'col_inf' and 'col_sup'. +#'@param color_fun,subsampleg,bar_extra_labels,draw_bar_ticks Set of +#' parameters to control the visual aspect of the drawn colour bar +#' (1/3). See ?ColorBar for a full explanation. +#'@param draw_separators,triangle_ends_scale,bar_label_digits Set of +#' parameters to control the visual aspect of the drawn colour bar +#' (2/3). See ?ColorBar for a full explanation. +#'@param bar_label_scale,units_scale,bar_tick_scale,bar_extra_margin Set of +#' parameters to control the visual aspect of the drawn colour bar (3/3). +#' See ?ColorBar for a full explanation. +#'@param square Logical value to choose either to draw a coloured square for +#' each grid cell in 'var' (TRUE; default) or to draw contour lines and fill +#' the spaces in between with colours (FALSE). In the latter case, +#' 'filled.continents' will take the value FALSE if not specified. +#'@param filled.continents Colour to fill in drawn projected continents. +#' Takes the value gray(0.5) by default or, if 'square = FALSE', takes the +#' value FALSE. If set to FALSE, continents are not filled in. +#'@param filled.oceans A logical value or the color name to fill in drawn +#' projected oceans. The default value is FALSE. If it is TRUE, the default +#' colour is "light blue". +#'@param country.borders A logical value indicating if the country borders +#' should be plotted (TRUE) or not (FALSE). It only works when +#' 'filled.continents' is FALSE. The default value is FALSE. +#'@param coast_color Colour of the coast line of the drawn projected continents. +#' Takes the value gray(0.5) by default. +#'@param coast_width Line width of the coast line of the drawn projected +#' continents. Takes the value 1 by default. +#'@param lake_color Colour of the lake or other water body inside continents. +#' The default value is NULL. +#'@param shapefile A character string of the path to a .rds file or a list +#' object containinig shape file data. If it is a .rds file, it should contain +#' a list. The list should contains 'x' and 'y' at least, which indicate the +#' location of the shape. The default value is NULL. +#'@param shapefile_color Line color of the shapefile. +#'@param shapefile_lwd Line width of the shapefile. 
The default value is 1. +#'@param contours Array of same dimensions as 'var' to be added to the plot +#' and displayed with contours. Parameter 'brks2' is required to define the +#' magnitude breaks for each contour curve. Disregarded if 'square = FALSE'. +#' It is allowed that the positions of the longitudinal and latitudinal +#' coordinate dimensions are interchanged. +#'@param brks2 Vector of magnitude breaks where to draw contour curves for the +#' array provided in 'contours' or if 'square = FALSE'. +#'@param contour_lwd Line width of the contour curves provided via 'contours' +#' and 'brks2', or if 'square = FALSE'. +#'@param contour_color Line color of the contour curves provided via 'contours' +#' and 'brks2', or if 'square = FALSE'. +#'@param contour_lty Line type of the contour curves. Takes 1 (solid) by +#' default. See help on 'lty' in par() for other accepted values. +#'@param contour_draw_label A logical value indicating whether to draw the +#' contour labels or not. The default value is TRUE. +#'@param contour_label_scale Scale factor for the superimposed labels when +#' drawing contour levels. +#'@param dots Array of same dimensions as 'var' or with dimensions +#' c(n, dim(var)), where n is the number of dot/symbol layers to add to the +#' plot. A value of TRUE at a grid cell will draw a dot/symbol on the +#' corresponding square of the plot. By default all layers provided in 'dots' +#' are plotted with dots, but a symbol can be specified for each of the +#' layers via the parameter 'dot_symbol'. It is allowed that the positions of +#' the longitudinal and latitudinal coordinate dimensions are interchanged. +#'@param dot_symbol Single character/number or vector of characters/numbers +#' that correspond to each of the symbol layers specified in parameter 'dots'. +#' If a single value is specified, it will be applied to all the layers in +#' 'dots'. Takes 15 (centered square) by default. See 'pch' in par() for +#' additional accepted options. +#'@param dot_size Scale factor for the dots/symbols to be plotted, specified +#' in 'dots'. If a single value is specified, it will be applied to all +#' layers in 'dots'. Takes 1 by default. +#'@param arr_subsamp Subsampling factor to select a subset of arrows in +#' 'varu' and 'varv' to be drawn. Only one out of arr_subsamp arrows will +#' be drawn. Takes 1 by default. +#'@param arr_scale Scale factor for drawn arrows from 'varu' and 'varv'. +#' Takes 1 by default. +#'@param arr_ref_len Length of the refence arrow to be drawn as legend at the +#' bottom of the figure (in same units as 'varu' and 'varv', only affects the +#' legend for the wind or variable in these arrays). Defaults to 15. +#'@param arr_units Units of 'varu' and 'varv', to be drawn in the legend. +#' Takes 'm/s' by default. +#'@param arr_scale_shaft Parameter for the scale of the shaft of the arrows +#' (which also depend on the number of figures and the arr_scale parameter). +#' Defaults to 1. +#'@param arr_scale_shaft_angle Parameter for the scale of the angle of the +#' shaft of the arrows (which also depend on the number of figure and the +#' arr_scale parameter). Defaults to 1. +#'@param axelab Whether to draw longitude and latitude axes or not. +#' TRUE by default. +#'@param labW Whether to label the longitude axis with a 'W' instead of minus +#' for negative values. Defaults to FALSE. +#'@param lab_dist_x A numeric of the distance of the longitude labels to the +#' box borders. The default value is NULL and is automatically adjusted by +#' the function. 
+#'@param lab_dist_y A numeric of the distance of the latitude labels to the +#' box borders. The default value is NULL and is automatically adjusted by +#' the function. +#'@param degree_sym A logical indicating whether to include degree symbol +#' (30° N) or not (30N; default). +#'@param intylat Interval between latitude ticks on y-axis, in degrees. +#' Defaults to 20. +#'@param intxlon Interval between latitude ticks on x-axis, in degrees. +#' Defaults to 20. +#'@param xlonshft A numeric of the degrees to shift the latitude ticks. The +#' default value is 0. +#'@param ylatshft A numeric of the degrees to shift the longitude ticks. The +#' default value is 0. +#'@param xlabels A vector of character string of the custumized x-axis labels. +#' The values should correspond to each tick, which is decided by the longitude +#' and parameter 'intxlon'. The default value is NULL and the labels will be +#' automatically generated. +#'@param ylabels A vector of character string of the custumized y-axis labels. +#' The values should correspond to each tick, which is decided by the latitude +#' and parameter 'intylat'. The default value is NULL and the labels will be +#' automatically generated. +#'@param axes_tick_scale Scale factor for the tick lines along the longitude +#' and latitude axes. +#'@param axes_label_scale Scale factor for the labels along the longitude +#' and latitude axes. +#'@param drawleg Whether to plot a color bar (legend, key) or not. Defaults to +#' TRUE. It is not possible to plot the colour bar if 'add = TRUE'. Use +#' ColorBar() and the return values of PlotEquiMap() instead. +#'@param boxlim Limits of a box to be added to the plot, in degrees: +#' c(x1, y1, x2, y2). A list with multiple box specifications can also be +#' provided. +#'@param boxcol Colour of the box lines. A vector with a colour for each of +#' the boxes is also accepted. Defaults to 'purple2'. +#'@param boxlwd Line width of the box lines. A vector with a line width for +#' each of the boxes is also accepted. Defaults to 5. +#'@param margin_scale Scale factor for the margins around the map plot, with +#' the format c(y1, x1, y2, x2). Defaults to rep(1, 4). If drawleg = TRUE, +#' then margin_scale[1] is subtracted 1 unit. +#'@param title_scale Scale factor for the figure top title. Defaults to 1. +#'@param numbfig Number of figures in the layout the plot will be put into. +#' A higher numbfig will result in narrower margins and smaller labels, +#' axe labels, ticks, thinner lines, ... Defaults to 1. +#'@param fileout File where to save the plot. If not specified (default) a +#' graphics device will pop up. Extensions allowed: eps/ps, jpeg, png, pdf, +#' bmp and tiff. +#'@param width File width, in the units specified in the parameter size_units +#' (inches by default). Takes 8 by default. +#'@param height File height, in the units specified in the parameter +#' size_units (inches by default). Takes 5 by default. +#'@param size_units Units of the size of the device (file or window) to plot +#' in. Inches ('in') by default. See ?Devices and the creator function of +#' the corresponding device. +#'@param res Resolution of the device (file or window) to plot in. See +#' ?Devices and the creator function of the corresponding device. +#'@param \dots Arguments to be passed to the method. 
Only accepts the following +#' graphical parameters:\cr +#' adj ann ask bg bty cex.sub cin col.axis col.lab col.main col.sub cra crt +#' csi cxy err family fg font font.axis font.lab font.main font.sub lend +#' lheight ljoin lmitre mex mfcol mfrow mfg mkh omd omi page pch pin plt +#' pty smo srt tcl usr xaxp xaxs xaxt xlog xpd yaxp yaxs yaxt ylbias ylog \cr +#' For more information about the parameters see `par`. +#' +#'@return +#'\item{brks}{ +#' Breaks used for colouring the map (and legend if drawleg = TRUE). +#'} +#'\item{cols}{ +#' Colours used for colouring the map (and legend if drawleg = TRUE). Always +#' of length length(brks) - 1. +#'} +#'\item{col_inf}{ +#' Colour used to draw the lower triangle end in the colour bar (NULL if not +#' drawn at all). +#' } +#'\item{col_sup}{ +#' Colour used to draw the upper triangle end in the colour bar (NULL if not +#' drawn at all). +#'} +#' +#'@examples +#'# See examples on Load() to understand the first lines in this example +#' \dontrun{ +#'data_path <- system.file('sample_data', package = 's2dv') +#'expA <- list(name = 'experiment', path = file.path(data_path, +#' 'model/$EXP_NAME$/$STORE_FREQ$_mean/$VAR_NAME$_3hourly', +#' '$VAR_NAME$_$START_DATE$.nc')) +#'obsX <- list(name = 'observation', path = file.path(data_path, +#' '$OBS_NAME$/$STORE_FREQ$_mean/$VAR_NAME$', +#' '$VAR_NAME$_$YEAR$$MONTH$.nc')) +#' +#'# Now we are ready to use Load(). +#'startDates <- c('19851101', '19901101', '19951101', '20001101', '20051101') +#'sampleData <- Load('tos', list(expA), list(obsX), startDates, +#' leadtimemin = 1, leadtimemax = 4, output = 'lonlat', +#' latmin = 27, latmax = 48, lonmin = -12, lonmax = 40) +#' } +#' \dontshow{ +#'startDates <- c('19851101', '19901101', '19951101', '20001101', '20051101') +#'sampleData <- s2dv:::.LoadSampleData('tos', c('experiment'), +#' c('observation'), startDates, +#' leadtimemin = 1, +#' leadtimemax = 4, +#' output = 'lonlat', +#' latmin = 27, latmax = 48, +#' lonmin = -12, lonmax = 40) +#' } +#'PlotEquiMap(sampleData$mod[1, 1, 1, 1, , ], sampleData$lon, sampleData$lat, +#' toptitle = 'Predicted sea surface temperature for Nov 1960 from 1st Nov', +#' title_scale = 0.5) +#'@import graphics maps +#'@importFrom grDevices dev.cur dev.new dev.off gray +#'@importFrom stats cor +#'@export +PlotEquiMap <- function(var, lon, lat, varu = NULL, varv = NULL, + toptitle = NULL, sizetit = NULL, units = NULL, + brks = NULL, cols = NULL, bar_limits = NULL, + triangle_ends = NULL, col_inf = NULL, col_sup = NULL, + colNA = NULL, color_fun = clim.palette(), + square = TRUE, filled.continents = NULL, + filled.oceans = FALSE, country.borders = FALSE, + coast_color = NULL, coast_width = 1, lake_color = NULL, + shapefile = NULL, shapefile_color = NULL, shapefile_lwd = 1, + contours = NULL, brks2 = NULL, contour_lwd = 0.5, + contour_color = 'black', contour_lty = 1, + contour_draw_label = TRUE, contour_label_scale = 1, + dots = NULL, dot_symbol = 4, dot_size = 1, + arr_subsamp = floor(length(lon) / 30), arr_scale = 1, + arr_ref_len = 15, arr_units = "m/s", + arr_scale_shaft = 1, arr_scale_shaft_angle = 1, + axelab = TRUE, labW = FALSE, + lab_dist_x = NULL, lab_dist_y = NULL, degree_sym = FALSE, + intylat = 20, intxlon = 20, + xlonshft = 0, ylatshft = 0, xlabels = NULL, ylabels = NULL, + axes_tick_scale = 1, axes_label_scale = 1, + drawleg = TRUE, subsampleg = NULL, + bar_extra_labels = NULL, draw_bar_ticks = TRUE, + draw_separators = FALSE, triangle_ends_scale = 1, + bar_label_digits = 4, bar_label_scale = 1, + units_scale = 1, 
bar_tick_scale = 1, + bar_extra_margin = rep(0, 4), + boxlim = NULL, boxcol = 'purple2', boxlwd = 5, + margin_scale = rep(1, 4), title_scale = 1, + numbfig = NULL, fileout = NULL, + width = 8, height = 5, size_units = 'in', + res = 100, ...) { + # Process the user graphical parameters that may be passed in the call + ## Graphical parameters to exclude + excludedArgs <- c("cex", "cex.axis", "cex.lab", "cex.main", "col", "din", "fig", "fin", "lab", "las", "lty", "lwd", "mai", "mar", "mgp", "new", "oma", "ps", "tck") + userArgs <- .FilterUserGraphicArgs(excludedArgs, ...) + + # If there is any filenames to store the graphics, process them + # to select the right device + if (!is.null(fileout)) { + deviceInfo <- .SelectDevice(fileout = fileout, width = width, height = height, units = size_units, res = res) + saveToFile <- deviceInfo$fun + fileout <- deviceInfo$files + } + + # Check lon, lat + if (!is.numeric(lon) || !is.numeric(lat)) { + stop("Parameters 'lon' and 'lat' must be numeric vectors.") + } + + # Check var + if (is.null(var)) { + stop("Parameter 'var' cannot be NULL.") + } + if (!is.array(var)) { + stop("Parameter 'var' must be a numeric array.") + } + + transpose <- FALSE + if (!is.null(names(dim(var)))) { + if (any(names(dim(var)) %in% .KnownLonNames()) && + any(names(dim(var)) %in% .KnownLatNames())) { + lon_dim <- names(dim(var))[names(dim(var)) %in% .KnownLonNames()] + lat_dim <- names(dim(var))[names(dim(var)) %in% .KnownLatNames()] + } else { + names(dim(var)) <- NULL + lat_dim <- NULL + lon_dim <- NULL + .warning("Dimension names of 'var' doesn't correspond to any coordinates names supported by s2dv package.") + } + } else { + lon_dim <- NULL + lat_dim <- NULL + .warning("Parameter 'var' should have dimension names. Coordinates 'lon' and 'lat' have been assigned into the corresponding coordinates dimensions.") + } + + if (length(dim(var)) > 2) { + if (!is.null(lon_dim) & !is.null(lat_dim)) { + dimnames <- names(dim(var)) + dim(var) <- dim(var)[which((dimnames == lon_dim | dimnames == lat_dim | dim(var) != 1))] + } else { + if (all(dim(var) == 1)) { + dim(var) <- c(1, 1) + } else if (length(dim(var)[which(dim(var) > 1)]) == 2) { + var <- drop(var) + } else if (length(dim(var)[which(dim(var) > 1)]) == 1) { + dim(var) <- c(dim(var)[which(dim(var) > 1)], 1) + } + } + } + + if (length(dim(var)) != 2) { + stop("Parameter 'var' must be a numeric array with two dimensions.") + } + + if ((dim(var)[1] == length(lon) && dim(var)[2] == length(lat)) || + (dim(var)[2] == length(lon) && dim(var)[1] == length(lat))) { + if (dim(var)[2] == length(lon) && dim(var)[1] == length(lat)) { + if (length(lon) == length(lat)) { + if (is.null(names(dim(var)))) { + .warning("Parameter 'var' should have dimension names. 
Coordinates 'lon' and 'lat' have been assigned into the first and second dimensions.") + } else { + if (names(dim(var)[1]) == lat_dim) { + transpose <- TRUE + } + } + } else { + transpose <- TRUE + } + } + } else { + stop("Parameters 'lon' and 'lat' must have as many elements as the number of cells along longitudes and latitudes in the input array 'var'.") + } + + if (!is.null(names(dim(var)))) { + if (names(dim(var)[1]) == lon_dim) { + if (transpose) { + stop("Coordinates dimensions of 'var' doesn't correspond to lat or lon.") + } + } else if (names(dim(var)[2]) == lon_dim) { + if (!transpose) { + stop("Coordinates dimensions of 'var' doesn't correspond to lat or lon.") + } + } + } + + # Transpose the input matrices because the base plot functions work directly + # with dimensions c(lon, lat). + + if (transpose) { + var <- t(var) + } + + transpose <- FALSE + + names(dim(var)) <- c(lon_dim, lat_dim) + dims <- dim(var) + + # Check varu and varv + if (!is.null(varu) && !is.null(varv)) { + if (!is.array(varu) || !(length(dim(varu)) == 2)) { + stop("Parameter 'varu' must be a numerical array with two dimensions.") + } + if (!is.array(varv) || !(length(dim(varv)) == 2)) { + stop("Parameter 'varv' must be a numerical array with two dimensions.") + } + } else if (!is.null(varu) || !is.null(varv)) { + stop("Only one of the components 'varu' or 'varv' has been provided. Both must be provided.") + } + + if (!is.null(varu) && !is.null(varv)) { + if (!all(dim(varu) %in% dim(varv)) || !all(names(dim(varv)) %in% names(dim(varu)))) { + stop("Parameter 'varu' and 'varv' must have equal dimensions and dimension names.") + } else if (any(dim(varu) != dim(varv)) || any(names(dim(varv)) != names(dim(varu)))) { + varv <- t(varv) + names(dim(varv)) <- names(dim(varu)) + } + + if (is.null(lon_dim)) { + names(dim(varu)) <- NULL + names(dim(varv)) <- NULL + } else { + if (!is.null(names(dim(varu)))) { + if (!(lon_dim %in% names(dim(varu)) && lat_dim %in% names(dim(varu)))) { + stop("Parameters 'varu' and 'varv' must have same dimension names as 'var'.") + } else if (dim(varu)[lon_dim] != dim(var)[lon_dim] || dim(varu)[lat_dim] != dim(var)[lat_dim]) { + stop("Parameters 'varu' and 'varv' must have same dimensions as 'var'.") + } + } else { + .warning("Parameters 'varu' and 'varv' should have dimension names. Coordinates 'lon' and 'lat' have been assigned into the corresponding coordinates dimensions.") + } + } + + + if ((dim(varu)[1] == dims[1] && dim(varu)[2] == dims[2]) || + (dim(varu)[2] == dims[1] && dim(varu)[1] == dims[2])) { + if (dim(varu)[2] == dims[1] && dim(varu)[1] == dims[2]) { + if (length(lon) == length(lat)) { + if (is.null(names(dim(varu)))) { + .warning("Parameters 'varu' and 'varv' should have dimension names. 
Coordinates 'lon' and 'lat' have been assigned into the first and second dimensions.") + } else { + if (names(dim(varu)[1]) == lat_dim) { + transpose <- TRUE + } + } + } else { + transpose <- TRUE + } + } + } else { + stop("Parameters 'lon' and 'lat' must have as many elements as the number of cells along longitudes and latitudes in the input array 'varu' and 'varv'.") + } + + if (transpose) { + varu <- t(varu) + varv <- t(varv) + } + + transpose <- FALSE + + } + + # Check contours + if (!is.null(contours)) { + if (!is.array(contours) || !(length(dim(contours)) == 2)) { + stop("Parameter 'contours' must be a numerical array with two dimensions.") + } + } + + + if (!is.null(contours)) { + + if (is.null(lon_dim)) { + names(dim(contours)) <- NULL + } else { + if (!is.null(names(dim(contours)))) { + if (!(lon_dim %in% names(dim(contours)) && lat_dim %in% names(dim(contours)))) { + stop("Parameters 'contours' must have same dimension names as 'var'.") + } else if (dim(contours)[lon_dim] != dim(var)[lon_dim] || dim(contours)[lat_dim] != dim(var)[lat_dim]) { + stop("Parameters 'contours' must have same dimensions as 'var'.") + } + } else { + .warning("Parameters 'contours' should have dimension names. Coordinates 'lon' and 'lat' have been assigned into the corresponding coordinates dimensions.") + } + } + + + transpose <- FALSE + if ((dim(contours)[1] == dims[1] && dim(contours)[2] == dims[2]) || + (dim(contours)[2] == dims[1] && dim(contours)[1] == dims[2])) { + if (dim(contours)[2] == dims[1] && dim(contours)[1] == dims[2]) { + if (length(lon) == length(lat)) { + if (is.null(names(dim(contours)))) { + .warning("Parameter 'contours' should have dimension names. Coordinates 'lon' and 'lat' have been assigned into the first and second dimensions.") + } else { + if (names(dim(contours)[1]) == lat_dim) { + transpose <- TRUE + } + } + } else { + transpose <- TRUE + } + } + } else { + stop("Parameters 'lon' and 'lat' must have as many elements as the number of cells along longitudes and latitudes in the input array 'contours'.") + } + + if (transpose) { + contours <- t(contours) + } + + transpose <- FALSE + + } + + # Check toptitle + if (is.null(toptitle) || is.na(toptitle)) { + toptitle <- '' + } + if (!is.character(toptitle)) { + stop("Parameter 'toptitle' must be a character string.") + } + + # Check sizetit + if (!is.null(sizetit)) { + .warning("Parameter 'sizetit' is obsolete. 
Use 'title_scale' instead.") + if (!is.numeric(sizetit) || length(sizetit) != 1) { + stop("Parameter 'sizetit' must be a single numeric value.") + } + title_scale <- sizetit + } + + var_limits <- c(min(var, na.rm = TRUE), max(var, na.rm = TRUE)) + # Check: brks, cols, subsampleg, bar_limits, color_fun, bar_extra_labels, draw_bar_ticks + # draw_separators, triangle_ends_scale, label_scale, units, units_scale, + # bar_label_digits + # Build: brks, cols, bar_limits, col_inf, col_sup + colorbar <- ColorBar(brks, cols, FALSE, subsampleg, bar_limits, var_limits, + triangle_ends, col_inf, col_sup, color_fun, FALSE, + extra_labels = bar_extra_labels, draw_ticks = draw_bar_ticks, + draw_separators = draw_separators, + triangle_ends_scale = triangle_ends_scale, + label_scale = bar_label_scale, title = units, + title_scale = units_scale, tick_scale = bar_tick_scale, + extra_margin = bar_extra_margin, label_digits = bar_label_digits) + brks <- colorbar$brks + cols <- colorbar$cols + col_inf <- colorbar$col_inf + col_sup <- colorbar$col_sup + bar_limits <- c(head(brks, 1), tail(brks, 1)) + + # Check colNA + if (is.null(colNA)) { + if ('na_color' %in% names(attributes(cols))) { + colNA <- attr(cols, 'na_color') + if (!.IsColor(colNA)) { + stop("The 'na_color' provided as attribute of the colour vector must be a valid colour identifier.") + } + } else { + colNA <- 'pink' + } + } else if (!.IsColor(colNA)) { + stop("Parameter 'colNA' must be a valid colour identifier.") + } + + # Check square + if (!is.logical(square)) { + stop("Parameter 'square' must be logical.") + } + + # Check filled.continents + if (is.null(filled.continents)) { + if (!square) { + filled.continents <- FALSE + } else { + filled.continents <- TRUE + } + } + if (!.IsColor(filled.continents) && !is.logical(filled.continents)) { + stop("Parameter 'filled.continents' must be logical or a colour identifier.") + } else if (!is.logical(filled.continents)) { + continent_color <- filled.continents + filled.continents <- TRUE + } else { + continent_color <- gray(0.5) + } + + # Check filled.oceans + if (!.IsColor(filled.oceans) & !is.logical(filled.oceans)) { + stop("Parameter 'filled.oceans' must be logical or a colour identifier.") + } else if (!is.logical(filled.oceans)) { + ocean_color <- filled.oceans + filled.oceans <- TRUE + } else if (filled.oceans) { + ocean_color <- "light blue" + } + + # Check country.borders + if (!is.logical(country.borders)) { + stop("Parameter 'country.borders' must be logical.") + } + + # Check coast_color + if (is.null(coast_color)) { + if (filled.continents) { + coast_color <- continent_color + } else { + coast_color <- 'black' + } + } + if (!.IsColor(coast_color)) { + stop("Parameter 'coast_color' must be a valid colour identifier.") + } + + # Check coast_width + if (!is.numeric(coast_width)) { + stop("Parameter 'coast_width' must be numeric.") + } + + # Check lake_color + if (!is.null(lake_color)) { + if (!.IsColor(lake_color)) { + stop("Parameter 'lake_color' must be a valid colour identifier.") + } + } + + # Check shapefile + if (!is.null(shapefile)) { + if (is.list(shapefile)) { + shape <- shapefile + if (any(!c('x', 'y') %in% names(shape))) { + stop("The list names of the object in 'shapefile' .rds file should ", + "have at least 'x' and 'y'.") + } + if (length(shape$x) != length(shape$y)) { + stop("The length of x and y in 'shapefile' list should be equal.") + } + } else if (!is.character(shapefile)) { + stop("Parameter 'shapefile' must be a .rds file or a list.") + } else { # .rds file + if 
(!file.exists(shapefile)) { + stop("Parameter 'shapefile' is not a valid file.") + } + if (!grepl("\\.rds$", shapefile)) { + stop("Parameter 'shapefile' must be a .rds file or a list.") + } + shape <- readRDS(file = shapefile) + if (!is.list(shape)) { + stop("Parameter 'shapefile' should be a .rds file of a list object.") + } + if (any(!c('x', 'y') %in% names(shape))) { + stop("The list names of the object in 'shapefile' .rds file should ", + "have at least 'x' and 'y'.") + } + if (length(shape$x) != length(shape$y)) { + stop("The length of x and y in 'shapefile' list should be equal.") + } + } + } + + # Check shapefile_col + if (is.null(shapefile_color)) { + if (filled.continents) { + shapefile_color <- continent_color + } else { + shapefile_color <- 'black' + } + } + if (!.IsColor(shapefile_color)) { + stop("Parameter 'shapefile_color' must be a valid colour identifier.") + } + + # Check brks2 + if (is.null(brks2)) { + if (is.null(contours)) { + if (!square) { + brks2 <- brks + contours <- var + } + } else { + ll <- signif(min(contours, na.rm = TRUE), 2) + ul <- signif(max(contours, na.rm = TRUE), 2) + brks2 <- signif(seq(ll, ul, length.out = length(brks)), 2) + } + } + + # Check contour_lwd + if (!is.numeric(contour_lwd)) { + stop("Parameter 'contour_lwd' must be numeric.") + } + + # Check contour_color + if (!.IsColor(contour_color)) { + stop("Parameter 'contour_color' must be a valid colour identifier.") + } + + # Check contour_lty + if (!is.numeric(contour_lty) && !is.character(contour_lty)) { + stop("Parameter 'contour_lty' must be either a number or a character string.") + } + + # Check contour_draw_label + if (!is.logical(contour_draw_label)) { + stop("Parameter 'contour_draw_label' must be logical.") + } + + # Check contour_label_scale + if (!is.numeric(contour_label_scale)) { + stop("Parameter 'contour_label_scale' must be numeric.") + } + + # Check dots + if (!is.null(dots)) { + if (!is.array(dots) || !(length(dim(dots)) %in% c(2, 3))) { + stop("Parameter 'dots' must be a logical array with two or three dimensions.") + } + if (length(dim(dots)) == 2) { + dim(dots) <- c(1, dim(dots)) + } + + if (is.null(lon_dim)) { + names(dim(dots)) <- NULL + } else { + if (!is.null(names(dim(dots)))) { + if (!(lon_dim %in% names(dim(dots)) && lat_dim %in% names(dim(dots)))) { + stop("Parameters 'dots' must have same dimension names as 'var'.") + } else if (dim(dots)[lon_dim] != dim(var)[lon_dim] || dim(dots)[lat_dim] != dim(var)[lat_dim]) { + stop("Parameters 'dots' must have same dimensions as 'var'.") + } + } else { + .warning("Parameters 'dots' should have dimension names. Coordinates 'lon' and 'lat' have been assigned into the corresponding coordinates dimensions.") + } + } + + transpose <- FALSE + if ((dim(dots)[2] == dims[1] && dim(dots)[3] == dims[2]) || + (dim(dots)[3] == dims[1] && dim(dots)[2] == dims[2])) { + if (dim(dots)[3] == dims[1] && dim(dots)[2] == dims[2]) { + if (length(lon) == length(lat)) { + if (is.null(names(dim(dots)))) { + .warning("Parameter 'dots' should have dimension names. 
Coordinates 'lon' and 'lat' have been assigned into the first and second dimensions.") + } else { + if (names(dim(dots)[2]) == lat_dim) { + transpose <- TRUE + } + } + } else { + transpose <- TRUE + } + } + } else { + stop("Parameter 'dots' must have same number of longitudes and latitudes as 'var'.") + } + + if (transpose) { + dots <- aperm(dots, c(1, 3, 2)) + } + + transpose <- FALSE + + } + + # Check dot_symbol and dot_size + if (!is.null(dots)) { + if (!is.numeric(dot_symbol) && !is.character(dot_symbol)) { + stop("Parameter 'dot_symbol' must be a numeric or character string vector.") + } + if (length(dot_symbol) == 1) { + dot_symbol <- rep(dot_symbol, dim(dots)[1]) + } else if (length(dot_symbol) < dim(dots)[1]) { + stop("Parameter 'dot_symbol' does not contain enough symbols.") + } + if (!is.numeric(dot_size)) { + stop("Parameter 'dot_size' must be numeric.") + } + if (length(dot_size) == 1) { + dot_size <- rep(dot_size, dim(dots)[1]) + } else if (length(dot_size) < dim(dots)[1]) { + stop("Parameter 'dot_size' does not contain enough sizes.") + } + } + + # Check arrow parameters + if (!is.numeric(arr_subsamp)) { + stop("Parameter 'arr_subsamp' must be numeric.") + } + if (!is.numeric(arr_scale)) { + stop("Parameter 'arr_scale' must be numeric.") + } + if (!is.numeric(arr_ref_len)) { + stop("Parameter 'arr_ref_len' must be numeric.") + } + if (!is.character(arr_units)) { + stop("Parameter 'arr_units' must be character.") + } + if (!is.numeric(arr_scale_shaft)) { + stop("Parameter 'arr_scale_shaft' must be numeric.") + } + if (!is.numeric(arr_scale_shaft_angle)) { + stop("Parameter 'arr_scale_shaft_angle' must be numeric.") + } + + # Check axis parameters + if (!is.logical(axelab)) { + stop("Parameter 'axelab' must be logical.") + } + if (!is.logical(labW)) { + stop("Parameter 'labW' must be logical.") + } + if (!is.null(lab_dist_x)) { + if (!is.numeric(lab_dist_x)) { + stop("Parameter 'lab_dist_x' must be numeric.") + } + } + if (!is.null(lab_dist_y)) { + if (!is.numeric(lab_dist_y)) { + stop("Parameter 'lab_dist_y' must be numeric.") + } + } + if (!is.numeric(intylat)) { + stop("Parameter 'intylat' must be numeric.") + } else { + intylat <- round(intylat) + } + if (!is.numeric(intxlon)) { + stop("Parameter 'intxlon' must be numeric.") + } else { + intxlon <- round(intxlon) + } + if (!is.numeric(xlonshft) | length(xlonshft) != 1) { + stop("Parameter 'xlonshft' must be a number.") + } + if (!is.numeric(ylatshft) | length(ylatshft) != 1) { + stop("Parameter 'ylatshft' must be a number.") + } + if (!is.null(xlabels)) { + if (!is.character(xlabels) | !is.vector(xlabels)) { + stop("Parameter 'xlabels' must be a vector of character string.") + } + } + if (!is.null(ylabels)) { + if (!is.character(ylabels) | !is.vector(ylabels)) { + stop("Parameter 'ylabels' must be a vector of character string.") + } + } + + # Check legend parameters + if (!is.logical(drawleg)) { + stop("Parameter 'drawleg' must be logical.") + } + + # Check box parameters + if (!is.null(boxlim)) { + if (!is.list(boxlim)) { + boxlim <- list(boxlim) + } + for (i in 1:length(boxlim)) { + if (!is.numeric(boxlim[[i]]) || length(boxlim[[i]]) != 4) { + stop("Parameter 'boxlim' must be a a numeric vector or a list of numeric vectors of length 4 (with W, S, E, N box limits).") + } + } + if (!is.character(boxcol)) { + stop("Parameter 'boxcol' must be a character string or a vector of character strings.") + } else { + if (length(boxlim) != length(boxcol)) { + if (length(boxcol) == 1) { + boxcol <- rep(boxcol, length(boxlim)) + } else 
{ + stop("Parameter 'boxcol' must have a colour for each box in 'boxlim' or a single colour for all boxes.") + } + } + } + if (!is.numeric(boxlwd)) { + stop("Parameter 'boxlwd' must be numeric.") + } else { + if (length(boxlim) != length(boxlwd)) { + if (length(boxlwd) == 1) { + boxlwd <- rep(boxlwd, length(boxlim)) + } else { + stop("Parameter 'boxlwd' must have a line width for each box in 'boxlim' or a single line width for all boxes.") + } + } + } + } + + # Check margin_scale + if (!is.numeric(margin_scale) || length(margin_scale) != 4) { + stop("Parameter 'margin_scale' must be a numeric vector of length 4.") + } + + # Check title_scale + if (!is.numeric(title_scale)) { + stop("Parameter 'title_scale' must be numeric.") + } + + # Check axes_tick_scale + if (!is.numeric(axes_tick_scale)) { + stop("Parameter 'axes_tick_scale' must be numeric.") + } + + # Check axes_label_scale + if (!is.numeric(axes_label_scale)) { + stop("Parameter 'axes_label_scale' must be numeric.") + } + + # Check numbfig + if (!is.null(numbfig)) { + if (!is.numeric(numbfig)) { + stop("Parameter 'numbfig' must be numeric.") + } else { + numbfig <- round(numbfig) + scale <- 1 / numbfig ** 0.3 + axes_tick_scale <- axes_tick_scale * scale + axes_label_scale <- axes_label_scale * scale + title_scale <- title_scale * scale + margin_scale <- margin_scale * scale + arr_scale <- arr_scale * scale + dot_size <- dot_size * scale + contour_label_scale <- contour_label_scale * scale + contour_lwd <- contour_lwd * scale + } + } + + # + # Input arguments + # ~~~~~~~~~~~~~~~~~ + # + latb <- sort(lat, index.return = TRUE) + dlon <- diff(lon) + wher <- which(dlon > (mean(dlon) + 1)) + if (length(wher) > 0) { + .warning("Detect gap in 'lon' vector, which is considered as crossing the border.") + lon[(wher + 1):dims[1]] <- lon[(wher + 1):dims[1]] - 360 + } + lonb <- sort(lon, index.return = TRUE) + latmin <- floor(min(lat) / 10) * 10 + latmax <- ceiling(max(lat) / 10) * 10 + lonmin <- floor(min(lon) / 10) * 10 + lonmax <- ceiling(max(lon) / 10) * 10 + + # + # Plotting the map + # ~~~~~~~~~~~~~~~~~~ + # + + # Open connection to graphical device + if (!is.null(fileout)) { + saveToFile(fileout) + } else if (names(dev.cur()) == 'null device') { + dev.new(units = size_units, res = res, width = width, height = height) + } + + # + # Defining the layout + # ~~~~~~~~~~~~~~~~~~~~~ + # + if (drawleg) { + margin_scale[1] <- margin_scale[1] - 1 + } + margins <- rep(0.4, 4) * margin_scale + margins[4] <- margins[4] + 1 + cex_title <- 2 * title_scale + cex_axes_labels <- 1.3 * axes_label_scale + cex_axes_ticks <- -0.5 * axes_tick_scale + spaceticklab <- 0 + if (axelab) { + # Y axis label + if (!is.null(ylabels)) { + ypos <- seq(latmin, latmax, intylat) + ylatshft + if (length(ypos) != length(ylabels)) { + stop(paste0("Parameter 'ylabels' must have the same length as the latitude ", + "vector spaced by 'intylat' (length = ", length(ypos), ").")) + } + ylabs <- ylabels + } else { + ypos <- seq(latmin, latmax, intylat) + ylatshft + letters <- array('', length(ypos)) + if (degree_sym == FALSE) { + letters[ypos < 0] <- 'S' + letters[ypos > 0] <- 'N' + } else { + letters[ypos < 0] <- paste(intToUtf8(176), 'S') + letters[ypos > 0] <- paste(intToUtf8(176), 'N') + } + ylabs <- paste(as.character(abs(ypos)), letters, sep = '') + } + + # X axis label + if (!is.null(xlabels)) { + xpos <- seq(lonmin, lonmax, intxlon) + xlonshft + if (length(xpos) != length(xlabels)) { + stop(paste0("Parameter 'xlabels' must have the same length as the longitude ", + "vector 
spaced by 'intxlon' (length = ", length(xpos), ").")) + } + xlabs <- xlabels + } else { + xpos <- seq(lonmin, lonmax, intxlon) + xlonshft + letters <- array('', length(xpos)) + if (labW) { + xpos2 <- xpos + xpos2[xpos2 > 180] <- 360 - xpos2[xpos2 > 180] + } + if (degree_sym == FALSE) { + letters[xpos < 0] <- 'W' + letters[xpos > 0] <- 'E' + } else { + letters[xpos < 0] <- paste(intToUtf8(176), 'W') + letters[xpos > 0] <- paste(intToUtf8(176), 'E') + } + if (labW) { + letters[xpos == 0] <- ' ' + letters[xpos == 180] <- ' ' + if (degree_sym == FALSE) { + letters[xpos > 180] <- 'W' + } else { + letters[xpos > 180] <- paste(intToUtf8(176), 'W') + } + xlabs <- paste(as.character(abs(xpos2)), letters, sep = '') + } else { + xlabs <- paste(as.character(abs(xpos)), letters, sep = '') + } + } + spaceticklab <- max(-cex_axes_ticks, 0) + margins[1] <- margins[1] + 1.2 * cex_axes_labels + spaceticklab + margins[2] <- margins[2] + 1.2 * cex_axes_labels + spaceticklab + } + bar_extra_margin[2] <- bar_extra_margin[2] + margins[2] + bar_extra_margin[4] <- bar_extra_margin[4] + margins[4] + if (toptitle != '') { + margins[3] <- margins[3] + cex_title + 1 + } + if (!is.null(varu)) { + margins[1] <- margins[1] + 2.2 * units_scale + } + + if (drawleg) { + layout(matrix(1:2, ncol = 1, nrow = 2), heights = c(5, 1)) + } + plot.new() + # Load the user parameters + par(userArgs) + par(mar = margins, cex.main = cex_title, cex.axis = cex_axes_labels, + mgp = c(0, spaceticklab, 0), las = 0) + + #NOTE: Here creates the window for later plot. If 'usr' for par() is not specified, + # use the lat/lon as the borders. If 'usr' is specified, use the assigned values. + if (is.null(userArgs$usr)) { + #NOTE: The grids are assumed to be equally spaced + xlim_cal <- c(lonb$x[1] - (lonb$x[2] - lonb$x[1]) / 2, + lonb$x[length(lonb$x)] + (lonb$x[2] - lonb$x[1]) / 2) + ylim_cal <- c(latb$x[1] - (latb$x[2] - latb$x[1]) / 2, + latb$x[length(latb$x)] + (latb$x[2] - latb$x[1]) / 2) + plot.window(xlim = xlim_cal, ylim = ylim_cal, xaxs = 'i', yaxs = 'i') +# Below is Old code. The border grids are only half plotted. +# plot.window(xlim = range(lonb$x, finite = TRUE), ylim = range(latb$x, finite = TRUE), +# xaxs = 'i', yaxs = 'i') + } else { + plot.window(xlim = par("usr")[1:2], ylim = par("usr")[3:4], xaxs = 'i', yaxs = 'i') + } + + if (axelab) { + lab_distance_y <- ifelse(is.null(lab_dist_y), spaceticklab + 0.2, lab_dist_y) + lab_distance_x <- ifelse(is.null(lab_dist_x), spaceticklab + cex_axes_labels / 2 - 0.3, lab_dist_x) + + axis(2, at = ypos, labels = ylabs, cex.axis = cex_axes_labels, tcl = cex_axes_ticks, + mgp = c(0, lab_distance_y, 0)) + axis(1, at = xpos, labels = xlabs, cex.axis = cex_axes_labels, tcl = cex_axes_ticks, + mgp = c(0, lab_distance_x, 0)) + } + title(toptitle, cex.main = cex_title) + rect(par("usr")[1], par("usr")[3], par("usr")[2], par("usr")[4], col = colNA) + col_inf_image <- ifelse(is.null(col_inf), colNA, col_inf) + col_sup_image <- ifelse(is.null(col_sup), colNA, col_sup) + if (square) { + # If lat and lon are both regular-spaced, "useRaster = TRUE" can avoid + # artifact white lines on the figure. 
If not, useRaster has to be FALSE (default) + tryCatch({ + image(lonb$x, latb$x, var[lonb$ix, latb$ix], + col = c(col_inf_image, cols, col_sup_image), + breaks = c(-.Machine$double.xmax, brks, .Machine$double.xmax), + axes = FALSE, xlab = "", ylab = "", add = TRUE, useRaster = TRUE) + }, error = function(x) { + image(lonb$x, latb$x, var[lonb$ix, latb$ix], + col = c(col_inf_image, cols, col_sup_image), + breaks = c(-.Machine$double.xmax, brks, .Machine$double.xmax), + axes = FALSE, xlab = "", ylab = "", add = TRUE) + }) + } else { + .filled.contour(lonb$x, latb$x, var[lonb$ix, latb$ix], + levels = c(.Machine$double.xmin, brks, .Machine$double.xmax), + col = c(col_inf_image, cols, col_sup_image)) + } + if (!is.null(contours)) { +#NOTE: 'labcex' is the absolute size of contour labels. Parameter 'contour_label_scale' +# is provided in PlotEquiMap() but it was not used. Here, 'cex_axes_labels' was used +# and it was calculated from 'axes_label_scale', the size of lat/lon axis label. +# It is changed to use contour_label_scale*par('cex'). + contour(lonb$x, latb$x, contours[lonb$ix, latb$ix], levels = brks2, + method = "edge", add = TRUE, +# labcex = cex_axes_labels, + labcex = contour_label_scale * par("cex"), + lwd = contour_lwd, lty = contour_lty, + col = contour_color, drawlabels = contour_draw_label) + } + + # + # Adding black dots or symbols + # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + # + if (!is.null(dots)) { + data_avail <- !is.na(var) + for (counter in 1:(dim(dots)[1])) { + points <- which(dots[counter, , ] & data_avail, arr.ind = TRUE) + points(lon[points[, 1]], lat[points[, 2]], + pch = dot_symbol[counter], + cex = dot_size[counter] * 3 / sqrt(sqrt(length(var))), + lwd = dot_size[counter] * 3 / sqrt(sqrt(length(var)))) + } + } + # + # Plotting continents + # ~~~~~~~~~~~~~~~~~~~~~ + # + wrap_vec <- c(lonb$x[1], lonb$x[1] + 360) + old_lwd <- par('lwd') + par(lwd = coast_width) + # If [0, 360], use GEOmap; if [-180, 180], use maps::map + # UPDATE: Use maps::map for both cases. The difference between GEOmap and + # maps is trivial. The only thing we can see for now is that + # GEOmap has better lakes. + coast <- maps::map(interior = country.borders, wrap = wrap_vec, + fill = filled.continents, add = TRUE, plot = FALSE) + + if (filled.continents) { + polygon(coast, col = continent_color, border = coast_color, lwd = coast_width) + } else { + lines(coast, col = coast_color, lwd = coast_width) + } + if (!is.null(lake_color)) { + maps::map('lakes', add = TRUE, wrap = wrap_vec, fill = filled.continents, col = lake_color) + } + par(lwd = old_lwd) + + # filled.oceans + if (filled.oceans) { + old_lwd <- par('lwd') + par(lwd = coast_width) + + outline <- maps::map(wrap = wrap_vec, fill = T, plot = FALSE) # must be fill = T + xbox <- wrap_vec + c(-2, 2) + ybox <- c(-92, 92) + outline$x <- c(outline$x, NA, c(xbox, rev(xbox), xbox[1])) + outline$y <- c(outline$y, NA, rep(ybox, each = 2), ybox[1]) + polypath(outline, col = ocean_color, rule = 'evenodd', border = NA) + + par(lwd = old_lwd) + } + + # Plot shapefile + #NOTE: the longitude range cannot cut shapefile range, or not all the shapefile will be plotted. 
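As an illustration (not part of the upstream s2dv file): the 'shapefile' argument validated earlier only needs a list with numeric 'x' and 'y' components of equal length, or the path to an .rds file storing such a list. A minimal sketch of how one could be prepared, assuming the 'maps' package is available; the region name and file path below are hypothetical:

# Illustrative sketch only (hypothetical region and output path)
outline <- maps::map("world", regions = "Spain", plot = FALSE)  # list with matching 'x' and 'y'
saveRDS(outline[c("x", "y")], file = "spain_outline.rds")
# PlotEquiMap(var, lon, lat, shapefile = "spain_outline.rds")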
+ if (!is.null(shapefile)) { + maps::map(shape, interior = country.borders, #wrap = wrap_vec, + fill = filled.continents, add = TRUE, plot = TRUE, + lwd = shapefile_lwd, col = shapefile_color) + } + + box() + # Draw rectangle on the map + if (!is.null(boxlim)) { + counter <- 1 + for (box in boxlim) { + if (box[1] > box[3]) { + box[1] <- box[1] - 360 + } + if (length(box) != 4) { + stop(paste("Box number", counter, "defined in the parameter 'boxlim' is ill defined.")) + } else if (box[2] < latmin || box[4] > latmax || + box[1] < lonmin || box[3] > lonmax) { + stop(paste("The limits of box number", counter, "defined in the parameter 'boxlim' are invalid.")) + } else if (box[1] < 0 && box[3] > 0) { + #segments south + segments(box[1], box[2], 0, box[2], col = boxcol[counter], lwd = boxlwd[counter]) + segments(0, box[2], box[3], box[2], col = boxcol[counter], lwd = boxlwd[counter]) + #segments north + segments(box[1], box[4], 0, box[4], col = boxcol[counter], lwd = boxlwd[counter]) + segments(0, box[4], box[3], box[4], col = boxcol[counter], lwd = boxlwd[counter]) + #segments west + segments(box[1], box[2], box[1], box[4], col = boxcol[counter], + lwd = boxlwd[counter]) + #segments east + segments(box[3], box[2], box[3], box[4], col = boxcol[counter], + lwd = boxlwd[counter]) + } else { + rect(box[1], box[2], box[3], box[4], border = boxcol[counter], col = NULL, + lwd = boxlwd[counter], lty = 'solid') + } + counter <- counter + 1 + } + } + # + # PlotWind + # ~~~~~~~~~~ + # + if (!is.null(varu) && !is.null(varv)) { + # Create a two-dimensional array of longitude and latitude + lontab <- InsertDim(lonb$x, 2, length(latb$x), name = 'lat') + lattab <- InsertDim(latb$x, 1, length(lonb$x), name = 'lon') + varplotu <- varu[lonb$ix, latb$ix] + varplotv <- varv[lonb$ix, latb$ix] + + # Select a subsample of the points to draw an arrow + #for each "subsample" grid point + sublon <- seq(1, length(lon), arr_subsamp) + sublat <- seq(1, length(lat), arr_subsamp) + + uaux <- lontab[sublon, sublat] + varplotu[sublon, sublat] * 0.5 * arr_scale + vaux <- lattab[sublon, sublat] + varplotv[sublon, sublat] * 0.5 * arr_scale + + lenshaft <- 0.18 * arr_scale * arr_scale_shaft + angleshaft <- 12 * arr_scale_shaft_angle + # Plot Wind + arrows(lontab[sublon, sublat], lattab[sublon, sublat], + uaux, vaux, + angle = angleshaft, + length = lenshaft) + + # Plotting an arrow at the bottom of the plot for the legend + posarlon <- lonb$x[1] + (lonmax - lonmin) * 0.1 + posarlat <- latmin - ((latmax - latmin) + 1) / par('pin')[2] * + (spaceticklab + 0.2 + cex_axes_labels + 0.6 * units_scale) * par('csi') + + arrows(posarlon, posarlat, + posarlon + 0.5 * arr_scale * arr_ref_len, posarlat, + length = lenshaft, angle = angleshaft, + xpd = TRUE) + #save the parameter value + xpdsave <- par('xpd') + #deactivate xpd to be able to plot in the margin + par(xpd = NA) + #plot text + mtext(paste(as.character(arr_ref_len), arr_units, sep = ""), + line = spaceticklab + 0.2 + cex_axes_labels + 1.2 * units_scale, side = 1, + at = posarlon + (0.5 * arr_scale * arr_ref_len) / 2, + cex = units_scale) + #restore the previous xpd value + par(xpd = xpdsave) + } + # + # Colorbar + # ~~~~~~~~~~ + # + if (drawleg) { + ColorBar(brks, cols, FALSE, subsampleg, bar_limits, var_limits, + triangle_ends, col_inf = col_inf, col_sup = col_sup, + extra_labels = bar_extra_labels, draw_ticks = draw_bar_ticks, + draw_separators = draw_separators, title = units, + title_scale = units_scale, triangle_ends_scale = triangle_ends_scale, + label_scale = bar_label_scale, 
tick_scale = bar_tick_scale, + extra_margin = bar_extra_margin, label_digits = bar_label_digits) + } + + # If the graphic was saved to file, close the connection with the device + if (!is.null(fileout)) dev.off() + + invisible(list(brks = brks, cols = cols, col_inf = col_inf, col_sup = col_sup)) +} + diff --git a/modules/Visualization/R/tmp/Utils.R b/modules/Visualization/R/tmp/Utils.R new file mode 100644 index 00000000..05878259 --- /dev/null +++ b/modules/Visualization/R/tmp/Utils.R @@ -0,0 +1,1779 @@ +#'@importFrom abind abind +#'@import plyr ncdf4 +#'@importFrom grDevices png jpeg pdf svg bmp tiff +#'@importFrom easyVerification convert2prob + +## Function to tell if a regexpr() match is a complete match to a specified name +.IsFullMatch <- function(x, name) { + ifelse(x > 0 && attributes(x)$match.length == nchar(name), TRUE, FALSE) +} + +.ConfigReplaceVariablesInString <- function(string, replace_values, allow_undefined_key_vars = FALSE) { + # This function replaces all the occurrences of a variable in a string by + # their corresponding string stored in the replace_values. + if (length(strsplit(string, "\\$")[[1]]) > 1) { + parts <- strsplit(string, "\\$")[[1]] + output <- "" + i <- 0 + for (part in parts) { + if (i %% 2 == 0) { + output <- paste(output, part, sep = "") + } else { + if (part %in% names(replace_values)) { + output <- paste(output, .ConfigReplaceVariablesInString(replace_values[[part]], replace_values, allow_undefined_key_vars), sep = "") + } else if (allow_undefined_key_vars) { + output <- paste0(output, "$", part, "$") + } else { + stop(paste('Error: The variable $', part, '$ was not defined in the configuration file.', sep = '')) + } + } + i <- i + 1 + } + output + } else { + string + } +} + +.KnownLonNames <- function() { + known_lon_names <- c('lon', 'longitude', 'x', 'i', 'nav_lon') +} + +.KnownLatNames <- function() { + known_lat_names <- c('lat', 'latitude', 'y', 'j', 'nav_lat') +} + +.t2nlatlon <- function(t) { + ## As seen in cdo's griddes.c: ntr2nlat() + nlats <- (t * 3 + 1) / 2 + if ((nlats > 0) && (nlats - trunc(nlats) >= 0.5)) { + nlats <- ceiling(nlats) + } else { + nlats <- round(nlats) + } + if (nlats %% 2 > 0) { + nlats <- nlats + 1 + } + ## As seen in cdo's griddes.c: compNlon(), and as specified in ECMWF + nlons <- 2 * nlats + keep_going <- TRUE + while (keep_going) { + n <- nlons + if (n %% 8 == 0) n <- trunc(n / 8) + while (n %% 6 == 0) n <- trunc(n / 6) + while (n %% 5 == 0) n <- trunc(n / 5) + while (n %% 4 == 0) n <- trunc(n / 4) + while (n %% 3 == 0) n <- trunc(n / 3) + if (n %% 2 == 0) n <- trunc(n / 2) + if (n <= 8) { + keep_going <- FALSE + } else { + nlons <- nlons + 2 + if (nlons > 9999) { + stop("Error: pick another gaussian grid truncation. 
It doesn't fulfill the standards to apply FFT.") + } + } + } + c(nlats, nlons) +} + +.nlat2t <- function(nlats) { + trunc((nlats * 2 - 1) / 3) +} + +.LoadDataFile <- function(work_piece, explore_dims = FALSE, silent = FALSE) { + # The purpose, working modes, inputs and outputs of this function are + # explained in ?LoadDataFile + #suppressPackageStartupMessages({library(ncdf4)}) + #suppressPackageStartupMessages({library(bigmemory)}) + #suppressPackageStartupMessages({library(plyr)}) + # Auxiliar function to convert array indices to lineal indices + arrayIndex2VectorIndex <- function(indices, dims) { + if (length(indices) > length(dims)) { + stop("Error: indices do not match dimensions in arrayIndex2VectorIndex.") + } + position <- 1 + dims <- rev(dims) + indices <- rev(indices) + for (i in 1:length(indices)) { + position <- position + (indices[i] - 1) * prod(dims[-c(1:i)]) + } + position + } + + found_file <- NULL + dims <- NULL + grid_name <- units <- var_long_name <- NULL + is_2d_var <- array_across_gw <- NULL + data_across_gw <- NULL + + filename <- work_piece[['filename']] + namevar <- work_piece[['namevar']] + output <- work_piece[['output']] + # The names of all data files in the directory of the repository that match + # the pattern are obtained. + if (length(grep("^http", filename)) > 0) { + is_url <- TRUE + files <- filename + ## TODO: Check that the user is not using shell globbing exps. + } else { + is_url <- FALSE + files <- Sys.glob(filename) + } + + # If we don't find any, we leave the flag 'found_file' with a NULL value. + if (length(files) > 0) { + # The first file that matches the pattern is chosen and read. + filename <- head(files, 1) + filein <- filename + found_file <- filename + mask <- work_piece[['mask']] + + if (!silent) { + if (explore_dims) { + .message(paste("Exploring dimensions...", filename)) + } + ##} else { + ## cat(paste("* Reading & processing data...", filename, '\n')) + ##} + } + + # We will fill in 'expected_dims' with the names of the expected dimensions of + # the data array we'll retrieve from the file. + expected_dims <- NULL + remap_needed <- FALSE + # But first we open the file and work out whether the requested variable is 2d + fnc <- nc_open(filein) + if (!(namevar %in% names(fnc$var))) { + stop(paste("Error: The variable", namevar, "is not defined in the file", filename)) + } + var_long_name <- fnc$var[[namevar]]$longname + units <- fnc$var[[namevar]]$units + file_dimnames <- unlist(lapply(fnc$var[[namevar]][['dim']], '[[', 'name')) + # The following two 'ifs' are to allow for 'lon'/'lat' by default, instead of + # 'longitude'/'latitude'. 
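To make the fallback described above concrete (an illustration with hypothetical values, not part of the upstream file): if the request uses the default names 'longitude'/'latitude' but the NetCDF file declares 'lon'/'lat', the requested names are silently switched so the variable can still be read.

# Illustration, hypothetical values:
# work_piece[['dimnames']]    list(lon = 'longitude', lat = 'latitude', member = 'ensemble')
# file_dimnames               c('lon', 'lat', 'time')
# after the two 'if' blocks   list(lon = 'lon', lat = 'lat', member = 'ensemble')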
+ if (!(work_piece[['dimnames']][['lon']] %in% file_dimnames) && + (work_piece[['dimnames']][['lon']] == 'longitude') && + ('lon' %in% file_dimnames)) { + work_piece[['dimnames']][['lon']] <- 'lon' + } + if (!(work_piece[['dimnames']][['lat']] %in% file_dimnames) && + (work_piece[['dimnames']][['lat']] == 'latitude') && + ('lat' %in% file_dimnames)) { + work_piece[['dimnames']][['lat']] <- 'lat' + } + if (is.null(work_piece[['is_2d_var']])) { + is_2d_var <- all(c(work_piece[['dimnames']][['lon']], + work_piece[['dimnames']][['lat']]) %in% + unlist(lapply(fnc$var[[namevar]][['dim']], + '[[', 'name'))) + } else { + is_2d_var <- work_piece[['is_2d_var']] + } + if ((is_2d_var || work_piece[['is_file_per_dataset']])) { + if (Sys.which("cdo")[[1]] == "") { + stop("Error: CDO libraries not available") + } + + cdo_version <- strsplit(suppressWarnings(system2("cdo", args = '-V', stderr = TRUE))[[1]], ' ')[[1]][5] + + cdo_version <- as.numeric_version(unlist(strsplit(cdo_version, "[A-Za-z]", fixed = FALSE))[[1]]) + + } + # If the variable to load is 2-d, we need to determine whether: + # - interpolation is needed + # - subsetting is requested + if (is_2d_var) { + ## We read the longitudes and latitudes from the file. + lon <- ncvar_get(fnc, work_piece[['dimnames']][['lon']]) + lat <- ncvar_get(fnc, work_piece[['dimnames']][['lat']]) + first_lon_in_original_file <- lon[1] + # If a common grid is requested or we are exploring the file dimensions + # we need to read the grid type and size of the file to finally work out the + # CDO grid name. + if (!is.null(work_piece[['grid']]) || explore_dims) { + # Here we read the grid type and its number of longitudes and latitudes + file_info <- system(paste('cdo -s griddes', filein, '2> /dev/null'), intern = TRUE) + grids_positions <- grep('# gridID', file_info) + if (length(grids_positions) < 1) { + stop("The grid should be defined in the files.") + } + grids_first_lines <- grids_positions + 2 + grids_last_lines <- c((grids_positions - 2)[-1], length(file_info)) + grids_info <- as.list(1:length(grids_positions)) + grids_info <- lapply(grids_info, function (x) file_info[grids_first_lines[x]:grids_last_lines[x]]) + grids_info <- lapply(grids_info, function (x) gsub(" *", " ", x)) + grids_info <- lapply(grids_info, function (x) gsub("^ | $", "", x)) + grids_info <- lapply(grids_info, function (x) unlist(strsplit(x, " | = "))) + grids_types <- unlist(lapply(grids_info, function (x) x[grep('gridtype', x) + 1])) + grids_matches <- unlist(lapply(grids_info, function (x) { + nlons <- if (length(grep('xsize', x)) > 0) { + as.numeric(x[grep('xsize', x) + 1]) + } else { + NA + } + nlats <- if (length(grep('ysize', x)) > 0) { + as.numeric(x[grep('ysize', x) + 1]) + } else { + NA + } + result <- FALSE + if (!anyNA(c(nlons, nlats))) { + if ((nlons == length(lon)) && + (nlats == length(lat))) { + result <- TRUE + } + } + result + })) + grids_matches <- grids_matches[which(grids_types %in% c('gaussian', 'lonlat'))] + grids_info <- grids_info[which(grids_types %in% c('gaussian', 'lonlat'))] + grids_types <- grids_types[which(grids_types %in% c('gaussian', 'lonlat'))] + if (length(grids_matches) == 0) { + stop("Error: Only 'gaussian' and 'lonlat' grids supported. 
See e.g: cdo sinfo ", filename) + } + if (sum(grids_matches) > 1) { + if ((all(grids_types[which(grids_matches)] == 'gaussian') || + all(grids_types[which(grids_matches)] == 'lonlat')) && + all(unlist(lapply(grids_info[which(grids_matches)], identical, + grids_info[which(grids_matches)][[1]])))) { + grid_type <- grids_types[which(grids_matches)][1] + } else { + stop("Error: Load() can't disambiguate: More than one lonlat/gaussian grids with the same size as the requested variable defined in ", filename) + } + } else if (sum(grids_matches) == 1) { + grid_type <- grids_types[which(grids_matches)] + } else { + stop("Unexpected error.") + } + grid_lons <- length(lon) + grid_lats <- length(lat) + # Convert to CDO grid name as seen in cdo's griddes.c: nlat2ntr() + if (grid_type == 'lonlat') { + grid_name <- paste0('r', grid_lons, 'x', grid_lats) + } else { + grid_name <- paste0('t', .nlat2t(grid_lats), 'grid') + } + if (is.null(work_piece[['grid']])) { + .warning(paste0("Detect the grid type to be '", grid_name, "'. ", + "If it is not expected, assign parameter 'grid' to avoid wrong result.")) + } + } + # If a common grid is requested, we will also calculate its size which we will use + # later on. + if (!is.null(work_piece[['grid']])) { + # Now we calculate the common grid type and its lons and lats + if (length(grep('^t\\d{1,+}grid$', work_piece[['grid']])) > 0) { + common_grid_type <- 'gaussian' + common_grid_res <- as.numeric(strsplit(work_piece[['grid']], '[^0-9]{1,+}')[[1]][2]) + nlonlat <- .t2nlatlon(common_grid_res) + common_grid_lats <- nlonlat[1] + common_grid_lons <- nlonlat[2] + } else if (length(grep('^r\\d{1,+}x\\d{1,+}$', work_piece[['grid']])) > 0) { + common_grid_type <- 'lonlat' + common_grid_lons <- as.numeric(strsplit(work_piece[['grid']], '[^0-9]{1,+}')[[1]][2]) + common_grid_lats <- as.numeric(strsplit(work_piece[['grid']], '[^0-9]{1,+}')[[1]][3]) + } else { + stop("Error: Only supported grid types in parameter 'grid' are tgrid and rx") + } + } else { + ## If no 'grid' is specified, there is no common grid. + ## But these variables are filled in for consistency in the code. 
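For reference (an illustration, not part of the upstream file): the two grid names parsed above follow CDO's conventions, 'r<nlon>x<nlat>' for regular lon-lat grids and 't<truncation>grid' for Gaussian grids, with the helpers .t2nlatlon() and .nlat2t() defined earlier in this file converting between spectral truncation and grid size. A quick sanity check of those helpers:

# Illustrative only
.t2nlatlon(159)              # c(240, 480): 240 latitudes and 480 longitudes
.nlat2t(240)                 # 159
paste0("r", 480, "x", 240)   # "r480x240", the equivalent regular-grid name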
+ common_grid_lons <- length(lon) + common_grid_lats <- length(lat) + } + first_common_grid_lon <- 0 + last_common_grid_lon <- 360 - 360/common_grid_lons + ## This is not true for gaussian grids or for some regular grids, but + ## is a safe estimation + first_common_grid_lat <- -90 + last_common_grid_lat <- 90 + # And finally determine whether interpolation is needed or not + remove_shift <- FALSE + if (!is.null(work_piece[['grid']])) { + if ((grid_lons != common_grid_lons) || + (grid_lats != common_grid_lats) || + (grid_type != common_grid_type) || + (lon[1] != first_common_grid_lon)) { + if (grid_lons == common_grid_lons && grid_lats == common_grid_lats && + grid_type == common_grid_type && lon[1] != first_common_grid_lon) { + remove_shift <- TRUE + } + remap_needed <- TRUE + common_grid_name <- work_piece[['grid']] + } + } else if ((lon[1] != first_common_grid_lon) && explore_dims && + !work_piece[['single_dataset']]) { + remap_needed <- TRUE + common_grid_name <- grid_name + remove_shift <- TRUE + } + if (remap_needed && (work_piece[['remap']] == 'con') && + (cdo_version >= as.numeric_version('1.7.0'))) { + work_piece[['remap']] <- 'ycon' + } + if (remove_shift && !explore_dims) { + if (!is.null(work_piece[['progress_amount']])) { + cat("\n") + } + .warning(paste0("The dataset with index ", + tail(work_piece[['indices']], 1), " in '", + work_piece[['dataset_type']], "' doesn't start at longitude 0 and will be re-interpolated in order to align its longitudes with the standard CDO grids definable with the names 'tgrid' or 'rx', which are by definition starting at the longitude 0.\n")) + if (!is.null(mask)) { + .warning(paste0("A mask was provided for the dataset with index ", + tail(work_piece[['indices']], 1), " in '", + work_piece[['dataset_type']], "'. This dataset has been re-interpolated to align its longitudes to start at 0. You must re-interpolate the corresponding mask to align its longitudes to start at 0 as well, if you haven't done so yet. Running cdo remapcon,", common_grid_name, " original_mask_file.nc new_mask_file.nc will fix it.\n")) + } + } + if (remap_needed && (grid_lons < common_grid_lons || grid_lats < common_grid_lats)) { + if (!is.null(work_piece[['progress_amount']])) { + cat("\n") + } + if (!explore_dims) { + .warning(paste0("The dataset with index ", tail(work_piece[['indices']], 1), + " in '", work_piece[['dataset_type']], "' is originally on ", + "a grid coarser than the common grid and it has been ", + "extrapolated. Check the results carefully. 
It is ", + "recommended to specify as common grid the coarsest grid ", + "among all requested datasets via the parameter 'grid'.\n")) + } + } + # Now calculate if the user requests for a lonlat subset or for the + # entire field + lonmin <- work_piece[['lon_limits']][1] + lonmax <- work_piece[['lon_limits']][2] + latmin <- work_piece[['lat_limits']][1] + latmax <- work_piece[['lat_limits']][2] + lon_subsetting_requested <- FALSE + lonlat_subsetting_requested <- FALSE + if (lonmin <= lonmax) { + if ((lonmin > first_common_grid_lon) || (lonmax < last_common_grid_lon)) { + lon_subsetting_requested <- TRUE + } + } else { + if ((lonmin - lonmax) > 360/common_grid_lons) { + lon_subsetting_requested <- TRUE + } else { + gap_width <- floor(lonmin / (360/common_grid_lons)) - + floor(lonmax / (360/common_grid_lons)) + if (gap_width > 0) { + if (!(gap_width == 1 && (lonmin %% (360/common_grid_lons) == 0) && + (lonmax %% (360/common_grid_lons) == 0))) { + lon_subsetting_requested <- TRUE + } + } + } + } + if ((latmin > first_common_grid_lat) || (latmax < last_common_grid_lat) + || (lon_subsetting_requested)) { + lonlat_subsetting_requested <- TRUE + } + # Now that we know if subsetting was requested, we can say if final data + # will go across greenwich + if (lonmax < lonmin) { + data_across_gw <- TRUE + } else { + data_across_gw <- !lon_subsetting_requested + } + + # When remap is needed but no subsetting, the file is copied locally + # so that cdo works faster, and then interpolated. + # Otherwise the file is kept as is and the subset will have to be + # interpolated still. + if (!lonlat_subsetting_requested && remap_needed) { + nc_close(fnc) + filecopy <- tempfile(pattern = "load", fileext = ".nc") + file.copy(filein, filecopy) + filein <- tempfile(pattern = "loadRegridded", fileext = ".nc") + # "-L" is to serialize I/O accesses. It prevents potential segmentation fault in the + # underlying hdf5 library. + system(paste0("cdo -L -s remap", work_piece[['remap']], ",", + common_grid_name, + " -selname,", namevar, " ", filecopy, " ", filein, + " 2>/dev/null", sep = "")) + file.remove(filecopy) + work_piece[['dimnames']][['lon']] <- 'lon' + work_piece[['dimnames']][['lat']] <- 'lat' + fnc <- nc_open(filein) + lon <- ncvar_get(fnc, work_piece[['dimnames']][['lon']]) + lat <- ncvar_get(fnc, work_piece[['dimnames']][['lat']]) + } + + # Read and check also the mask + if (!is.null(mask)) { + ###mask_file <- tempfile(pattern = 'loadMask', fileext = '.nc') + if (is.list(mask)) { + if (!file.exists(mask[['path']])) { + stop(paste("Error: Couldn't find the mask file", mask[['path']])) + } + mask_file <- mask[['path']] + ###file.copy(work_piece[['mask']][['path']], mask_file) + fnc_mask <- nc_open(mask_file) + vars_in_mask <- sapply(fnc_mask$var, '[[', 'name') + if ('nc_var_name' %in% names(mask)) { + if (!(mask[['nc_var_name']] %in% + vars_in_mask)) { + stop(paste("Error: couldn't find variable", mask[['nc_var_name']], + "in the mask file", mask[['path']])) + } + } else { + if (length(vars_in_mask) != 1) { + stop(paste("Error: one and only one non-coordinate variable should be defined in the mask file", + mask[['path']], "if the component 'nc_var_name' is not specified. 
Currently found: ", + paste(vars_in_mask, collapse = ', '), ".")) + } else { + mask[['nc_var_name']] <- vars_in_mask + } + } + if (sum(fnc_mask$var[[mask[['nc_var_name']]]]$size > 1) != 2) { + stop(paste0("Error: the variable '", + mask[['nc_var_name']], + "' must be defined only over the dimensions '", + work_piece[['dimnames']][['lon']], "' and '", + work_piece[['dimnames']][['lat']], + "' in the mask file ", + mask[['path']])) + } + mask <- ncvar_get(fnc_mask, mask[['nc_var_name']], collapse_degen = TRUE) + nc_close(fnc_mask) + ### mask_lon <- ncvar_get(fnc_mask, work_piece[['dimnames']][['lon']]) + ### mask_lat <- ncvar_get(fnc_mask, work_piece[['dimnames']][['lat']]) + ###} else { + ### dim_longitudes <- ncdim_def(work_piece[['dimnames']][['lon']], "degrees_east", lon) + ### dim_latitudes <- ncdim_def(work_piece[['dimnames']][['lat']], "degrees_north", lat) + ### ncdf_var <- ncvar_def('LSM', "", list(dim_longitudes, dim_latitudes), NA, 'double') + ### fnc_mask <- nc_create(mask_file, list(ncdf_var)) + ### ncvar_put(fnc_mask, ncdf_var, work_piece[['mask']]) + ### nc_close(fnc_mask) + ### fnc_mask <- nc_open(mask_file) + ### work_piece[['mask']] <- list(path = mask_file, nc_var_name = 'LSM') + ### mask_lon <- lon + ### mask_lat <- lat + ###} + ###} + ### Now ready to check that the mask is right + ##if (!(lonlat_subsetting_requested && remap_needed)) { + ### if ((dim(mask)[2] != length(lon)) || (dim(mask)[1] != length(lat))) { + ### stop(paste("Error: the mask of the dataset with index ", tail(work_piece[['indices']], 1), " in '", work_piece[['dataset_type']], "' is wrong. It must be on the common grid if the selected output type is 'lonlat', 'lon' or 'lat', or 'areave' and 'grid' has been specified. It must be on the grid of the corresponding dataset if the selected output type is 'areave' and no 'grid' has been specified. For more information check ?Load and see help on parameters 'grid', 'maskmod' and 'maskobs'.", sep = "")) + ### } + ###if (!(identical(mask_lon, lon) && identical(mask_lat, lat))) { + ### stop(paste0("Error: the longitudes and latitudes in the masks must be identical to the ones in the corresponding data files if output = 'areave' or, if the selected output is 'lon', 'lat' or 'lonlat', the longitudes in the mask file must start by 0 and the latitudes must be ordered from highest to lowest. See\n ", + ### work_piece[['mask']][['path']], " and ", filein)) + ###} + } + } + + lon_indices <- 1:length(lon) + if (!(lonlat_subsetting_requested && remap_needed)) { + lon[which(lon < 0)] <- lon[which(lon < 0)] + 360 + } + if (lonmax >= lonmin) { + lon_indices <- lon_indices[which(((lon %% 360) >= lonmin) & ((lon %% 360) <= lonmax))] + } else if (!remap_needed) { + lon_indices <- lon_indices[which(((lon %% 360) <= lonmax) | ((lon %% 360) >= lonmin))] + } + lat_indices <- which(lat >= latmin & lat <= latmax) + ## In most of the cases the latitudes are ordered from -90 to 90. + ## We will reorder them to be in the order from 90 to -90, so mostly + ## always the latitudes are reordered. + ## TODO: This could be avoided in future. + if (lat[1] < lat[length(lat)]) { + lat_indices <- lat_indices[length(lat_indices):1] + } + if (!is.null(mask) && !(lonlat_subsetting_requested && remap_needed)) { + if ((dim(mask)[1] != length(lon)) || (dim(mask)[2] != length(lat))) { + stop(paste("Error: the mask of the dataset with index ", tail(work_piece[['indices']], 1), " in '", work_piece[['dataset_type']], "' is wrong. 
It must be on the common grid if the selected output type is 'lonlat', 'lon' or 'lat', or 'areave' and 'grid' has been specified. It must be on the grid of the corresponding dataset if the selected output type is 'areave' and no 'grid' has been specified. For more information check ?Load and see help on parameters 'grid', 'maskmod' and 'maskobs'.", sep = "")) + } + mask <- mask[lon_indices, lat_indices] + } + ## If the user requests subsetting, we must extend the lon and lat limits if possible + ## so that the interpolation after is done properly + maximum_extra_points <- work_piece[['remapcells']] + if (lonlat_subsetting_requested && remap_needed) { + if ((maximum_extra_points > (head(lon_indices, 1) - 1)) || + (maximum_extra_points > (length(lon) - tail(lon_indices, 1)))) { + ## if the requested number of points goes beyond the left or right + ## sides of the map, we need to take the entire map so that the + ## interpolation works properly + lon_indices <- 1:length(lon) + } else { + extra_points <- min(maximum_extra_points, head(lon_indices, 1) - 1) + if (extra_points > 0) { + lon_indices <- c((head(lon_indices, 1) - extra_points):(head(lon_indices, 1) - 1), lon_indices) + } + extra_points <- min(maximum_extra_points, length(lon) - tail(lon_indices, 1)) + if (extra_points > 0) { + lon_indices <- c(lon_indices, (tail(lon_indices, 1) + 1):(tail(lon_indices, 1) + extra_points)) + } + } + min_lat_ind <- min(lat_indices) + max_lat_ind <- max(lat_indices) + extra_points <- min(maximum_extra_points, min_lat_ind - 1) + if (extra_points > 0) { + if (lat[1] < tail(lat, 1)) { + lat_indices <- c(lat_indices, (min_lat_ind - 1):(min_lat_ind - extra_points)) + } else { + lat_indices <- c((min_lat_ind - extra_points):(min_lat_ind - 1), lat_indices) + } + } + extra_points <- min(maximum_extra_points, length(lat) - max_lat_ind) + if (extra_points > 0) { + if (lat[1] < tail(lat, 1)) { + lat_indices <- c((max_lat_ind + extra_points):(max_lat_ind + 1), lat_indices) + } else { + lat_indices <- c(lat_indices, (max_lat_ind + 1):(max_lat_ind + extra_points)) + } + } + } + lon <- lon[lon_indices] + lat <- lat[lat_indices] + expected_dims <- c(work_piece[['dimnames']][['lon']], + work_piece[['dimnames']][['lat']]) + } else { + lon <- 0 + lat <- 0 + } + # We keep on filling the expected dimensions + var_dimnames <- unlist(lapply(fnc$var[[namevar]][['dim']], '[[', 'name')) + nmemb <- nltime <- NULL + ## Sometimes CDO renames 'members' dimension to 'lev' + old_members_dimname <- NULL + if (('lev' %in% var_dimnames) && !(work_piece[['dimnames']][['member']] %in% var_dimnames)) { + old_members_dimname <- work_piece[['dimnames']][['member']] + work_piece[['dimnames']][['member']] <- 'lev' + } + if (work_piece[['dimnames']][['member']] %in% var_dimnames) { + nmemb <- fnc$var[[namevar]][['dim']][[match(work_piece[['dimnames']][['member']], var_dimnames)]]$len + expected_dims <- c(expected_dims, work_piece[['dimnames']][['member']]) + } else { + nmemb <- 1 + } + if (length(expected_dims) > 0) { + dim_matches <- match(expected_dims, var_dimnames) + if (anyNA(dim_matches)) { + if (!is.null(old_members_dimname)) { + expected_dims[which(expected_dims == 'lev')] <- old_members_dimname + } + stop(paste("Error: the expected dimension(s)", + paste(expected_dims[which(is.na(dim_matches))], collapse = ', '), + "were not found in", filename)) + } + time_dimname <- var_dimnames[-dim_matches] + } else { + time_dimname <- var_dimnames + } + if (length(time_dimname) > 0) { + if (length(time_dimname) == 1) { + nltime <- 
fnc$var[[namevar]][['dim']][[match(time_dimname, var_dimnames)]]$len + expected_dims <- c(expected_dims, time_dimname) + dim_matches <- match(expected_dims, var_dimnames) + } else { + if (!is.null(old_members_dimname)) { + expected_dims[which(expected_dims == 'lev')] <- old_members_dimname + } + stop(paste("Error: the variable", namevar, + "is defined over more dimensions than the expected (", + paste(c(expected_dims, 'time'), collapse = ', '), + "). It could also be that the members, longitude or latitude dimensions are named incorrectly. In that case, either rename the dimensions in the file or adjust Load() to recognize the actual name with the parameter 'dimnames'. See file", filename)) + } + } else { + nltime <- 1 + } + + # Now we must retrieve the data from the file, but only the asked indices. + # So we build up the indices to retrieve. + # Longitudes or latitudes have been retrieved already. + if (explore_dims) { + # If we're exploring the file we only want one time step from one member, + # to regrid it and work out the number of longitudes and latitudes. + # We don't need more. + members <- 1 + ltimes_list <- list(c(1)) + } else { + # The data is arranged in the array 'tmp' with the dimensions in a + # common order: + # 1) Longitudes + # 2) Latitudes + # 3) Members (even if is not a file per member experiment) + # 4) Lead-times + if (work_piece[['is_file_per_dataset']]) { + time_indices <- 1:nltime + mons <- strsplit(system(paste('cdo showmon ', filein, + ' 2>/dev/null'), intern = TRUE), split = ' ') + years <- strsplit(system(paste('cdo showyear ', filein, + ' 2>/dev/null'), intern = TRUE), split = ' ') + mons <- as.numeric(mons[[1]][which(mons[[1]] != "")]) + years <- as.numeric(years[[1]][which(years[[1]] != "")]) + time_indices <- ts(time_indices, start = c(years[1], mons[1]), + end = c(years[length(years)], mons[length(mons)]), + frequency = 12) + ltimes_list <- list() + for (sdate in work_piece[['startdates']]) { + selected_time_indices <- window(time_indices, start = c(as.numeric( + substr(sdate, 1, 4)), as.numeric(substr(sdate, 5, 6))), + end = c(3000, 12), frequency = 12, extend = TRUE) + selected_time_indices <- selected_time_indices[work_piece[['leadtimes']]] + ltimes_list <- c(ltimes_list, list(selected_time_indices)) + } + } else { + ltimes <- work_piece[['leadtimes']] + #if (work_piece[['dataset_type']] == 'exp') { + ltimes_list <- list(ltimes[which(ltimes <= nltime)]) + #} + } + ## TODO: Put, when reading matrices, this kind of warnings + # if (nmember < nmemb) { + # cat("Warning: + members <- 1:work_piece[['nmember']] + members <- members[which(members <= nmemb)] + } + + # Now, for each list of leadtimes to load (usually only one list with all leadtimes), + # we'll join the indices and retrieve data + found_disordered_dims <- FALSE + for (ltimes in ltimes_list) { + if (is_2d_var) { + start <- c(min(lon_indices), min(lat_indices)) + end <- c(max(lon_indices), max(lat_indices)) + if (lonlat_subsetting_requested && remap_needed) { + subset_indices <- list(min(lon_indices):max(lon_indices) - min(lon_indices) + 1, + lat_indices - min(lat_indices) + 1) + dim_longitudes <- ncdim_def(work_piece[['dimnames']][['lon']], "degrees_east", lon) + dim_latitudes <- ncdim_def(work_piece[['dimnames']][['lat']], "degrees_north", lat) + ncdf_dims <- list(dim_longitudes, dim_latitudes) + } else { + subset_indices <- list(lon_indices - min(lon_indices) + 1, + lat_indices - min(lat_indices) + 1) + ncdf_dims <- list() + } + final_dims <- c(length(subset_indices[[1]]), 
length(subset_indices[[2]]), 1, 1) + } else { + start <- end <- c() + subset_indices <- list() + ncdf_dims <- list() + final_dims <- c(1, 1, 1, 1) + } + + if (work_piece[['dimnames']][['member']] %in% expected_dims) { + start <- c(start, head(members, 1)) + end <- c(end, tail(members, 1)) + subset_indices <- c(subset_indices, list(members - head(members, 1) + 1)) + dim_members <- ncdim_def(work_piece[['dimnames']][['member']], "", members) + ncdf_dims <- c(ncdf_dims, list(dim_members)) + final_dims[3] <- length(members) + } + if (time_dimname %in% expected_dims) { + if (any(!is.na(ltimes))) { + start <- c(start, head(ltimes[which(!is.na(ltimes))], 1)) + end <- c(end, tail(ltimes[which(!is.na(ltimes))], 1)) + subset_indices <- c(subset_indices, list(ltimes - head(ltimes[which(!is.na(ltimes))], 1) + 1)) + } else { + start <- c(start, NA) + end <- c(end, NA) + subset_indices <- c(subset_indices, list(ltimes)) + } + dim_time <- ncdim_def(time_dimname, "", 1:length(ltimes), unlim = TRUE) + ncdf_dims <- c(ncdf_dims, list(dim_time)) + final_dims[4] <- length(ltimes) + } + count <- end - start + 1 + start <- start[dim_matches] + count <- count[dim_matches] + subset_indices <- subset_indices[dim_matches] + # Now that we have the indices to retrieve, we retrieve the data + if (prod(final_dims) > 0) { + tmp <- take(ncvar_get(fnc, namevar, start, count, + collapse_degen = FALSE), + 1:length(subset_indices), subset_indices) + # The data is regridded if it corresponds to an atmospheric variable. When + # the chosen output type is 'areave' the data is not regridded to not + # waste computing time unless the user specified a common grid. + if (is_2d_var) { + ###if (!is.null(work_piece[['mask']]) && !(lonlat_subsetting_requested && remap_needed)) { + ### mask <- take(ncvar_get(fnc_mask, work_piece[['mask']][['nc_var_name']], + ### start[dim_matches[1:2]], count[dim_matches[1:2]], + ### collapse_degen = FALSE), 1:2, subset_indices[dim_matches[1:2]]) + ###} + if (lonlat_subsetting_requested && remap_needed) { + filein <- tempfile(pattern = "loadRegridded", fileext = ".nc") + filein2 <- tempfile(pattern = "loadRegridded2", fileext = ".nc") + ncdf_var <- ncvar_def(namevar, "", ncdf_dims[dim_matches], + fnc$var[[namevar]]$missval, + prec = if (fnc$var[[namevar]]$prec == 'int') { + 'integer' + } else { + fnc$var[[namevar]]$prec + }) + scale_factor <- ifelse(fnc$var[[namevar]]$hasScaleFact, fnc$var[[namevar]]$scaleFact, 1) + add_offset <- ifelse(fnc$var[[namevar]]$hasAddOffset, fnc$var[[namevar]]$addOffset, 0) + if (fnc$var[[namevar]]$hasScaleFact || fnc$var[[namevar]]$hasAddOffset) { + tmp <- (tmp - add_offset) / scale_factor + } + #nc_close(fnc) + fnc2 <- nc_create(filein2, list(ncdf_var)) + ncvar_put(fnc2, ncdf_var, tmp) + if (add_offset != 0) { + ncatt_put(fnc2, ncdf_var, 'add_offset', add_offset) + } + if (scale_factor != 1) { + ncatt_put(fnc2, ncdf_var, 'scale_factor', scale_factor) + } + nc_close(fnc2) + system(paste0("cdo -L -s -sellonlatbox,", if (lonmin > lonmax) { + "0,360," + } else { + paste0(lonmin, ",", lonmax, ",") + }, latmin, ",", latmax, + " -remap", work_piece[['remap']], ",", common_grid_name, + " ", filein2, " ", filein, " 2>/dev/null", sep = "")) + file.remove(filein2) + fnc2 <- nc_open(filein) + sub_lon <- ncvar_get(fnc2, 'lon') + sub_lat <- ncvar_get(fnc2, 'lat') + ## We read the longitudes and latitudes from the file. 
+ ## In principle cdo should put in order the longitudes + ## and slice them properly unless data is across greenwich + sub_lon[which(sub_lon < 0)] <- sub_lon[which(sub_lon < 0)] + 360 + sub_lon_indices <- 1:length(sub_lon) + if (lonmax < lonmin) { + sub_lon_indices <- sub_lon_indices[which((sub_lon <= lonmax) | (sub_lon >= lonmin))] + } + sub_lat_indices <- 1:length(sub_lat) + ## In principle cdo should put in order the latitudes + if (sub_lat[1] < sub_lat[length(sub_lat)]) { + sub_lat_indices <- length(sub_lat):1 + } + final_dims[c(1, 2)] <- c(length(sub_lon_indices), length(sub_lat_indices)) + subset_indices[[dim_matches[1]]] <- sub_lon_indices + subset_indices[[dim_matches[2]]] <- sub_lat_indices + + tmp <- take(ncvar_get(fnc2, namevar, collapse_degen = FALSE), + 1:length(subset_indices), subset_indices) + + if (!is.null(mask)) { + ## We create a very simple 2d netcdf file that is then interpolated to the common + ## grid to know what are the lons and lats of our slice of data + mask_file <- tempfile(pattern = 'loadMask', fileext = '.nc') + mask_file_remap <- tempfile(pattern = 'loadMask', fileext = '.nc') + dim_longitudes <- ncdim_def(work_piece[['dimnames']][['lon']], "degrees_east", c(0, 360)) + dim_latitudes <- ncdim_def(work_piece[['dimnames']][['lat']], "degrees_north", c(-90, 90)) + ncdf_var <- ncvar_def('LSM', "", list(dim_longitudes, dim_latitudes), NA, 'double') + fnc_mask <- nc_create(mask_file, list(ncdf_var)) + ncvar_put(fnc_mask, ncdf_var, array(rep(0, 4), dim = c(2, 2))) + nc_close(fnc_mask) + system(paste0("cdo -L -s remap", work_piece[['remap']], ",", common_grid_name, + " ", mask_file, " ", mask_file_remap, " 2>/dev/null", sep = "")) + fnc_mask <- nc_open(mask_file_remap) + mask_lons <- ncvar_get(fnc_mask, 'lon') + mask_lats <- ncvar_get(fnc_mask, 'lat') + nc_close(fnc_mask) + file.remove(mask_file, mask_file_remap) + if ((dim(mask)[1] != common_grid_lons) || (dim(mask)[2] != common_grid_lats)) { + stop(paste("Error: the mask of the dataset with index ", tail(work_piece[['indices']], 1), " in '", work_piece[['dataset_type']], "' is wrong. It must be on the common grid if the selected output type is 'lonlat', 'lon' or 'lat', or 'areave' and 'grid' has been specified. It must be on the grid of the corresponding dataset if the selected output type is 'areave' and no 'grid' has been specified. 
For more information check ?Load and see help on parameters 'grid', 'maskmod' and 'maskobs'.", sep = "")) + } + mask_lons[which(mask_lons < 0)] <- mask_lons[which(mask_lons < 0)] + 360 + if (lonmax >= lonmin) { + mask_lon_indices <- which((mask_lons >= lonmin) & (mask_lons <= lonmax)) + } else { + mask_lon_indices <- which((mask_lons >= lonmin) | (mask_lons <= lonmax)) + } + mask_lat_indices <- which((mask_lats >= latmin) & (mask_lats <= latmax)) + if (sub_lat[1] < sub_lat[length(sub_lat)]) { + mask_lat_indices <- mask_lat_indices[length(mask_lat_indices):1] + } + mask <- mask[mask_lon_indices, mask_lat_indices] + } + sub_lon <- sub_lon[sub_lon_indices] + sub_lat <- sub_lat[sub_lat_indices] + ### nc_close(fnc_mask) + ### system(paste0("cdo -s -sellonlatbox,", if (lonmin > lonmax) { + ### "0,360," + ### } else { + ### paste0(lonmin, ",", lonmax, ",") + ### }, latmin, ",", latmax, + ### " -remap", work_piece[['remap']], ",", common_grid_name, + ###This is wrong: same files + ### " ", mask_file, " ", mask_file, " 2>/dev/null", sep = "")) + ### fnc_mask <- nc_open(mask_file) + ### mask <- take(ncvar_get(fnc_mask, work_piece[['mask']][['nc_var_name']], + ### collapse_degen = FALSE), 1:2, subset_indices[dim_matches[1:2]]) + ###} + } + } + if (!all(dim_matches == sort(dim_matches))) { + if (!found_disordered_dims && rev(work_piece[['indices']])[2] == 1 && rev(work_piece[['indices']])[3] == 1) { + found_disordered_dims <- TRUE + .warning(paste0("The dimensions for the variable ", namevar, " in the files of the experiment with index ", tail(work_piece[['indices']], 1), " are not in the optimal order for loading with Load(). The optimal order would be '", paste(expected_dims, collapse = ', '), "'. One of the files of the dataset is stored in ", filename)) + } + tmp <- aperm(tmp, dim_matches) + } + dim(tmp) <- final_dims + # If we are exploring the file we don't need to process and arrange + # the retrieved data. We only need to keep the dimension sizes. + if (is_2d_var && lonlat_subsetting_requested && remap_needed) { + final_lons <- sub_lon + final_lats <- sub_lat + } else { + final_lons <- lon + final_lats <- lat + } + if (explore_dims) { + if (work_piece[['is_file_per_member']]) { + ## TODO: When the exp_full_path contains asterisks and is file_per_member + ## members from different datasets may be accounted. + ## Also if one file member is missing the accounting will be wrong. + ## Should parse the file name and extract number of members. + if (is_url) { + nmemb <- NULL + } else { + nmemb <- length(files) + } + } + dims <- list(member = nmemb, ftime = nltime, lon = final_lons, lat = final_lats) + } else { + # If we are not exploring, then we have to process the retrieved data + if (is_2d_var) { + tmp <- apply(tmp, c(3, 4), function(x) { + # Disable of large values. 
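# More precisely: values outside work_piece[['var_limits']] are set to NA, the
# land-sea mask (if any) is applied, and the lon-lat field is then reduced
# according to 'output': an area-weighted mean for 'areave', averages over
# latitude or longitude for 'lon' and 'lat', or the full field rounded to five
# significant figures for 'lonlat'.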
+ if (!is.na(work_piece[['var_limits']][2])) { + x[which(x > work_piece[['var_limits']][2])] <- NA + } + if (!is.na(work_piece[['var_limits']][1])) { + x[which(x < work_piece[['var_limits']][1])] <- NA + } + if (!is.null(mask)) { + x[which(mask < 0.5)] <- NA + } + + if (output == 'areave' || output == 'lon') { + weights <- InsertDim(cos(final_lats * pi / 180), 1, length(final_lons), name = 'lon') + weights[which(is.na(x))] <- NA + if (output == 'areave') { + weights <- weights / mean(weights, na.rm = TRUE) + mean(x * weights, na.rm = TRUE) + } else { + weights <- weights / InsertDim(MeanDims(weights, 2, na.rm = TRUE), 2, length(final_lats), name = 'lat') + MeanDims(x * weights, 2, na.rm = TRUE) + } + } else if (output == 'lat') { + MeanDims(x, 1, na.rm = TRUE) + } else if (output == 'lonlat') { + signif(x, 5) + } + }) + if (output == 'areave') { + dim(tmp) <- c(1, 1, final_dims[3:4]) + } else if (output == 'lon') { + dim(tmp) <- c(final_dims[1], 1, final_dims[3:4]) + } else if (output == 'lat') { + dim(tmp) <- c(1, final_dims[c(2, 3, 4)]) + } else if (output == 'lonlat') { + dim(tmp) <- final_dims + } + } + var_data <- attach.big.matrix(work_piece[['out_pointer']]) + if (work_piece[['dims']][['member']] > 1 && nmemb > 1 && + work_piece[['dims']][['ftime']] > 1 && + nltime < work_piece[['dims']][['ftime']]) { + work_piece[['indices']][2] <- work_piece[['indices']][2] - 1 + for (jmemb in members) { + work_piece[['indices']][2] <- work_piece[['indices']][2] + 1 + out_position <- arrayIndex2VectorIndex(work_piece[['indices']], work_piece[['dims']]) + out_indices <- out_position:(out_position + length(tmp[, , jmemb, ]) - 1) + var_data[out_indices] <- as.vector(tmp[, , jmemb, ]) + } + work_piece[['indices']][2] <- work_piece[['indices']][2] - tail(members, 1) + 1 + } else { + out_position <- arrayIndex2VectorIndex(work_piece[['indices']], work_piece[['dims']]) + out_indices <- out_position:(out_position + length(tmp) - 1) + a <- aperm(tmp, c(1, 2, 4, 3)) + as.vector(a) + var_data[out_indices] <- as.vector(aperm(tmp, c(1, 2, 4, 3))) + } + work_piece[['indices']][3] <- work_piece[['indices']][3] + 1 + } + } + } + nc_close(fnc) + if (is_2d_var) { + if (remap_needed) { + array_across_gw <- FALSE + file.remove(filein) + ###if (!is.null(mask) && lonlat_subsetting_requested) { + ### file.remove(mask_file) + ###} + } else { + if (first_lon_in_original_file < 0) { + array_across_gw <- data_across_gw + } else { + array_across_gw <- FALSE + } + } + } + } + if (explore_dims) { + list(dims = dims, is_2d_var = is_2d_var, grid = grid_name, + units = units, var_long_name = var_long_name, + data_across_gw = data_across_gw, array_across_gw = array_across_gw) + } else { + ###if (!silent && !is.null(progress_connection) && !is.null(work_piece[['progress_amount']])) { + ### foobar <- writeBin(work_piece[['progress_amount']], progress_connection) + ###} + if (!silent && !is.null(work_piece[['progress_amount']])) { + message(paste0(work_piece[['progress_amount']]), appendLF = FALSE) + } + found_file + } +} + +.LoadSampleData <- function(var, exp = NULL, obs = NULL, sdates, + nmember = NULL, nmemberobs = NULL, + nleadtime = NULL, leadtimemin = 1, + leadtimemax = NULL, storefreq = 'monthly', + sampleperiod = 1, lonmin = 0, lonmax = 360, + latmin = -90, latmax = 90, output = 'areave', + method = 'conservative', grid = NULL, + maskmod = vector("list", 15), + maskobs = vector("list", 15), + configfile = NULL, suffixexp = NULL, + suffixobs = NULL, varmin = NULL, varmax = NULL, + silent = FALSE, nprocs = NULL) { + ## This 
function loads and selects sample data stored in sampleMap and + ## sampleTimeSeries and is used in the examples instead of Load() so as + ## to avoid nco and cdo system calls and computation time in the stage + ## of running examples in the CHECK process on CRAN. + selected_start_dates <- match(sdates, c('19851101', '19901101', '19951101', + '20001101', '20051101')) + start_dates_position <- 3 + lead_times_position <- 4 + + if (output == 'lonlat') { + sampleData <- s2dv::sampleMap + if (is.null(leadtimemax)) { + leadtimemax <- dim(sampleData$mod)[lead_times_position] + } + selected_lead_times <- leadtimemin:leadtimemax + + dataOut <- sampleData + dataOut$mod <- sampleData$mod[, , selected_start_dates, selected_lead_times, , ] + dataOut$obs <- sampleData$obs[, , selected_start_dates, selected_lead_times, , ] + } + else if (output == 'areave') { + sampleData <- s2dv::sampleTimeSeries + if (is.null(leadtimemax)) { + leadtimemax <- dim(sampleData$mod)[lead_times_position] + } + selected_lead_times <- leadtimemin:leadtimemax + + dataOut <- sampleData + dataOut$mod <- sampleData$mod[, , selected_start_dates, selected_lead_times] + dataOut$obs <- sampleData$obs[, , selected_start_dates, selected_lead_times] + } + + dims_out <- dim(sampleData$mod) + dims_out[start_dates_position] <- length(selected_start_dates) + dims_out[lead_times_position] <- length(selected_lead_times) + dim(dataOut$mod) <- dims_out + + dims_out <- dim(sampleData$obs) + dims_out[start_dates_position] <- length(selected_start_dates) + dims_out[lead_times_position] <- length(selected_lead_times) + dim(dataOut$obs) <- dims_out + + invisible(list(mod = dataOut$mod, obs = dataOut$obs, + lat = dataOut$lat, lon = dataOut$lon)) +} + +.ConfigGetDatasetInfo <- function(matching_entries, table_name) { + # This function obtains the information of a dataset and variable pair, + # applying all the entries that match in the configuration file. + if (table_name == 'experiments') { + id <- 'EXP' + } else { + id <- 'OBS' + } + defaults <- c(paste0('$DEFAULT_', id, '_MAIN_PATH$'), paste0('$DEFAULT_', id, '_FILE_PATH$'), '$DEFAULT_NC_VAR_NAME$', '$DEFAULT_SUFFIX$', '$DEFAULT_VAR_MIN$', '$DEFAULT_VAR_MAX$') + info <- NULL + + for (entry in matching_entries) { + if (is.null(info)) { + info <- entry[-1:-2] + info[which(info == '*')] <- defaults[which(info == '*')] + } else { + info[which(entry[-1:-2] != '*')] <- entry[-1:-2][which(entry[-1:-2] != '*')] + } + } + + info <- as.list(info) + names(info) <- c('main_path', 'file_path', 'nc_var_name', 'suffix', 'var_min', 'var_max') + info +} + +.ReplaceGlobExpressions <- function(path_with_globs, actual_path, + replace_values, tags_to_keep, + dataset_name, permissive) { + # The goal of this function is to replace the shell globbing expressions in + # a path pattern (that may contain shell globbing expressions and Load() + # tags) by the corresponding part of the real existing path. + # What is done actually is to replace all the values of the tags in the + # actual path by the corresponding $TAG$ + # + # It takes mainly two inputs. 
The path with expressions and tags, e.g.: + # /data/experiments/*/$EXP_NAME$/$VAR_NAME$/$VAR_NAME$_*$START_DATE$*.nc + # and a complete known path to one of the matching files, e.g.: + # /data/experiments/ecearth/i00k/tos/tos_fc0-1_19901101_199011-199110.nc + # and it returns the path pattern but without shell globbing expressions: + # /data/experiments/ecearth/$EXP_NAME$/$VAR_NAME$/$VAR_NAME$_fc0-1_$START_DATE$_199011-199110.nc + # + # To do that, it needs also as inputs the list of replace values (the + # association of each tag to their value). + # + # All the tags not present in the parameter tags_to_keep will be repalced. + # + # Not all cases can be resolved with the implemented algorithm. In an + # unsolvable case a warning is given and one possible guess is returned. + # + # In some cases it is interesting to replace only the expressions in the + # path to the file, but not the ones in the file name itself. To keep the + # expressions in the file name, the parameter permissive can be set to + # TRUE. To replace all the expressions it can be set to FALSE. + clean <- function(x) { + if (nchar(x) > 0) { + x <- gsub('\\\\', '', x) + x <- gsub('\\^', '', x) + x <- gsub('\\$', '', x) + x <- unname(sapply(strsplit(x, '[',fixed = TRUE)[[1]], function(y) gsub('.*]', '.', y))) + do.call(paste0, as.list(x)) + } else { + x + } + } + + strReverse <- function(x) sapply(lapply(strsplit(x, NULL), rev), paste, collapse = "") + + if (permissive) { + actual_path_chunks <- strsplit(actual_path, '/')[[1]] + actual_path <- paste(actual_path_chunks[-length(actual_path_chunks)], collapse = '/') + file_name <- tail(actual_path_chunks, 1) + if (length(actual_path_chunks) > 1) { + file_name <- paste0('/', file_name) + } + path_with_globs_chunks <- strsplit(path_with_globs, '/')[[1]] + path_with_globs <- paste(path_with_globs_chunks[-length(path_with_globs_chunks)], + collapse = '/') + path_with_globs <- .ConfigReplaceVariablesInString(path_with_globs, replace_values) + file_name_with_globs <- tail(path_with_globs_chunks, 1) + if (length(path_with_globs_chunks) > 1) { + file_name_with_globs <- paste0('/', file_name_with_globs) + } + right_known <- head(strsplit(file_name_with_globs, '*', fixed = TRUE)[[1]], 1) + right_known_no_tags <- .ConfigReplaceVariablesInString(right_known, replace_values) + path_with_globs_rx <- utils::glob2rx(paste0(path_with_globs, right_known_no_tags)) + match <- regexpr(gsub('$', '', path_with_globs_rx, fixed = TRUE), paste0(actual_path, file_name)) + if (match != 1) { + stop("Incorrect parameters to replace glob expressions. 
The path with expressions does not match the actual path.") + } + if (attr(match, 'match.length') - nchar(right_known_no_tags) < nchar(actual_path)) { + path_with_globs <- paste0(path_with_globs, right_known_no_tags, '*') + file_name_with_globs <- sub(right_known, '/*', file_name_with_globs) + } + } + path_with_globs_rx <- utils::glob2rx(path_with_globs) + values_to_replace <- c() + tags_to_replace_starts <- c() + tags_to_replace_ends <- c() + give_warning <- FALSE + for (tag in tags_to_keep) { + matches <- gregexpr(paste0('$', tag, '$'), path_with_globs_rx, fixed = TRUE)[[1]] + lengths <- attr(matches, 'match.length') + if (!(length(matches) == 1 && matches[1] == -1)) { + for (i in 1:length(matches)) { + left <- NULL + if (matches[i] > 1) { + left <- .ConfigReplaceVariablesInString(substr(path_with_globs_rx, 1, matches[i] - 1), replace_values) + left_known <- strReverse(head(strsplit(strReverse(left), strReverse('.*'), fixed = TRUE)[[1]], 1)) + } + right <- NULL + if ((matches[i] + lengths[i] - 1) < nchar(path_with_globs_rx)) { + right <- .ConfigReplaceVariablesInString(substr(path_with_globs_rx, matches[i] + lengths[i], nchar(path_with_globs_rx)), replace_values) + right_known <- head(strsplit(right, '.*', fixed = TRUE)[[1]], 1) + } + final_match <- NULL + match_limits <- NULL + if (!is.null(left)) { + left_match <- regexpr(paste0(left, replace_values[[tag]], right_known), actual_path) + match_len <- attr(left_match, 'match.length') + left_match_limits <- c(left_match + match_len - 1 - nchar(clean(right_known)) - nchar(replace_values[[tag]]) + 1, + left_match + match_len - 1 - nchar(clean(right_known))) + if (!(left_match < 1)) { + match_limits <- left_match_limits + } + } + right_match <- NULL + if (!is.null(right)) { + right_match <- regexpr(paste0(left_known, replace_values[[tag]], right), actual_path) + match_len <- attr(right_match, 'match.length') + right_match_limits <- c(right_match + nchar(clean(left_known)), + right_match + nchar(clean(left_known)) + nchar(replace_values[[tag]]) - 1) + if (is.null(match_limits) && !(right_match < 1)) { + match_limits <- right_match_limits + } + } + if (!is.null(right_match) && !is.null(left_match)) { + if (!identical(right_match_limits, left_match_limits)) { + give_warning <- TRUE + } + } + if (is.null(match_limits)) { + stop("Too complex path pattern specified for ", dataset_name, + ". 
Specify a simpler path pattern for this dataset.") + } + values_to_replace <- c(values_to_replace, tag) + tags_to_replace_starts <- c(tags_to_replace_starts, match_limits[1]) + tags_to_replace_ends <- c(tags_to_replace_ends, match_limits[2]) + } + } + } + + if (length(tags_to_replace_starts) > 0) { + reorder <- sort(tags_to_replace_starts, index.return = TRUE) + tags_to_replace_starts <- reorder$x + values_to_replace <- values_to_replace[reorder$ix] + tags_to_replace_ends <- tags_to_replace_ends[reorder$ix] + while (length(values_to_replace) > 0) { + actual_path <- paste0(substr(actual_path, 1, head(tags_to_replace_starts, 1) - 1), + '$', head(values_to_replace, 1), '$', + substr(actual_path, head(tags_to_replace_ends, 1) + 1, nchar(actual_path))) + extra_chars <- nchar(head(values_to_replace, 1)) + 2 - (head(tags_to_replace_ends, 1) - head(tags_to_replace_starts, 1) + 1) + values_to_replace <- values_to_replace[-1] + tags_to_replace_starts <- tags_to_replace_starts[-1] + tags_to_replace_ends <- tags_to_replace_ends[-1] + tags_to_replace_starts <- tags_to_replace_starts + extra_chars + tags_to_replace_ends <- tags_to_replace_ends + extra_chars + } + } + + if (give_warning) { + .warning(paste0("Too complex path pattern specified for ", dataset_name, + ". Double check carefully the '$Files' fetched for this dataset or specify a simpler path pattern.")) + } + + if (permissive) { + paste0(actual_path, file_name_with_globs) + } else { + actual_path + } +} + +.FindTagValue <- function(path_with_globs_and_tag, actual_path, tag) { + tag <- paste0('\\$', tag, '\\$') + path_with_globs_and_tag <- paste0('^', path_with_globs_and_tag, '$') + parts <- strsplit(path_with_globs_and_tag, '*', fixed = TRUE)[[1]] + parts <- as.list(parts[grep(tag, parts)]) + longest_couples <- c() + pos_longest_couples <- c() + found_value <- NULL + for (i in 1:length(parts)) { + parts[[i]] <- strsplit(parts[[i]], tag)[[1]] + if (length(parts[[i]]) == 1) { + parts[[i]] <- c(parts[[i]], '') + } + len_parts <- sapply(parts[[i]], nchar) + len_couples <- len_parts[-length(len_parts)] + len_parts[2:length(len_parts)] + pos_longest_couples <- c(pos_longest_couples, which.max(len_couples)) + longest_couples <- c(longest_couples, max(len_couples)) + } + chosen_part <- which.max(longest_couples) + parts[[chosen_part]] <- parts[[chosen_part]][pos_longest_couples[chosen_part]:(pos_longest_couples[chosen_part] + 1)] + if (nchar(parts[[chosen_part]][1]) >= nchar(parts[[chosen_part]][2])) { + if (nchar(parts[[chosen_part]][1]) > 0) { + matches <- gregexpr(parts[[chosen_part]][1], actual_path)[[1]] + if (length(matches) == 1) { + match_left <- matches + actual_path <- substr(actual_path, match_left + attr(match_left, 'match.length'), nchar(actual_path)) + } + } + if (nchar(parts[[chosen_part]][2]) > 0) { + matches <- gregexpr(parts[[chosen_part]][2], actual_path)[[1]] + if (length(matches) == 1) { + match_right <- matches + found_value <- substr(actual_path, 0, match_right - 1) + } + } + } else { + if (nchar(parts[[chosen_part]][2]) > 0) { + matches <- gregexpr(parts[[chosen_part]][2], actual_path)[[1]] + if (length(matches) == 1) { + match_right <- matches + actual_path <- substr(actual_path, 0, match_right - 1) + } + } + if (nchar(parts[[chosen_part]][1]) > 0) { + matches <- gregexpr(parts[[chosen_part]][1], actual_path)[[1]] + if (length(matches) == 1) { + match_left <- matches + found_value <- substr(actual_path, match_left + attr(match_left, 'match.length'), nchar(actual_path)) + } + } + } + found_value +} + +.FilterUserGraphicArgs <- 
function(excludedArgs, ...) { + # This function filter the extra graphical parameters passed by the user in + # a plot function, excluding the ones that the plot function uses by default. + # Each plot function has a different set of arguments that are not allowed to + # be modified. + args <- list(...) + userArgs <- list() + for (name in names(args)) { + if ((name != "") & !is.element(name, excludedArgs)) { + # If the argument has a name and it is not in the list of excluded + # arguments, then it is added to the list that will be used + userArgs[[name]] <- args[[name]] + } else { + .warning(paste0("the argument '", name, "' can not be + modified and the new value will be ignored")) + } + } + userArgs +} + +.SelectDevice <- function(fileout, width, height, units, res) { + # This function is used in the plot functions to check the extension of the + # files where the graphics will be stored and select the right R device to + # save them. + # If the vector of filenames ('fileout') has files with different + # extensions, then it will only accept the first one, changing all the rest + # of the filenames to use that extension. + + # We extract the extension of the filenames: '.png', '.pdf', ... + ext <- regmatches(fileout, regexpr("\\.[a-zA-Z0-9]*$", fileout)) + + if (length(ext) != 0) { + # If there is an extension specified, select the correct device + ## units of width and height set to accept inches + if (ext[1] == ".png") { + saveToFile <- function(fileout) { + png(filename = fileout, width = width, height = height, res = res, units = units) + } + } else if (ext[1] == ".jpeg") { + saveToFile <- function(fileout) { + jpeg(filename = fileout, width = width, height = height, res = res, units = units) + } + } else if (ext[1] %in% c(".eps", ".ps")) { + saveToFile <- function(fileout) { + postscript(file = fileout, width = width, height = height) + } + } else if (ext[1] == ".pdf") { + saveToFile <- function(fileout) { + pdf(file = fileout, width = width, height = height) + } + } else if (ext[1] == ".svg") { + saveToFile <- function(fileout) { + svg(filename = fileout, width = width, height = height) + } + } else if (ext[1] == ".bmp") { + saveToFile <- function(fileout) { + bmp(filename = fileout, width = width, height = height, res = res, units = units) + } + } else if (ext[1] == ".tiff") { + saveToFile <- function(fileout) { + tiff(filename = fileout, width = width, height = height, res = res, units = units) + } + } else { + .warning("file extension not supported, it will be used '.eps' by default.") + ## In case there is only one filename + fileout[1] <- sub("\\.[a-zA-Z0-9]*$", ".eps", fileout[1]) + ext[1] <- ".eps" + saveToFile <- function(fileout) { + postscript(file = fileout, width = width, height = height) + } + } + # Change filenames when necessary + if (any(ext != ext[1])) { + .warning(paste0("some extensions of the filenames provided in 'fileout' are not ", ext[1],". The extensions are being converted to ", ext[1], ".")) + fileout <- sub("\\.[a-zA-Z0-9]*$", ext[1], fileout) + } + } else { + # Default filenames when there is no specification + .warning("there are no extensions specified in the filenames, default to '.eps'") + fileout <- paste0(fileout, ".eps") + saveToFile <- postscript + } + + # return the correct function with the graphical device, and the correct + # filenames + list(fun = saveToFile, files = fileout) +} + +.message <- function(...) 
{ + # Function to use the 'message' R function with our custom settings + # Default: new line at end of message, indent to 0, exdent to 3, + # collapse to \n* + args <- list(...) + + ## In case we need to specify message arguments + if (!is.null(args[["appendLF"]])) { + appendLF <- args[["appendLF"]] + } else { + ## Default value in message function + appendLF <- TRUE + } + if (!is.null(args[["domain"]])) { + domain <- args[["domain"]] + } else { + ## Default value in message function + domain <- NULL + } + args[["appendLF"]] <- NULL + args[["domain"]] <- NULL + + ## To modify strwrap indent and exdent arguments + if (!is.null(args[["indent"]])) { + indent <- args[["indent"]] + } else { + indent <- 0 + } + if (!is.null(args[["exdent"]])) { + exdent <- args[["exdent"]] + } else { + exdent <- 3 + } + args[["indent"]] <- NULL + args[["exdent"]] <- NULL + + ## To modify paste collapse argument + if (!is.null(args[["collapse"]])) { + collapse <- args[["collapse"]] + } else { + collapse <- "\n*" + } + args[["collapse"]] <- NULL + + ## Message tag + if (!is.null(args[["tag"]])) { + tag <- args[["tag"]] + } else { + tag <- "* " + } + args[["tag"]] <- NULL + + message(paste0(tag, paste(strwrap( + args, indent = indent, exdent = exdent + ), collapse = collapse)), appendLF = appendLF, domain = domain) +} + +.warning <- function(...) { + # Function to use the 'warning' R function with our custom settings + # Default: no call information, indent to 0, exdent to 3, + # collapse to \n + args <- list(...) + + ## In case we need to specify warning arguments + if (!is.null(args[["call."]])) { + call <- args[["call."]] + } else { + ## Default: don't show info about the call where the warning came up + call <- FALSE + } + if (!is.null(args[["immediate."]])) { + immediate <- args[["immediate."]] + } else { + ## Default value in warning function + immediate <- FALSE + } + if (!is.null(args[["noBreaks."]])) { + noBreaks <- args[["noBreaks."]] + } else { + ## Default value warning function + noBreaks <- FALSE + } + if (!is.null(args[["domain"]])) { + domain <- args[["domain"]] + } else { + ## Default value warning function + domain <- NULL + } + args[["call."]] <- NULL + args[["immediate."]] <- NULL + args[["noBreaks."]] <- NULL + args[["domain"]] <- NULL + + ## To modify strwrap indent and exdent arguments + if (!is.null(args[["indent"]])) { + indent <- args[["indent"]] + } else { + indent <- 0 + } + if (!is.null(args[["exdent"]])) { + exdent <- args[["exdent"]] + } else { + exdent <- 3 + } + args[["indent"]] <- NULL + args[["exdent"]] <- NULL + + ## To modify paste collapse argument + if (!is.null(args[["collapse"]])) { + collapse <- args[["collapse"]] + } else { + collapse <- "\n!" + } + args[["collapse"]] <- NULL + + ## Warning tag + if (!is.null(args[["tag"]])) { + tag <- args[["tag"]] + } else { + tag <- "! Warning: " + } + args[["tag"]] <- NULL + + warning(paste0(tag, paste(strwrap( + args, indent = indent, exdent = exdent + ), collapse = collapse)), call. = call, immediate. = immediate, + noBreaks. = noBreaks, domain = domain) +} + +.IsColor <- function(x) { + res <- try(col2rgb(x), silent = TRUE) + return(!"try-error" %in% class(res)) +} + +# This function switches to a specified figure at position (row, col) in a layout. +# This overcomes the bug in par(mfg = ...). However the mode par(new = TRUE) is +# activated, i.e., all drawn elements will be superimposed. 
Additionally, after +# using this function, the automatical pointing to the next figure in the layout +# will be spoiled: once the last figure in the layout is drawn, the pointer won't +# move to the first figure in the layout. +# Only figures with numbers other than 0 (when creating the layout) will be +# accessible. +# Inputs: either row and col, or n and mat +.SwitchToFigure <- function(row = NULL, col = NULL, n = NULL, mat = NULL) { + if (!is.null(n) && !is.null(mat)) { + if (!is.numeric(n) || length(n) != 1) { + stop("Parameter 'n' must be a single numeric value.") + } + n <- round(n) + if (!is.array(mat)) { + stop("Parameter 'mat' must be an array.") + } + target <- which(mat == n, arr.ind = TRUE)[1, ] + row <- target[1] + col <- target[2] + } else if (!is.null(row) && !is.null(col)) { + if (!is.numeric(row) || length(row) != 1) { + stop("Parameter 'row' must be a single numeric value.") + } + row <- round(row) + if (!is.numeric(col) || length(col) != 1) { + stop("Parameter 'col' must be a single numeric value.") + } + col <- round(col) + } else { + stop("Either 'row' and 'col' or 'n' and 'mat' must be provided.") + } + next_attempt <- c(row, col) + par(mfg = next_attempt) + i <- 1 + layout_size <- par('mfrow') + layout_cells <- matrix(1:prod(layout_size), layout_size[1], layout_size[2], + byrow = TRUE) + while (any((par('mfg')[1:2] != c(row, col)))) { + next_attempt <- which(layout_cells == i, arr.ind = TRUE)[1, ] + par(mfg = next_attempt) + i <- i + 1 + if (i > prod(layout_size)) { + stop("Figure not accessible.") + } + } + plot(0, type = 'n', axes = FALSE, ann = FALSE) + par(mfg = next_attempt) +} + +# Function to permute arrays of non-atomic elements (e.g. POSIXct) +.aperm2 <- function(x, new_order) { + old_dims <- dim(x) + attr_bk <- attributes(x) + if ('dim' %in% names(attr_bk)) { + attr_bk[['dim']] <- NULL + } + if (is.numeric(x)) { + x <- aperm(x, new_order) + } else { + y <- array(1:length(x), dim = dim(x)) + y <- aperm(y, new_order) + x <- x[as.vector(y)] + } + dim(x) <- old_dims[new_order] + attributes(x) <- c(attributes(x), attr_bk) + x +} + +# This function is a helper for the function .MergeArrays. +# It expects as inputs two named numeric vectors, and it extends them +# with dimensions of length 1 until an ordered common dimension +# format is reached. +# The first output is dims1 extended with 1s. +# The second output is dims2 extended with 1s. +# The third output is a merged dimension vector. If dimensions with +# the same name are found in the two inputs, and they have a different +# length, the maximum is taken. +.MergeArrayDims <- function(dims1, dims2) { + new_dims1 <- c() + new_dims2 <- c() + while (length(dims1) > 0) { + if (names(dims1)[1] %in% names(dims2)) { + pos <- which(names(dims2) == names(dims1)[1]) + dims_to_add <- rep(1, pos - 1) + if (length(dims_to_add) > 0) { + names(dims_to_add) <- names(dims2[1:(pos - 1)]) + } + new_dims1 <- c(new_dims1, dims_to_add, dims1[1]) + new_dims2 <- c(new_dims2, dims2[1:pos]) + dims1 <- dims1[-1] + dims2 <- dims2[-c(1:pos)] + } else { + new_dims1 <- c(new_dims1, dims1[1]) + new_dims2 <- c(new_dims2, 1) + names(new_dims2)[length(new_dims2)] <- names(dims1)[1] + dims1 <- dims1[-1] + } + } + if (length(dims2) > 0) { + dims_to_add <- rep(1, length(dims2)) + names(dims_to_add) <- names(dims2) + new_dims1 <- c(new_dims1, dims_to_add) + new_dims2 <- c(new_dims2, dims2) + } + list(new_dims1, new_dims2, pmax(new_dims1, new_dims2)) +} + +# This function takes two named arrays and merges them, filling with +# NA where needed. 
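+# For example, merging the two arrays below along 'b': the 'b' dimension is
+# concatenated (1 + 3 = 4), dimensions present in only one array are padded
+# with NA in the other, and shared dimensions keep their common length.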
+# dim(array1) +# 'b' 'c' 'e' 'f' +# 1 3 7 9 +# dim(array2) +# 'a' 'b' 'd' 'f' 'g' +# 2 3 5 9 11 +# dim(.MergeArrays(array1, array2, 'b')) +# 'a' 'b' 'c' 'e' 'd' 'f' 'g' +# 2 4 3 7 5 9 11 +.MergeArrays <- function(array1, array2, along) { + if (!(is.null(array1) || is.null(array2))) { + if (!(identical(names(dim(array1)), names(dim(array2))) && + identical(dim(array1)[-which(names(dim(array1)) == along)], + dim(array2)[-which(names(dim(array2)) == along)]))) { + new_dims <- .MergeArrayDims(dim(array1), dim(array2)) + dim(array1) <- new_dims[[1]] + dim(array2) <- new_dims[[2]] + for (j in 1:length(dim(array1))) { + if (names(dim(array1))[j] != along) { + if (dim(array1)[j] != dim(array2)[j]) { + if (which.max(c(dim(array1)[j], dim(array2)[j])) == 1) { + na_array_dims <- dim(array2) + na_array_dims[j] <- dim(array1)[j] - dim(array2)[j] + na_array <- array(dim = na_array_dims) + array2 <- abind(array2, na_array, along = j) + names(dim(array2)) <- names(na_array_dims) + } else { + na_array_dims <- dim(array1) + na_array_dims[j] <- dim(array2)[j] - dim(array1)[j] + na_array <- array(dim = na_array_dims) + array1 <- abind(array1, na_array, along = j) + names(dim(array1)) <- names(na_array_dims) + } + } + } + } + } + if (!(along %in% names(dim(array2)))) { + stop("The dimension specified in 'along' is not present in the ", + "provided arrays.") + } + array1 <- abind(array1, array2, along = which(names(dim(array1)) == along)) + names(dim(array1)) <- names(dim(array2)) + } else if (is.null(array1)) { + array1 <- array2 + } + array1 +} + +# only can be used in Trend(). Needs generalization or be replaced by other function. +.reorder <- function(output, time_dim, dim_names) { + # Add dim name back + if (is.null(dim(output))) { + dim(output) <- c(stats = length(output)) + } else { #is an array + if (length(dim(output)) == 1) { + if (!is.null(names(dim(output)))) { + dim(output) <- c(1, dim(output)) + names(dim(output))[1] <- time_dim + } else { + names(dim(output)) <- time_dim + } + } else { # more than one dim + if (names(dim(output))[1] != "") { + dim(output) <- c(1, dim(output)) + names(dim(output))[1] <- time_dim + } else { #regular case + names(dim(output))[1] <- time_dim + } + } + } + # reorder + pos <- match(dim_names, names(dim(output))) + output <- aperm(output, pos) + names(dim(output)) <- dim_names + names(dim(output))[names(dim(output)) == time_dim] <- 'stats' + return(output) +} + +# to be used in AMV.R, TPI.R, SPOD.R, GSAT.R and GMST.R +.Indices <- function(data, type, monini, indices_for_clim, + fmonth_dim, sdate_dim, year_dim, month_dim, na.rm) { + + if (type == 'dcpp') { + + fyear_dim <- 'fyear' + data <- Season(data = data, time_dim = fmonth_dim, + monini = monini, moninf = 1, monsup = 12, + method = mean, na.rm = na.rm) + names(dim(data))[which(names(dim(data))==fmonth_dim)] <- fyear_dim + + if (identical(indices_for_clim, FALSE)) { ## data is already anomalies + + anom <- data + + } else { ## Different indices_for_clim for each forecast year (to use the same calendar years) + + n_fyears <- as.numeric(dim(data)[fyear_dim]) + n_sdates <- as.numeric(dim(data)[sdate_dim]) + + if (is.null(indices_for_clim)) { ## climatology over the whole (common) period + first_years_for_clim <- n_fyears : 1 + last_years_for_clim <- n_sdates : (n_sdates - n_fyears + 1) + } else { ## indices_for_clim specified as a numeric vector + first_years_for_clim <- seq(from = indices_for_clim[1], by = -1, length.out = n_fyears) + last_years_for_clim <- seq(from = indices_for_clim[length(indices_for_clim)], by = 
-1, length.out = n_fyears) + } + + data <- s2dv::Reorder(data = data, order = c(fyear_dim, sdate_dim)) + anom <- array(data = NA, dim = dim(data)) + for (i in 1:n_fyears) { + clim <- mean(data[i,first_years_for_clim[i]:last_years_for_clim[i]], na.rm = na.rm) + anom[i,] <- data[i,] - clim + } + } + + } else if (type %in% c('obs','hist')) { + + data <- multiApply::Apply(data = data, target_dims = month_dim, fun = mean, na.rm = na.rm)$output1 + + if (identical(indices_for_clim, FALSE)) { ## data is already anomalies + clim <- 0 + } else if (is.null(indices_for_clim)) { ## climatology over the whole period + clim <- multiApply::Apply(data = data, target_dims = year_dim, fun = mean, na.rm = na.rm)$output1 + } else { ## indices_for_clim specified as a numeric vector + clim <- multiApply::Apply(data = ClimProjDiags::Subset(x = data, along = year_dim, indices = indices_for_clim), + target_dims = year_dim, fun = mean, na.rm = na.rm)$output1 + } + + anom <- data - clim + + } else {stop('type must be dcpp, hist or obs')} + + return(anom) +} + +#TODO: Remove from s2dv when PlotLayout can get colorbar info from plotting function directly. +# The function is temporarily here because PlotLayout() needs to draw the colorbars of +# PlotMostLikelyQuantileMap(). +#Draws Color Bars for Categories +#A wrapper of s2dv::ColorBar to generate multiple color bars for different +#categories, and each category has different color set. +GradientCatsColorBar <- function(nmap, brks = NULL, cols = NULL, vertical = TRUE, subsampleg = NULL, + bar_limits, var_limits = NULL, + triangle_ends = NULL, plot = TRUE, + draw_separators = FALSE, + bar_titles = NULL, title_scale = 1, label_scale = 1, extra_margin = rep(0, 4), + ...) { + # bar_limits + if (!is.numeric(bar_limits) || length(bar_limits) != 2) { + stop("Parameter 'bar_limits' must be a numeric vector of length 2.") + } + + # Check brks + if (is.null(brks) || (is.numeric(brks) && length(brks) == 1)) { + num_brks <- 5 + if (is.numeric(brks)) { + num_brks <- brks + } + brks <- seq(from = bar_limits[1], to = bar_limits[2], length.out = num_brks) + } + if (!is.numeric(brks)) { + stop("Parameter 'brks' must be a numeric vector.") + } + # Check cols + col_sets <- list(c("#A1D99B", "#74C476", "#41AB5D", "#238B45"), + c("#6BAED6FF", "#4292C6FF", "#2171B5FF", "#08519CFF"), + c("#FFEDA0FF", "#FED976FF", "#FEB24CFF", "#FD8D3CFF"), + c("#FC4E2AFF", "#E31A1CFF", "#BD0026FF", "#800026FF"), + c("#FCC5C0", "#FA9FB5", "#F768A1", "#DD3497")) + if (is.null(cols)) { + if (length(col_sets) >= nmap) { + chosen_sets <- 1:nmap + chosen_sets <- chosen_sets + floor((length(col_sets) - length(chosen_sets)) / 2) + } else { + chosen_sets <- array(1:length(col_sets), nmap) + } + cols <- col_sets[chosen_sets] + } else { + if (!is.list(cols)) { + stop("Parameter 'cols' must be a list of character vectors.") + } + if (!all(sapply(cols, is.character))) { + stop("Parameter 'cols' must be a list of character vectors.") + } + if (length(cols) != nmap) { + stop("Parameter 'cols' must be a list of the same length as the number of ", + "maps in 'maps'.") + } + } + for (i in 1:length(cols)) { + if (length(cols[[i]]) != (length(brks) - 1)) { + cols[[i]] <- grDevices::colorRampPalette(cols[[i]])(length(brks) - 1) + } + } + + # Check bar_titles + if (is.null(bar_titles)) { + if (nmap == 3) { + bar_titles <- c("Below normal (%)", "Normal (%)", "Above normal (%)") + } else if (nmap == 5) { + bar_titles <- c("Low (%)", "Below normal (%)", + "Normal (%)", "Above normal (%)", "High (%)") + } else { + bar_titles <- 
paste0("Cat. ", 1:nmap, " (%)") + } + } + + if (plot) { + for (k in 1:nmap) { + s2dv::ColorBar(brks = brks, cols = cols[[k]], vertical = FALSE, subsampleg = subsampleg, +# bar_limits = bar_limits, var_limits = var_limits, + triangle_ends = triangle_ends, plot = TRUE, + draw_separators = draw_separators, + title = bar_titles[[k]], title_scale = title_scale, + label_scale = label_scale, extra_margin = extra_margin) + } + } else { + #TODO: col_inf and col_sup + return(list(brks = brks, cols = cols)) + } + +} + + diff --git a/modules/Visualization/R/tmp/clim.palette.R b/modules/Visualization/R/tmp/clim.palette.R new file mode 100644 index 00000000..7f220d31 --- /dev/null +++ b/modules/Visualization/R/tmp/clim.palette.R @@ -0,0 +1,70 @@ +#'Generate Climate Color Palettes +#' +#'Generates a colorblind friendly color palette with color ranges useful in +#'climate temperature variable plotting. +#' +#'@param palette Which type of palette to generate: from blue through white +#' to red ('bluered'), from red through white to blue ('redblue'), from +#' yellow through orange to red ('yellowred'), from red through orange to +#' red ('redyellow'), from purple through white to orange ('purpleorange'), +#' and from orange through white to purple ('orangepurple'). +#'@param n Number of colors to generate. +#' +#'@examples +#'lims <- seq(-1, 1, length.out = 21) +#' +#'ColorBar(lims, color_fun = clim.palette('redyellow')) +#' +#'cols <- clim.colors(20) +#'ColorBar(lims, cols) +#' +#'@rdname clim.palette +#'@importFrom grDevices colorRampPalette +#'@export +clim.palette <- function(palette = "bluered") { + if (palette == "bluered") { + colorbar <- colorRampPalette(rev(c("#67001f", "#b2182b", "#d6604d", + "#f4a582", "#fddbc7", "#f7f7f7", + "#d1e5f0", "#92c5de", "#4393c3", + "#2166ac", "#053061"))) + attr(colorbar, 'na_color') <- 'pink' + } else if (palette == "redblue") { + colorbar <- colorRampPalette(c("#67001f", "#b2182b", "#d6604d", + "#f4a582", "#fddbc7", "#f7f7f7", + "#d1e5f0", "#92c5de", "#4393c3", + "#2166ac", "#053061")) + attr(colorbar, 'na_color') <- 'pink' + } else if (palette == "yellowred") { + colorbar <- colorRampPalette(c("#ffffcc", "#ffeda0", "#fed976", + "#feb24c", "#fd8d3c", "#fc4e2a", + "#e31a1c", "#bd0026", "#800026")) + attr(colorbar, 'na_color') <- 'pink' + } else if (palette == "redyellow") { + colorbar <- colorRampPalette(rev(c("#ffffcc", "#ffeda0", "#fed976", + "#feb24c", "#fd8d3c", "#fc4e2a", + "#e31a1c", "#bd0026", "#800026"))) + attr(colorbar, 'na_color') <- 'pink' + } else if (palette == "purpleorange") { + colorbar <- colorRampPalette(c("#2d004b", "#542789", "#8073ac", + "#b2abd2", "#d8daeb", "#f7f7f7", + "#fee0b6", "#fdb863", "#e08214", + "#b35806", "#7f3b08")) + attr(colorbar, 'na_color') <- 'pink' + } else if (palette == "orangepurple") { + colorbar <- colorRampPalette(rev(c("#2d004b", "#542789", "#8073ac", + "#b2abd2", "#d8daeb", "#f7f7f7", + "#fee0b6", "#fdb863", "#e08214", + "#b35806", "#7f3b08"))) + attr(colorbar, 'na_color') <- 'pink' + } else { + stop("Parameter 'palette' must be one of 'bluered', 'redblue', 'yellowred'", + "'redyellow', 'purpleorange' or 'orangepurple'.") + } + colorbar +} +#'@rdname clim.palette +#'@export +clim.colors <- function(n, palette = "bluered") { + clim.palette(palette)(n) +} + diff --git a/modules/Visualization/R/tmp/zzz.R b/modules/Visualization/R/tmp/zzz.R new file mode 100644 index 00000000..f2871dfd --- /dev/null +++ b/modules/Visualization/R/tmp/zzz.R @@ -0,0 +1,256 @@ +# Function to permute arrays of non-atomic elements (e.g. 
POSIXct) +.aperm2 <- function(x, new_order) { + old_dims <- dim(x) + attr_bk <- attributes(x) + if ('dim' %in% names(attr_bk)) { + attr_bk[['dim']] <- NULL + } + if (is.numeric(x)) { + x <- aperm(x, new_order) + } else { + y <- array(1:length(x), dim = dim(x)) + y <- aperm(y, new_order) + x <- x[as.vector(y)] + } + dim(x) <- old_dims[new_order] + attributes(x) <- c(attributes(x), attr_bk) + x +} + +# verbose-only printing function +.printv <- function(value, verbosity = TRUE) { + if (verbosity) { + print(value) + } +} + +# normalize a time series +.standardize <- function(timeseries) { + out <- (timeseries - mean(timeseries, na.rm = T)) / sd(timeseries, na.rm = T) + return(out) +} + +.selbox <- function(lon, lat, xlim = NULL, ylim = NULL) { + if (!is.null(xlim)) { + # This transforms c(-20, -10) to c(340, 350) but c(-20, 10) is unchanged + # Bring them all to the same units in the 0:360 range + xlim1 <- xlim[1] %% 360 + xlim2 <- xlim[2] %% 360 + lonm <- lon %% 360 + if (lonm[1] > tail(lonm, 1)) { + lonm <- lon + } + if (xlim1 > xlim2) { + # If box crosses 0 + ilonsel <- (lonm >= xlim1) | (lonm <= xlim2) + } else { + ilonsel <- (lonm >= xlim1) & (lonm <= xlim2) + } + if (!any(ilonsel)) { + stop("No intersection between longitude bounds and data domain.") + } + } else { + ilonsel <- 1:length(lon) + } + if (!is.null(ylim)) { + ilatsel <- (lat >= ylim[1]) & (lat <= ylim[2]) + } else { + ilatsel <- 1:length(lat) + } + return(list(ilon = ilonsel, ilat = ilatsel)) +} + +# produce a 2d matrix of area weights +.area.weight <- function(ics, ipsilon, root = T) { + field <- array(NA, dim = c(length(ics), length(ipsilon))) + if (root == T) { + for (j in 1:length(ipsilon)) { + field[, j] <- sqrt(cos(pi / 180 * ipsilon[j])) + } + } + + if (root == F) { + for (j in 1:length(ipsilon)) { + field[, j] <- cos(pi / 180 * ipsilon[j]) + } + } + + return(field) +} + +#Draws Color Bars for Categories +#A wrapper of s2dv::ColorBar to generate multiple color bars for different +#categories, and each category has different color set. +GradientCatsColorBar <- function(nmap, brks = NULL, cols = NULL, vertical = TRUE, subsampleg = NULL, + bar_limits, var_limits = NULL, + triangle_ends = NULL, col_inf = NULL, col_sup = NULL, plot = TRUE, + draw_separators = FALSE, + bar_titles = NULL, title_scale = 1, label_scale = 1, extra_margin = rep(0, 4), + ...) 
{ + + # bar_limits: a vector of 2 or a list + if (!is.list(bar_limits)) { + if (!is.numeric(bar_limits) || length(bar_limits) != 2) { + stop("Parameter 'bar_limits' must be a numeric vector of length 2 or a list containing that.") + } + # turn into list + bar_limits <- rep(list(bar_limits), nmap) + } else { + if (any(!sapply(bar_limits, is.numeric)) || any(sapply(bar_limits, length) != 2)) { + stop("Parameter 'bar_limits' must be a numeric vector of length 2 or a list containing that.") + } + if (length(bar_limits) != nmap) { + stop("Parameter 'bar_limits' must have the length of 'nmap'.") + } + } + # Check brks + if (!is.list(brks)) { + if (is.null(brks)) { + brks <- 5 + } else if (!is.numeric(brks)) { + stop("Parameter 'brks' must be a numeric vector.") + } + # Turn it into list + brks <- rep(list(brks), nmap) + } else { + if (length(brks) != nmap) { + stop("Parameter 'brks' must have the length of 'nmap'.") + } + } + for (i_map in 1:nmap) { + if (length(brks[[i_map]]) == 1) { + brks[[i_map]] <- seq(from = bar_limits[[i_map]][1], to = bar_limits[[i_map]][2], length.out = brks[[i_map]]) + } + } + + # Check cols + col_sets <- list(c("#A1D99B", "#74C476", "#41AB5D", "#238B45"), + c("#6BAED6FF", "#4292C6FF", "#2171B5FF", "#08519CFF"), + c("#FFEDA0FF", "#FED976FF", "#FEB24CFF", "#FD8D3CFF"), + c("#FC4E2AFF", "#E31A1CFF", "#BD0026FF", "#800026FF"), + c("#FCC5C0", "#FA9FB5", "#F768A1", "#DD3497")) + if (is.null(cols)) { + if (length(col_sets) >= nmap) { + chosen_sets <- 1:nmap + chosen_sets <- chosen_sets + floor((length(col_sets) - length(chosen_sets)) / 2) + } else { + chosen_sets <- array(1:length(col_sets), nmap) + } + cols <- col_sets[chosen_sets] + + # Set triangle_ends, col_sup, col_inf + #NOTE: The "col" input of ColorBar() later is not NULL (since we determine it here) + # so ColorBar() cannot decide these parameters for us. + #NOTE: Here, col_inf and col_sup are prior to triangle_ends, which is consistent with ColorBar(). 
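+      # Illustrative sketch (assumed values, not part of the function): with a
+      # 4-colour set and bar limits strictly inside 'var_limits', both triangle
+      # ends are drawn and the outermost colours are peeled off into col_inf and
+      # col_sup, leaving the inner colours for the bar itself:
+      #   cols    <- list(c("#6BAED6", "#4292C6", "#2171B5", "#08519C"))
+      #   col_inf <- lapply(cols, head, 1)                  # "#6BAED6"
+      #   cols    <- lapply(cols, '[', -1)
+      #   col_sup <- lapply(cols, tail, 1)                  # "#08519C"
+      #   cols    <- lapply(cols, '[', -length(cols[[1]]))  # "#4292C6" "#2171B5"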
+ #TODO: Make triangle_ends a list + if (is.null(triangle_ends)) { + if (!is.null(var_limits)) { + triangle_ends <- c(FALSE, FALSE) + #TODO: bar_limits is a list + if (bar_limits[1] >= var_limits[1] | !is.null(col_inf)) { + triangle_ends[1] <- TRUE + if (is.null(col_inf)) { + col_inf <- lapply(cols, head, 1) + cols <- lapply(cols, '[', -1) + } + } + if (bar_limits[2] < var_limits[2] | !is.null(col_sup)) { + triangle_ends[2] <- TRUE + if (is.null(col_sup)) { + col_sup <- lapply(cols, tail, 1) + cols <- lapply(cols, '[', -length(cols[[1]])) + } + } + } else { + triangle_ends <- c(!is.null(col_inf), !is.null(col_sup)) + } + } else { # triangle_ends has values + if (triangle_ends[1] & is.null(col_inf)) { + col_inf <- lapply(cols, head, 1) + cols <- lapply(cols, '[', -1) + } + if (triangle_ends[2] & is.null(col_sup)) { + col_sup <- lapply(cols, tail, 1) + cols <- lapply(cols, '[', -length(cols[[1]])) + } + } + + } else { + if (!is.list(cols)) { + stop("Parameter 'cols' must be a list of character vectors.") + } + if (!all(sapply(cols, is.character))) { + stop("Parameter 'cols' must be a list of character vectors.") + } + if (length(cols) != nmap) { + stop("Parameter 'cols' must be a list of the same length as 'nmap'.") + } + } + for (i_map in 1:length(cols)) { + if (length(cols[[i_map]]) != (length(brks[[i_map]]) - 1)) { + cols[[i_map]] <- grDevices::colorRampPalette(cols[[i_map]])(length(brks[[i_map]]) - 1) + } + } + + # Check bar_titles + if (is.null(bar_titles)) { + if (nmap == 3) { + bar_titles <- c("Below normal (%)", "Normal (%)", "Above normal (%)") + } else if (nmap == 5) { + bar_titles <- c("Low (%)", "Below normal (%)", + "Normal (%)", "Above normal (%)", "High (%)") + } else { + bar_titles <- paste0("Cat. ", 1:nmap, " (%)") + } + } + + if (plot) { + for (k in 1:nmap) { +#TODO: Add s2dv:: + ColorBar(brks = brks[[k]], cols = cols[[k]], vertical = FALSE, subsampleg = subsampleg, + bar_limits = bar_limits[[k]], #var_limits = var_limits, + triangle_ends = triangle_ends, col_inf = col_inf[[k]], col_sup = col_sup[[k]], plot = TRUE, + draw_separators = draw_separators, + title = bar_titles[[k]], title_scale = title_scale, + label_scale = label_scale, extra_margin = extra_margin) + } + } else { + return(list(brks = brks, cols = cols, col_inf = col_inf, col_sup = col_sup)) + } + +} + +.KnownLonNames <- function() { + known_lon_names <- c('lon', 'lons', 'longitude', 'x', 'i', 'nav_lon') +} + +.KnownLatNames <- function() { + known_lat_names <- c('lat', 'lats', 'latitude', 'y', 'j', 'nav_lat') +} + +.KnownTimeNames <- function() { + known_time_names <- c('time', 'ftime', 'sdate', 'sdates', 'syear', 'sweek', 'sday', 'leadtimes') +} + +.KnownForecastTimeNames <- function() { + known_time_names <- c('time', 'ftime', 'ltime', 'leadtimes') +} + +.KnownStartDateNames <- function() { + known_time_names <- c('sdate', 'sdates', 'syear', 'sweek', 'sday') +} + +.KnownMemberNames <- function() { + known_time_names <- c('memb', 'member', 'members', 'ensemble', 'ensembles') +} + +.isNullOb <- function(x) is.null(x) | all(sapply(x, is.null)) + +.rmNullObs <- function(x) { + x <- base::Filter(Negate(.isNullOb), x) + lapply(x, function(x) if (is.list(x)) .rmNullObs(x) else x) +} + +# Definition of a global variable to store the warning message used in Calibration +warning_shown <- FALSE + -- GitLab From 3ab5ce22572c593cf84613f6616aa9f9a676ef67 Mon Sep 17 00:00:00 2001 From: nperez Date: Fri, 1 Sep 2023 09:30:11 +0200 Subject: [PATCH 14/22] unify font --- modules/Visualization/R/plot_ensemble_mean.R | 1 + 
modules/Visualization/R/plot_most_likely_terciles_map.R | 2 +- modules/Visualization/R/plot_skill_metrics.R | 2 +- modules/Visualization/output_size.yml | 2 ++ 4 files changed, 5 insertions(+), 2 deletions(-) diff --git a/modules/Visualization/R/plot_ensemble_mean.R b/modules/Visualization/R/plot_ensemble_mean.R index 8fbe1bfd..3d0304a7 100644 --- a/modules/Visualization/R/plot_ensemble_mean.R +++ b/modules/Visualization/R/plot_ensemble_mean.R @@ -129,6 +129,7 @@ plot_ensemble_mean <- function(recipe, fcst, mask = NULL, dots = NULL, outdir, o base_args <- list(var = NULL, dots = NULL, mask = NULL, lon = longitude, lat = latitude, dot_symbol = 20, title_scale = 0.6, + font.main = 2, filled.continents = F, brks = brks, cols = cols, bar_label_digits = 4, bar_label_scale = 1.5, axes_label_scale = 1, units = units) diff --git a/modules/Visualization/R/plot_most_likely_terciles_map.R b/modules/Visualization/R/plot_most_likely_terciles_map.R index cfc0635e..4b18fd84 100644 --- a/modules/Visualization/R/plot_most_likely_terciles_map.R +++ b/modules/Visualization/R/plot_most_likely_terciles_map.R @@ -209,7 +209,7 @@ plot_most_likely_terciles <- function(recipe, # plot a square tmp$brks <- 4 tmp$draw_ticks <- F - tmp$box_label <- "40+" + tmp$box_label <- "> 40" do.call(ColorBar_onebox, tmp) } } diff --git a/modules/Visualization/R/plot_skill_metrics.R b/modules/Visualization/R/plot_skill_metrics.R index 0ecd9d64..c0b0f4f8 100644 --- a/modules/Visualization/R/plot_skill_metrics.R +++ b/modules/Visualization/R/plot_skill_metrics.R @@ -185,7 +185,7 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, title_scale = 0.6, filled.continents = F, brks = brks, cols = cols, col_inf = col_inf, col_sup = col_sup, - units = units, + units = units, font.main = 2, bar_label_digits = 3, bar_label_scale = 1.5, axes_label_scale = 1, width = 7, height = 5) base_args[names(output_configuration)] <- output_configuration diff --git a/modules/Visualization/output_size.yml b/modules/Visualization/output_size.yml index f5edfd7a..0cd945be 100644 --- a/modules/Visualization/output_size.yml +++ b/modules/Visualization/output_size.yml @@ -9,6 +9,7 @@ region: #units inches bar_extra_margin: !expr c(2,1,0.5,1) dot_size: 1.7 dot_symbol: 4 + font.main: 1 forecast_ensemble_mean: width: 8.5 height: 8.5 @@ -17,6 +18,7 @@ region: #units inches bar_extra_margin: !expr c(2,1,0.5,1) dot_symbol: 4 dot_size: 1.7 + font.main: 1 most_likely_terciles: width: 8.5 height: 8.5 -- GitLab From 2f5378f8d87ced825c92b718bbea39e6567b4c5c Mon Sep 17 00:00:00 2001 From: Victoria Agudetse Roures Date: Tue, 5 Sep 2023 14:45:44 +0200 Subject: [PATCH 15/22] Adapt Units module to multivar case --- .../Units/R/transform_units_precipitation.R | 66 ++++++++++++------- modules/Units/R/transform_units_pressure.R | 15 +++-- modules/Units/R/transform_units_temperature.R | 23 ++++--- modules/Units/R/units_transform.R | 54 ++++++++------- modules/Units/Units.R | 28 +++++--- .../atomic_recipes/recipe_test_multivar.yml | 1 + 6 files changed, 113 insertions(+), 74 deletions(-) diff --git a/modules/Units/R/transform_units_precipitation.R b/modules/Units/R/transform_units_precipitation.R index bc840d15..afe29d4f 100644 --- a/modules/Units/R/transform_units_precipitation.R +++ b/modules/Units/R/transform_units_precipitation.R @@ -1,16 +1,20 @@ transform_units_precipitation <- function(data, original_units, new_units, - var_name, freq, flux = FALSE, ncores = NULL) { + var_name, freq, flux = FALSE, ncores = NULL, + var_index = 1) { ## TODO consider higher 
frequencies (e.g. 6hourly) ## could create a constant hours <- 24 or hours <- 6 and use the same code if (original_units == "ms-1") { if (new_units == "mm") { - data[[1]]$data <- data[[1]]$data * 3600 * 24 * 1000 + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] * 3600 * 24 * 1000 data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'mm' } else if (new_units == "m") { - data[[1]]$data <- data[[1]]$data * 3600 * 24 + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] * 3600 * 24 data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'm' } else if (new_units == "kgm-2s-1") { - data[[1]]$data <- data[[1]]$data * 1000 + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] * 1000 data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'kg m-2 s-1' } else { stop(paste("Unknown transformation from", original_units, "to", @@ -18,13 +22,16 @@ transform_units_precipitation <- function(data, original_units, new_units, } } else if (original_units == "mm") { if (new_units == "ms-1") { - data[[1]]$data <- data[[1]]$data / (3600 * 24 * 1000) + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] / (3600 * 24 * 1000) data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'm s-1' } else if (new_units == "m") { - data[[1]]$data <- data[[1]]$data / 1000 + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] / 1000 data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'm' } else if (new_units == "kgm-2s-1") { - data[[1]]$data <- data[[1]]$data / (3600 * 24 ) + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] / (3600 * 24 ) data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'kg m-2 s-1' } else { stop(paste("Unknown transformation from", original_units, "to", @@ -32,13 +39,16 @@ transform_units_precipitation <- function(data, original_units, new_units, } } else if (original_units == "m") { if (new_units == "ms-1") { - data[[1]]$data <- data[[1]]$data / (3600 * 24) + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] / (3600 * 24) data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'm s-1' } else if (new_units == "mm") { - data[[1]]$data <- data[[1]]$data * 1000 + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] * 1000 data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'mm' } else if (new_units == "kgm-2s-1") { - data[[1]]$data <- data[[1]]$data * 1000 / (3600 * 24) + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] * 1000 / (3600 * 24) data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'kg m-2 s-1' } else { stop(paste("Unknown transformation from", original_units, "to", @@ -46,13 +56,16 @@ transform_units_precipitation <- function(data, original_units, new_units, } } else if (original_units == "kgm-2s-1") { if (new_units == "ms-1") { - data[[1]]$data <- data[[1]]$data / 1000 + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] / 1000 data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'm s-1' } else if (new_units == "mm") { - data[[1]]$data <- data[[1]]$data * 3600 * 24 + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] * 3600 * 24 data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'mm' } else if (new_units == "m") { - data[[1]]$data <- data[[1]]$data 
* 3600 * 24 / 1000 + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] * 3600 * 24 / 1000 data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'm' } else { stop(paste("Unknown transformation from", original_units, "to", @@ -65,21 +78,27 @@ transform_units_precipitation <- function(data, original_units, new_units, data[[1]]$attrs$Variable$metadata[[var_name]]$units <- paste0( data[[1]]$attrs$Variable$metadata[[var_name]]$units, "/day") } else { + ## TODO: Shouldn't use time dimension, need it for Compute() if (freq == "monthly_mean") { # could it not be mean? time_pos <- which(lapply(data[[1]]$attrs$Variable$metadata[[var_name]]$dim, function(x) {x$name}) == 'time') cal <- tolower(data[[1]]$attrs$Variable$metadata[[var_name]]$dim[[time_pos]]$calendar) - data[[1]]$data <- Apply(list(data[[1]]$data, data[[1]]$attrs$Dates), - target_dim = 'time', fun = function(x, y) { - date <- as.Date(y, "%Y-%m-%d") - num_days <- .days_in_month(date, cal = cal) - res <- x * num_days - }, ncores = ncores)$output1 + data_subset <- Subset(data[[1]]$data, along = "var", indices = var_index, drop = 'selected') + data[[1]]$data[ , var_index, , , , , , , ] <- + Apply(list(data_subset, data[[1]]$attrs$Dates), + target_dim = list(c('time'), c('time')), + extra_info = list(cal = cal, days_in_month = .days_in_month), + fun = function(x, y) { + date <- as.Date(y, "%Y-%m-%d") + num_days <- .days_in_month(date, cal = .cal) + res <- x * num_days + }, ncores = ncores)$output1 } } return(data) } -.days_in_month <- function (x, cal) { + +.days_in_month <- function(x, cal) { if (cal %in% c('gregorian', 'standard', 'proleptic_gregorian')) { N_DAYS_IN_MONTHS <- lubridate:::N_DAYS_IN_MONTHS if (leap_year(year(x))) { @@ -92,12 +111,9 @@ transform_units_precipitation <- function(data, original_units, new_units, N_DAYS_IN_MONTHS <- lubridate:::N_DAYS_IN_MONTHS } else { stop("Unknown calendar") - } - + } month_x <- month(x, label = TRUE, locale = "C") n_days <- N_DAYS_IN_MONTHS[month_x] n_days[month_x == "Feb" & leap_year(x)] <- 29L - n_days + return(n_days) } - - diff --git a/modules/Units/R/transform_units_pressure.R b/modules/Units/R/transform_units_pressure.R index 6ebd39ae..db3f88f1 100644 --- a/modules/Units/R/transform_units_pressure.R +++ b/modules/Units/R/transform_units_pressure.R @@ -1,23 +1,28 @@ -transform_units_pressure <- function(data, original_units, new_units, var_name) { +transform_units_pressure <- function(data, original_units, new_units, var_name, + var_index = 1) { if (original_units == 'pa') { if (new_units == 'hpa') { - data[[1]]$data <- data[[1]]$data /100 + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] /100 data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'hPa' } else if (new_units == 'mb') { - data[[1]]$data <- data[[1]]$data /100 + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] /100 data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'mb' } } else if (original_units == 'hpa') { if (new_units == 'pa') { - data[[1]]$data <- data[[1]]$data * 100 + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] * 100 data[[1]]$attrs$Variable$metadata[[var_name]]$units <- "Pa" } else if (new_units == "mb") { data[[1]]$attrs$Variable$metadata[[var_name]]$units <- "mb" } } else if (original_units == "mb") { if (new_units == 'pa') { - data[[1]]$data <- data[[1]]$data * 100 + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , 
var_index, , , , , , , ] * 100 data[[1]]$attrs$Variable$metadata[[var_name]]$units <- "Pa" } else if (new_units == "hPa") { data[[1]]$attrs$Variable$metadata[[var_name]]$units <- "hPa" diff --git a/modules/Units/R/transform_units_temperature.R b/modules/Units/R/transform_units_temperature.R index 09496b0f..bcd27524 100644 --- a/modules/Units/R/transform_units_temperature.R +++ b/modules/Units/R/transform_units_temperature.R @@ -1,13 +1,16 @@ transform_units_temperature <- function(data, original_units, new_units, - var_name) { - if (original_units == 'c' & new_units == 'k') { - data[[1]]$data <- data[[1]]$data + 273.15 - data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'K' - } - if (original_units == 'k' & new_units == 'c') { - data[[1]]$data <- data[[1]]$data - 273.15 - data[[1]]$attrs$Variable$metadata[[var_name]]$units <- "C" + var_name, var_index = 1, + var_dim = "var") { + if (original_units == 'c' & new_units == 'k') { + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] + 273.15 + data[[1]]$attrs$Variable$metadata[[var_name]]$units <- 'K' + } + if (original_units == 'k' & new_units == 'c') { + data[[1]]$data[ , var_index, , , , , , , ] <- + data[[1]]$data[ , var_index, , , , , , , ] - 273.15 + data[[1]]$attrs$Variable$metadata[[var_name]]$units <- "C" - } - return(data) + } + return(data) } diff --git a/modules/Units/R/units_transform.R b/modules/Units/R/units_transform.R index fd435fa8..0779df68 100644 --- a/modules/Units/R/units_transform.R +++ b/modules/Units/R/units_transform.R @@ -2,31 +2,37 @@ # units as character units_transform <- function(data, orig_units, user_units, var_name, freq, flux = FALSE, ncores = NULL) { - var_name <- unlist(var_name) - if (orig_units %in% c("c", "k")) { - if (user_units %in% c("c", "k")) { - trans <- transform_units_temperature(data, orig_units, user_units, - var_name) - } else { - stop("Transformation temperature units not available.") + ## TODO: Change how argument 'flux' works + for (i in 1:length(var_name)) { + if (!(orig_units[i] == user_units[i])) { + if (orig_units[i] %in% c("c", "k")) { + if (user_units[i] %in% c("c", "k")) { + data <- transform_units_temperature(data, orig_units[i], user_units[i], + var_name[i], var_index = i) + } else { + stop("Transformation temperature units not available.") + } + } else if (orig_units[i] %in% c("ms-1", "kgm-2s-1", "mm", "m")) { + if (user_units[i] %in% c("ms-1", "kgm-2s-1", "mm", "m")) { + print("Converting precip units") + data <- transform_units_precipitation(data, orig_units[i], user_units[i], + var_name[i], freq, flux, + ncores = ncores, var_index = i) + } else { + stop("Transformation precipitation units not available.") + } + } else if (orig_units[i] %in% c("pa", "hpa", "mb")) { + if (user_units[i] %in% c("pa", "hpa", "mb")) { + data <- transform_units_pressure(data, orig_units[i], user_units[i], + var_name[i]) + } else { + stop("Transformation precipitation units not available.") + } + } else { + stop("Transformation unknown.") + } } - } else if (orig_units %in% c("ms-1", "kgm-2s-1", "mm", "m")) { - if (user_units %in% c("ms-1", "kgm-2s-1", "mm", "m")) { - trans <- transform_units_precipitation(data, orig_units, user_units, - var_name, freq, flux, ncores = ncores) - } else { - stop("Transformation precipitation units not available.") - } - } else if (orig_units %in% c("pa", "hpa", "mb")) { - if (user_units %in% c("pa", "hpa", "mb")) { - trans <- transform_units_pressure(data, orig_units, user_units, - var_name) - } else { - stop("Transformation 
precipitation units not available.") - } - } else { - stop("Transformation unknown.") } - return(trans) + return(data) } diff --git a/modules/Units/Units.R b/modules/Units/Units.R index 5e7ea88c..07b830d5 100644 --- a/modules/Units/Units.R +++ b/modules/Units/Units.R @@ -22,13 +22,21 @@ Units <- function(recipe, data) { } else { flux <- recipe$Analysis$Variable$flux } - orig_units <- lapply(1:length(data), function(x) { - data[[x]]$attrs$Variable$metadata[[var_names[[x]]]]$units}) + orig_units <- list() + for (element in names(var_names)) { + orig_units[[element]] <- c() + for (x in var_names[[element]]) { + orig_units[[element]] <- c(orig_units[[element]], + data[[element]]$attrs$Variable$metadata[[x]]$units) + } + } if (is.null(recipe$Analysis$Variables$units)) { user_units <- orig_units[[which(!is.null(orig_units))[1]]] } else { user_units <- recipe$Analysis$Variables$units } + ## TODO: How to handle spaces in multi-var case? + user_units <- strsplit(recipe$Analysis$Variables$units, ", | |,")[[1]] # remove spaces, "**", "*" and "per" from units user_units <- tolower(user_units) user_units <- gsub(" ", "", user_units) @@ -39,21 +47,21 @@ Units <- function(recipe, data) { x <- gsub(" ", "", x) x <- gsub("\\**", "", x) x <- gsub("\\*", "", x) - x <- unlist(ifelse(length(x)==0, user_units, x)) #when fcst is NULL + x <- unlist(ifelse(rep(length(x)==0, length(user_units)), user_units, x)) #when fcst is NULL }) - # if "/" appears substitute by -1 in at the end of next unit. How to know? - - if (all(orig_units == user_units)) { + ## TODO: + ## if "/" appears substitute by -1 in at the end of next unit. How to know? + identicalValue <- function(x,y) if (identical(x,y)) TRUE else FALSE + if (Reduce(identicalValue, c(orig_units, user_units))) { # no transformation needed res <- data } else { - obj2trans <- which(orig_units != user_units) res <- sapply(1:length(data), function(x) { - if (x %in% obj2trans) { + if (!all(orig_units[x] == user_units)) { result <- units_transform(data[x], orig_units = orig_units[[x]], user_units = user_units, - var_names[x], freq = freq, + var_names[[x]], freq = freq, flux = flux, ncores = ncores) } else { @@ -61,7 +69,7 @@ Units <- function(recipe, data) { } return(result) }, simplify = TRUE) # instead of lapply to get the named list directly - + info(recipe$Run$logger, "##### UNIT CONVERSION COMPLETE #####") } return(res) } diff --git a/recipes/atomic_recipes/recipe_test_multivar.yml b/recipes/atomic_recipes/recipe_test_multivar.yml index 8eb3e962..9c626a51 100644 --- a/recipes/atomic_recipes/recipe_test_multivar.yml +++ b/recipes/atomic_recipes/recipe_test_multivar.yml @@ -6,6 +6,7 @@ Analysis: Variables: name: tas prlr freq: monthly_mean + units: C mm Datasets: System: name: ECMWF-SEAS5 -- GitLab From cae7c26e7ab34e55ac4e68bf148c8dd495bde473 Mon Sep 17 00:00:00 2001 From: Victoria Agudetse Roures Date: Tue, 5 Sep 2023 14:49:47 +0200 Subject: [PATCH 16/22] Add a TODO --- modules/Units/R/transform_units_precipitation.R | 1 + modules/Units/R/transform_units_pressure.R | 1 + modules/Units/R/transform_units_temperature.R | 1 + 3 files changed, 3 insertions(+) diff --git a/modules/Units/R/transform_units_precipitation.R b/modules/Units/R/transform_units_precipitation.R index afe29d4f..43d00a6a 100644 --- a/modules/Units/R/transform_units_precipitation.R +++ b/modules/Units/R/transform_units_precipitation.R @@ -1,6 +1,7 @@ transform_units_precipitation <- function(data, original_units, new_units, var_name, freq, flux = FALSE, ncores = NULL, var_index = 1) { + ## TODO: 
Hard-coded subsetting ## TODO consider higher frequencies (e.g. 6hourly) ## could create a constant hours <- 24 or hours <- 6 and use the same code if (original_units == "ms-1") { diff --git a/modules/Units/R/transform_units_pressure.R b/modules/Units/R/transform_units_pressure.R index db3f88f1..9712e9fe 100644 --- a/modules/Units/R/transform_units_pressure.R +++ b/modules/Units/R/transform_units_pressure.R @@ -1,6 +1,7 @@ transform_units_pressure <- function(data, original_units, new_units, var_name, var_index = 1) { + ## TODO: Hard-coded subsetting if (original_units == 'pa') { if (new_units == 'hpa') { data[[1]]$data[ , var_index, , , , , , , ] <- diff --git a/modules/Units/R/transform_units_temperature.R b/modules/Units/R/transform_units_temperature.R index bcd27524..366f0d34 100644 --- a/modules/Units/R/transform_units_temperature.R +++ b/modules/Units/R/transform_units_temperature.R @@ -1,6 +1,7 @@ transform_units_temperature <- function(data, original_units, new_units, var_name, var_index = 1, var_dim = "var") { + ## TODO: Hard-coded subsetting if (original_units == 'c' & new_units == 'k') { data[[1]]$data[ , var_index, , , , , , , ] <- data[[1]]$data[ , var_index, , , , , , , ] + 273.15 -- GitLab From c7e6f01428c6141932276475d23c7fb8c9c4b0d3 Mon Sep 17 00:00:00 2001 From: Victoria Agudetse Roures Date: Tue, 5 Sep 2023 14:58:43 +0200 Subject: [PATCH 17/22] Display units in data summary --- tools/data_summary.R | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tools/data_summary.R b/tools/data_summary.R index 714c5a0b..11f365cd 100644 --- a/tools/data_summary.R +++ b/tools/data_summary.R @@ -34,8 +34,11 @@ data_summary <- function(data_cube, recipe) { info(recipe$Run$logger, paste0("Statistical summary of the data in ", object_name, ":")) for (var_index in 1:data_cube$dims[['var']]) { + variable_name <- data_cube$attrs$Variable$varName[var_index] + variable_units <- data_cube$attrs$Variable$metadata[[variable_name]]$units info(recipe$Run$logger, - paste("Variable:", data_cube$attrs$Variable$varName[var_index])) + paste0("Variable: ", variable_name, + " (units: ", variable_units, ")")) output_string <- capture.output(summary(Subset(data_cube$data, along = "var", indices = var_index))) -- GitLab From b039e4c1c4b70410b264a0d1630dc573d43d711f Mon Sep 17 00:00:00 2001 From: nperez Date: Wed, 6 Sep 2023 16:02:39 +0200 Subject: [PATCH 18/22] Fix multipanel size and typo --- modules/Visualization/R/plot_skill_metrics.R | 9 ++++----- modules/Visualization/Visualization.R | 2 +- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/modules/Visualization/R/plot_skill_metrics.R b/modules/Visualization/R/plot_skill_metrics.R index c0b0f4f8..5a3eafaf 100644 --- a/modules/Visualization/R/plot_skill_metrics.R +++ b/modules/Visualization/R/plot_skill_metrics.R @@ -35,7 +35,7 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, } else { projection <- "cylindrical_equidistant" } - + # Define color palette and number of breaks according to output format if (tolower(recipe$Analysis$Output_format) %in% c("scorecards", "cerise")) { diverging_palette <- "purpleorange" @@ -143,7 +143,6 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, # Get variable name and long name var_name <- data_cube$attrs$Variable$varName[[var]] var_long_name <- data_cube$attrs$Variable$metadata[[var_name]]$long_name - # Multi-panel or single-panel plots if (recipe$Analysis$Workflow$Visualization$multi_panel) { # Define titles @@ -171,8 +170,8 @@ plot_skill_metrics <- 
function(recipe, data_cube, skill_metrics, extra_margin = rep(1, 4), bar_label_scale = 1.5, axes_label_scale = 1.3, - width = 7,#default i - height = 5) + width = 11,#default i + height = 11) ) } else { # Define function and parameters depending on projection @@ -187,7 +186,7 @@ plot_skill_metrics <- function(recipe, data_cube, skill_metrics, col_inf = col_inf, col_sup = col_sup, units = units, font.main = 2, bar_label_digits = 3, bar_label_scale = 1.5, - axes_label_scale = 1, width = 7, height = 5) + axes_label_scale = 1, width = 8, height = 5) base_args[names(output_configuration)] <- output_configuration } else { fun <- PlotRobinson diff --git a/modules/Visualization/Visualization.R b/modules/Visualization/Visualization.R index 251e835a..1ddd6ec7 100644 --- a/modules/Visualization/Visualization.R +++ b/modules/Visualization/Visualization.R @@ -74,7 +74,7 @@ plot_data <- function(recipe, recipe$Analysis$Workflow$Visualization$mask_ens <- FALSE } if (is.null(recipe$Analysis$Workflow$Visualization$dots)) { - recipe$Analysis$Worklow$Visualization$dots <- FALSE + recipe$Analysis$Workflow$Visualization$dots <- FALSE } # Plot without mask or dots if ((recipe$Analysis$Workflow$Visualization$mask_ens -- GitLab From 3efd87d2bfebbc6e374aff139364d1a1a7735589 Mon Sep 17 00:00:00 2001 From: Victoria Agudetse Roures Date: Fri, 8 Sep 2023 11:50:10 +0200 Subject: [PATCH 19/22] Put BSC logo code in function --- tools/add_logo.R | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 tools/add_logo.R diff --git a/tools/add_logo.R b/tools/add_logo.R new file mode 100644 index 00000000..42fb87c5 --- /dev/null +++ b/tools/add_logo.R @@ -0,0 +1,15 @@ +add_logo <- function(recipe, logo) { + # recipe: SUNSET recipe + # logo: URL to the logo + system <- list.files(paste0(recipe$Run$output_dir, "/plots/")) + variable <- recipe$Analysis$Variable$name + files <- lapply(variable, function(x) { + f <- list.files(paste0(recipe$Run$output_dir, "/plots/", + system, "/", x)) + full_path <- paste0(recipe$Run$output_dir, "/plots/", + system, "/", x,"/", f)})[[1]] + dim(files) <- c(file = length(files)) + Apply(list(files), target_dims = NULL, function(x) { + system(paste("composite -gravity southeast -geometry +10+10", + logo, x, x))}, ncores = recipe$Analysis$ncores) +} -- GitLab From bd21e30375c661705e213079eb8c42a442364a61 Mon Sep 17 00:00:00 2001 From: Victoria Agudetse Roures Date: Tue, 12 Sep 2023 13:23:49 +0200 Subject: [PATCH 20/22] Delete extra scripts and recipes, change line to get logo from repo --- example_scripts/exec_ecvs_seasonal_oper.R | 17 +++++- example_scripts/exec_units.R | 4 -- exec_ecvs_seasonal_oper.R | 52 ------------------ exec_units.R | 42 --------------- recipe_prlr_seasonal_oper.yml | 63 ---------------------- recipe_prlr_seasonal_units.yml | 63 ---------------------- recipe_tas_seasonal_oper.yml | 66 ----------------------- recipe_tas_seasonal_units.yml | 63 ---------------------- 8 files changed, 16 insertions(+), 354 deletions(-) delete mode 100644 exec_ecvs_seasonal_oper.R delete mode 100644 exec_units.R delete mode 100644 recipe_prlr_seasonal_oper.yml delete mode 100644 recipe_prlr_seasonal_units.yml delete mode 100644 recipe_tas_seasonal_oper.yml delete mode 100644 recipe_tas_seasonal_units.yml diff --git a/example_scripts/exec_ecvs_seasonal_oper.R b/example_scripts/exec_ecvs_seasonal_oper.R index 18f2e493..f1fa717f 100644 --- a/example_scripts/exec_ecvs_seasonal_oper.R +++ b/example_scripts/exec_ecvs_seasonal_oper.R @@ -9,6 +9,7 @@ source("modules/Skill/Skill.R") 
source("modules/Saving/Saving.R") source("modules/Visualization/Visualization.R") source("tools/prepare_outputs.R") +source("modules/Units/Units.R") # Read recipe args = commandArgs(trailingOnly = TRUE) @@ -20,6 +21,8 @@ recipe <- read_atomic_recipe(recipe_file) # Load datasets data <- load_datasets(recipe) +# Change units +data <- Units(recipe, data) # Calibrate datasets data <- calibrate_datasets(recipe, data) # Compute skill metrics @@ -27,9 +30,21 @@ skill_metrics <- compute_skill_metrics(recipe, data) # Compute percentiles and probability bins probabilities <- compute_probabilities(recipe, data) # Export all data to netCDF -## TODO: Fix plotting # save_data(recipe, data, skill_metrics, probabilities) # Plot data plot_data(recipe, data, skill_metrics, probabilities, significance = T) +## Add BSC logo +logo <- "tools/BSC_logo_95.jpg" +system <- list.files(paste0(recipe$Run$output_dir, "/plots")) +variable <- recipe$Analysis$Variable$name +files <- lapply(variable, function(x) { + f <- list.files(paste0(recipe$Run$output_dir, "/plots/", + system, "/", x)) + full_path <- paste0(recipe$Run$output_dir, "/plots/", + system, "/", x,"/", f)})[[1]] +dim(files) <- c(file = length(files)) +Apply(list(files), target_dims = NULL, function(x) { + system(paste("composite -gravity southeast -geometry +10+10", + logo, x, x))}, ncores = recipe$Analysis$ncores) diff --git a/example_scripts/exec_units.R b/example_scripts/exec_units.R index bcd443c5..819121c9 100644 --- a/example_scripts/exec_units.R +++ b/example_scripts/exec_units.R @@ -36,7 +36,3 @@ probabilities <- compute_probabilities(recipe, data) # save_data(recipe, data, skill_metrics, probabilities) # Plot data plot_data(recipe, data, skill_metrics, probabilities, significance = T) - - - - diff --git a/exec_ecvs_seasonal_oper.R b/exec_ecvs_seasonal_oper.R deleted file mode 100644 index cfd5bb2d..00000000 --- a/exec_ecvs_seasonal_oper.R +++ /dev/null @@ -1,52 +0,0 @@ -rm(list=ls()) -gc() -setwd("/esarchive/scratch/nperez/git/auto-s2s") - -source("modules/Loading/Loading.R") -source("modules/Calibration/Calibration.R") -source("modules/Anomalies/Anomalies.R") -source("modules/Skill/Skill.R") -source("modules/Saving/Saving.R") -source("modules/Visualization/Visualization.R") -source("tools/prepare_outputs.R") -source("modules/Units/Units.R") - -# Read recipe -args = commandArgs(trailingOnly = TRUE) -recipe_file <- args[1] -recipe <- read_atomic_recipe(recipe_file) -## to test a single recipe: -#recipe_file <- "recipe_ecvs_seasonal_oper.yml" -#recipe <- prepare_outputs(recipe_file) - -# Load datasets -data <- load_datasets(recipe) -# Change units -data <- Units(recipe, data) -# Calibrate datasets -data <- calibrate_datasets(recipe, data) -# Compute skill metrics -skill_metrics <- compute_skill_metrics(recipe, data) -# Compute percentiles and probability bins -probabilities <- compute_probabilities(recipe, data) -# Export all data to netCDF -# save_data(recipe, data, skill_metrics, probabilities) -# Plot data -plot_data(recipe, data, skill_metrics, probabilities, significance = T) - -## Add logo -logo <- "/esarchive/scratch/nperez/git/auto-s2s/tools/BSC_logo_95.jpg" -system <- list.files(paste0(recipe$Run$output_dir, "/plots")) -## This line may lead to add to logos: -# variables <- list.files(paste0(recipe$Run$output_dir, "/plots/", system)) -variable <- recipe$Analysis$Variable$name -files <- lapply(variable, function(x) { - f <- list.files(paste0(recipe$Run$output_dir, "/plots/", - system, "/", x)) - full_path <- paste0(recipe$Run$output_dir, 
"/plots/", - system, "/", x,"/", f)})[[1]] -dim(files) <- c(file = length(files)) -Apply(list(files), target_dims = NULL, function(x) { - system(paste("composite -gravity southeast -geometry +10+10", - logo, x, x))}, ncores = recipe$Analysis$ncores) - diff --git a/exec_units.R b/exec_units.R deleted file mode 100644 index fa95767b..00000000 --- a/exec_units.R +++ /dev/null @@ -1,42 +0,0 @@ -rm(list=ls()) -gc() -setwd("/esarchive/scratch/nperez/git/auto-s2s") - -source("modules/Loading/Loading.R") -source("modules/Calibration/Calibration.R") -source("modules/Anomalies/Anomalies.R") -source("modules/Skill/Skill.R") -source("modules/Saving/Saving.R") -source("modules/Visualization/Visualization.R") -source("tools/prepare_outputs.R") - -# Read recipe -#args = commandArgs(trailingOnly = TRUE) -#recipe_file <- args[1] -#recipe <- read_atomic_recipe(recipe_file) -## to test a single recipe: - # recipe_file <- "recipe_tas_seasonal_units.yml" - # recipe_file <- "recipe_prlr_seasonal_units.yml" - -recipe <- prepare_outputs(recipe_file) - -# Load datasets -data <- load_datasets(recipe) -# Units transformation -source("modules/Units/Units.R") -test <- Units(recipe, data) -# Calibrate datasets -data <- calibrate_datasets(recipe, test) -# Compute skill metrics -skill_metrics <- compute_skill_metrics(recipe, data) -# Compute percentiles and probability bins -probabilities <- compute_probabilities(recipe, data) -# Export all data to netCDF -## TODO: Fix plotting -# save_data(recipe, data, skill_metrics, probabilities) -# Plot data -plot_data(recipe, data, skill_metrics, probabilities, significance = T) - - - - diff --git a/recipe_prlr_seasonal_oper.yml b/recipe_prlr_seasonal_oper.yml deleted file mode 100644 index 64dc7fe8..00000000 --- a/recipe_prlr_seasonal_oper.yml +++ /dev/null @@ -1,63 +0,0 @@ -Description: - Author: nperez - Info: ECVs Oper ESS ECMWF SEAS5 Seasonal Forecast recipe (monthly mean, tas) - -Analysis: - Horizon: seasonal # Mandatory, str: either subseasonal, seasonal, or decadal - Variables: - name: prlr - freq: monthly_mean - Datasets: - System: - name: ECMWF-SEAS5.1 # Mandatory, str: system5c3s system21_m1 system35c3s - Multimodel: no # Mandatory, bool: Either yes/true or no/false - Reference: - name: ERA5 # Mandatory, str: Reference codename. See docu. - Time: - sdate: '0701' ## MMDD - fcst_year: '2023' # Optional, int: Forecast year 'YYYY' - hcst_start: '1993' # Mandatory, int: Hindcast start year 'YYYY' - hcst_end: '2016' # Mandatory, int: Hindcast end year 'YYYY' - ftime_min: 1 # Mandatory, int: First leadtime time step in months - ftime_max: 6 # Mandatory, int: Last leadtime time step in months - Region: - name: "EU" - latmin: 20 - latmax: 80 - lonmin: -20 - lonmax: 40 - Regrid: - method: bilinear # Mandatory, str: Interpolation method. See docu. - type: "to_system" - #type: /esarchive/scratch/nmilders/gitlab/git_clones/auto-s2s/conf/grid_description.txt #'r360x180' # Mandatory, str: to_system, to_reference, or CDO-accepted grid. - Workflow: - Anomalies: - compute: no - cross_validation: no - save: none - Calibration: - method: evmos # Mandatory, str: Calibration method. See docu. - cross_validation: yes - save: none - Skill: - metric: mean_bias EnsCorr rpss crpss bss10 bss90 - save: 'all' - cross_validation: yes - Probabilities: - percentiles: [[1/3, 2/3], [1/10, 9/10]] # frac: Quantile thresholds. 
- save: 'all' - Indicators: - index: no - Visualization: - plots: skill_metrics forecast_ensemble_mean most_likely_terciles - multi_panel: no - dots: both - ncores: 4 # Optional, int: number of cores, defaults to 1 - remove_NAs: # Optional, bool: Whether NAs are removed, defaults to FALSE - Output_format: scorecards - logo: yes -Run: - Loglevel: INFO - Terminal: TRUE - output_dir: /esarchive/scratch/nperez/cs_oper/ - code_dir: /esarchive/scratch/nperez/git/s2s-suite/ diff --git a/recipe_prlr_seasonal_units.yml b/recipe_prlr_seasonal_units.yml deleted file mode 100644 index e03428ac..00000000 --- a/recipe_prlr_seasonal_units.yml +++ /dev/null @@ -1,63 +0,0 @@ -Description: - Author: nperez - Info: ECVs Oper ESS ECMWF SEAS5 Seasonal Forecast recipe (monthly mean, tas) - -Analysis: - Horizon: seasonal # Mandatory, str: either subseasonal, seasonal, or decadal - Variables: - name: prlr - freq: monthly_mean - units: mm - Datasets: - System: - name: ECMWF-SEAS5.1 # Mandatory, str: system5c3s system21_m1 system35c3s - Multimodel: no # Mandatory, bool: Either yes/true or no/false - Reference: - name: ERA5 # Mandatory, str: Reference codename. See docu. - Time: - sdate: '0601' ## MMDD - fcst_year: # Optional, int: Forecast year 'YYYY' - hcst_start: '2014' # Mandatory, int: Hindcast start year 'YYYY' - hcst_end: '2016' # Mandatory, int: Hindcast end year 'YYYY' - ftime_min: 1 # Mandatory, int: First leadtime time step in months - ftime_max: 6 # Mandatory, int: Last leadtime time step in months - Region: - latmin: 30 # Mandatory, int: minimum latitude - latmax: 50 # Mandatory, int: maximum latitude - lonmin: -10 # Mandatory, int: minimum longitude - lonmax: 10 # Mandatory, int: maximum longitude - Regrid: - method: bilinear # Mandatory, str: Interpolation method. See docu. - type: "to_system" - #type: /esarchive/scratch/nmilders/gitlab/git_clones/auto-s2s/conf/grid_description.txt #'r360x180' # Mandatory, str: to_system, to_reference, or CDO-accepted grid. - Workflow: - Anomalies: - compute: no - cross_validation: no - save: none - Calibration: - method: evmos # Mandatory, str: Calibration method. See docu. - cross_validation: yes - save: none - Skill: - metric: mean_bias EnsCorr rpss crpss bss10 bss90 - save: 'all' - cross_validation: yes - Probabilities: - percentiles: [[1/3, 2/3], [1/10, 9/10]] # frac: Quantile thresholds. - save: 'all' - Indicators: - index: no - Visualization: - plots: skill_metrics forecast_ensemble_mean most_likely_terciles - multi_panel: no - mask_terciles: both - ncores: 4 # Optional, int: number of cores, defaults to 1 - remove_NAs: # Optional, bool: Whether NAs are removed, defaults to FALSE - Output_format: scorecards - logo: yes -Run: - Loglevel: INFO - Terminal: TRUE - output_dir: /esarchive/scratch/nperez/cs_oper/ - code_dir: /esarchive/scratch/nperez/git/s2s-suite/ diff --git a/recipe_tas_seasonal_oper.yml b/recipe_tas_seasonal_oper.yml deleted file mode 100644 index c5e6482f..00000000 --- a/recipe_tas_seasonal_oper.yml +++ /dev/null @@ -1,66 +0,0 @@ -Description: - Author: nperez - Info: ECVs Oper ESS ECMWF SEAS5 Seasonal Forecast recipe (monthly mean, tas) - -Analysis: - Horizon: seasonal # Mandatory, str: either subseasonal, seasonal, or decadal - Variables: - name: tas - freq: monthly_mean - units: C - flux: FALSE - Datasets: - System: - name: ECMWF-SEAS5.1 # Mandatory, str: system5c3s system21_m1 system35c3s - Multimodel: no # Mandatory, bool: Either yes/true or no/false - Reference: - name: ERA5 # Mandatory, str: Reference codename. See docu. 
- Time: - sdate: '0601' ## MMDD - fcst_year: '2023' # Optional, int: Forecast year 'YYYY' - hcst_start: '1993' # Mandatory, int: Hindcast start year 'YYYY' - hcst_end: '2016' # Mandatory, int: Hindcast end year 'YYYY' - ftime_min: 1 # Mandatory, int: First leadtime time step in months - ftime_max: 6 # Mandatory, int: Last leadtime time step in months - Region: - name: "EU" - latmin: 20 - latmax: 80 - lonmin: -20 - lonmax: 40 - Regrid: - method: bilinear # Mandatory, str: Interpolation method. See docu. - type: "to_system" - #type: /esarchive/scratch/nmilders/gitlab/git_clones/auto-s2s/conf/grid_description.txt #'r360x180' # Mandatory, str: to_system, to_reference, or CDO-accepted grid. - Workflow: - Anomalies: - compute: no - cross_validation: no - save: none - Calibration: - method: evmos # Mandatory, str: Calibration method. See docu. - cross_validation: yes - save: none - Skill: - metric: mean_bias EnsCorr rpss crpss bss10 bss90 - save: 'all' - cross_validation: yes - Probabilities: - percentiles: [[1/3, 2/3], [1/10, 9/10]] # frac: Quantile thresholds. - save: 'all' - Indicators: - index: no - Visualization: - plots: skill_metrics forecast_ensemble_mean most_likely_terciles - multi_panel: no - mask_terciles: no - dots: true - ncores: 4 # Optional, int: number of cores, defaults to 1 - remove_NAs: # Optional, bool: Whether NAs are removed, defaults to FALSE - Output_format: scorecards - logo: yes -Run: - Loglevel: INFO - Terminal: TRUE - output_dir: /esarchive/scratch/nperez/cs_oper/ - code_dir: /esarchive/scratch/nperez/git/s2s-suite/ diff --git a/recipe_tas_seasonal_units.yml b/recipe_tas_seasonal_units.yml deleted file mode 100644 index d2b25321..00000000 --- a/recipe_tas_seasonal_units.yml +++ /dev/null @@ -1,63 +0,0 @@ -Description: - Author: nperez - Info: ECVs Oper ESS ECMWF SEAS5 Seasonal Forecast recipe (monthly mean, tas) - -Analysis: - Horizon: seasonal # Mandatory, str: either subseasonal, seasonal, or decadal - Variables: - name: tas - freq: monthly_mean - units: K - Datasets: - System: - name: ECMWF-SEAS5.1 # Mandatory, str: system5c3s system21_m1 system35c3s - Multimodel: no # Mandatory, bool: Either yes/true or no/false - Reference: - name: ERA5 # Mandatory, str: Reference codename. See docu. - Time: - sdate: '0601' ## MMDD - fcst_year: '2023' # Optional, int: Forecast year 'YYYY' - hcst_start: '2009' # Mandatory, int: Hindcast start year 'YYYY' - hcst_end: '2016' # Mandatory, int: Hindcast end year 'YYYY' - ftime_min: 1 # Mandatory, int: First leadtime time step in months - ftime_max: 6 # Mandatory, int: Last leadtime time step in months - Region: - latmin: 30 # Mandatory, int: minimum latitude - latmax: 50 # Mandatory, int: maximum latitude - lonmin: -10 # Mandatory, int: minimum longitude - lonmax: 10 # Mandatory, int: maximum longitude - Regrid: - method: bilinear # Mandatory, str: Interpolation method. See docu. - type: "to_system" - #type: /esarchive/scratch/nmilders/gitlab/git_clones/auto-s2s/conf/grid_description.txt #'r360x180' # Mandatory, str: to_system, to_reference, or CDO-accepted grid. - Workflow: - Anomalies: - compute: no - cross_validation: no - save: none - Calibration: - method: evmos # Mandatory, str: Calibration method. See docu. - cross_validation: yes - save: none - Skill: - metric: mean_bias EnsCorr rpss crpss bss10 bss90 - save: 'all' - cross_validation: yes - Probabilities: - percentiles: [[1/3, 2/3], [1/10, 9/10]] # frac: Quantile thresholds. 
- save: 'all' - Indicators: - index: no - Visualization: - plots: skill_metrics forecast_ensemble_mean most_likely_terciles - multi_panel: no - mask_terciles: both - ncores: 4 # Optional, int: number of cores, defaults to 1 - remove_NAs: # Optional, bool: Whether NAs are removed, defaults to FALSE - Output_format: scorecards - logo: yes -Run: - Loglevel: INFO - Terminal: TRUE - output_dir: /esarchive/scratch/nperez/cs_oper/ - code_dir: /esarchive/scratch/nperez/git/s2s-suite/ -- GitLab From ce271859c68a71677b23110096f91e5be86a1166 Mon Sep 17 00:00:00 2001 From: Victoria Agudetse Roures Date: Tue, 12 Sep 2023 16:59:19 +0200 Subject: [PATCH 21/22] Adapt logo code to multiple variables --- example_scripts/exec_ecvs_seasonal_oper.R | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/example_scripts/exec_ecvs_seasonal_oper.R b/example_scripts/exec_ecvs_seasonal_oper.R index f1fa717f..cdb48d09 100644 --- a/example_scripts/exec_ecvs_seasonal_oper.R +++ b/example_scripts/exec_ecvs_seasonal_oper.R @@ -37,7 +37,7 @@ plot_data(recipe, data, skill_metrics, probabilities, significance = T) ## Add BSC logo logo <- "tools/BSC_logo_95.jpg" system <- list.files(paste0(recipe$Run$output_dir, "/plots")) -variable <- recipe$Analysis$Variable$name +variable <- strsplit(recipe$Analysis$Variable$name, ", | |,")[[1]] files <- lapply(variable, function(x) { f <- list.files(paste0(recipe$Run$output_dir, "/plots/", system, "/", x)) -- GitLab From 7c8a6acb518a57dbc2d20759817fb97e44b46c33 Mon Sep 17 00:00:00 2001 From: aho Date: Fri, 15 Sep 2023 10:39:14 +0200 Subject: [PATCH 22/22] add tmp function; specify function parameters to avoid warning --- .../R/plot_most_likely_terciles_map.R | 4 +- modules/Visualization/R/tmp/ColorBar_onebox.R | 496 ++++++++++++++++++ 2 files changed, 499 insertions(+), 1 deletion(-) create mode 100644 modules/Visualization/R/tmp/ColorBar_onebox.R diff --git a/modules/Visualization/R/plot_most_likely_terciles_map.R b/modules/Visualization/R/plot_most_likely_terciles_map.R index 698619c8..93d36042 100644 --- a/modules/Visualization/R/plot_most_likely_terciles_map.R +++ b/modules/Visualization/R/plot_most_likely_terciles_map.R @@ -6,7 +6,7 @@ source("modules/Visualization/R/tmp/ColorBar.R") source("modules/Visualization/R/tmp/clim.palette.R") source("modules/Visualization/R/tmp/Utils.R") source("modules/Visualization/R/tmp/PlotEquiMap.R") -source("/esarchive/scratch/aho/tmp/ColorBar_onebox.R") +source("modules/Visualization/R/tmp/ColorBar_onebox.R") source("modules/Visualization/R/tmp/GradientCatsColorBar.R") ## TODO: Change name @@ -213,6 +213,8 @@ plot_most_likely_terciles <- function(recipe, tmp$brks <- 4 tmp$draw_ticks <- F tmp$box_label <- "> 40" + tmp$triangle_ends <- c(F, F) + tmp$draw_separators <- FALSE do.call(ColorBar_onebox, tmp) } } diff --git a/modules/Visualization/R/tmp/ColorBar_onebox.R b/modules/Visualization/R/tmp/ColorBar_onebox.R new file mode 100644 index 00000000..679600df --- /dev/null +++ b/modules/Visualization/R/tmp/ColorBar_onebox.R @@ -0,0 +1,496 @@ +#============================================================================ +# This function is adapted from s2dv::ColorBar. It plots only one box with the proper +# width, which is manipulated by the length of breaks. 
+#============================================================================ +ColorBar_onebox <- function(brks = NULL, cols = NULL, vertical = TRUE, + subsampleg = NULL, bar_limits = NULL, var_limits = NULL, + triangle_ends = NULL, col_inf = NULL, col_sup = NULL, + color_fun = clim.palette(), plot = TRUE, + draw_ticks = TRUE, draw_separators = FALSE, + triangle_ends_scale = 1, extra_labels = NULL, + title = NULL, title_scale = 1, + label_scale = 1, tick_scale = 1, + extra_margin = rep(0, 4), label_digits = 4, box_label = NULL, ...) { + # Required checks +#---------NEW---------- +# if ((is.null(brks) || length(brks) < 2) && is.null(bar_limits) && is.null(var_limits)) { +# stop("At least one of 'brks' with the desired breaks, 'bar_limits' or ", +# "'var_limits' must be provided to generate the colour bar.") +# } + + # Check brks + if (is.null(brks)) { + stop("Parameter 'brks' must be provided.") + } + if (!is.numeric(brks) | length(brks) > 1) { + stop("Parameter 'brks' must be a number.") + } + + # Check bar_limits + if (!is.null(bar_limits)) { + if (!(all(is.na(bar_limits) | is.numeric(bar_limits)) && (length(bar_limits) == 2))) { + stop("Parameter 'bar_limits' must be a vector of two numeric elements or NAs.") + } +#-------NEW---------- + } else { + # randomly set a value + bar_limits <- c(0, 100) + } +#-------NEW_END----------- + + # Check var_limits + if (!is.null(var_limits)) { + if (!(is.numeric(var_limits) && (length(var_limits) == 2))) { + stop("Parameter 'var_limits' must be a numeric vector of length 2.") + } else if (anyNA(var_limits)) { + stop("Parameter 'var_limits' must not contain NA values.") + } else if (any(is.infinite(var_limits))) { + stop("Parameter 'var_limits' must not contain infinite values.") + } + } + + # Check cols + if (!is.null(cols)) { + if (!is.character(cols)) { + stop("Parameter 'cols' must be a vector of character strings.") + } else if (any(!sapply(cols, .IsColor))) { + stop("Parameter 'cols' must contain valid colour identifiers.") + } + } + + # Check color_fun + if (!is.function(color_fun)) { + stop("Parameter 'color_fun' must be a colour-generator function.") + } + + # Check integrity among brks, bar_limits and var_limits + if (is.null(brks) || (length(brks) < 2)) { + if (is.null(brks)) { + if (is.null(cols)) { + brks <- 21 + } else { + brks <- length(cols) + 1 + } + } + if (is.null(bar_limits) || anyNA(bar_limits)) { + # var_limits is defined + if (is.null(bar_limits)) { + bar_limits <- c(NA, NA) + } + half_width <- 0.5 * (var_limits[2] - var_limits[1]) / (brks - 1) + bar_limits[which(is.na(bar_limits))] <- c(var_limits[1] - half_width, var_limits[2] + half_width)[which(is.na(bar_limits))] + brks <- seq(bar_limits[1], bar_limits[2], length.out = brks) + } else if (is.null(var_limits)) { + # bar_limits is defined + var_limits <- bar_limits + half_width <- 0.5 * (var_limits[2] - var_limits[1]) / (brks - 1) + brks <- seq(bar_limits[1], bar_limits[2], length.out = brks) + var_limits[1] <- var_limits[1] + half_width / 50 + } else { + # both bar_limits and var_limits are defined + brks <- seq(bar_limits[1], bar_limits[2], length.out = brks) + } + } else if (is.null(bar_limits)) { + if (is.null(var_limits)) { + # brks is defined + bar_limits <- c(head(brks, 1), tail(brks, 1)) + var_limits <- bar_limits + half_width <- 0.5 * (var_limits[2] - var_limits[1]) / (length(brks) - 1) + var_limits[1] <- var_limits[1] + half_width / 50 + } else { + # brks and var_limits are defined + bar_limits <- c(head(brks, 1), tail(brks, 1)) + } + } else { + # brks and bar_limits 
are defined + # or + # brks, bar_limits and var_limits are defined + if (head(brks, 1) != bar_limits[1] || tail(brks, 1) != bar_limits[2]) { + stop("Parameters 'brks' and 'bar_limits' are inconsistent.") + } + } + +#--------NEW------------ + white_num <- length(brks) / 2 - 1 + if (length(brks) != 2 & length(cols) == 1) { + # Add whites at two sides + if (white_num %% 1 == 0) { + cols <- c(rep("white", white_num), cols, rep("white", white_num)) + } else { + warning("Set brks length even number so the color box can be in the center.") + cols <- c(rep("white", white_num), cols, rep("white", white_num), "white") + } + } +#------NEW_END---------- + + # Check col_inf + if (!is.null(col_inf)) { + if (!.IsColor(col_inf)) { + stop("Parameter 'col_inf' must be a valid colour identifier.") + } + } + + # Check col_sup + if (!is.null(col_sup)) { + if (!.IsColor(col_sup)) { + stop("Parameter 'col_sup' must be a valid colour identifier.") + } + } + + # Check triangle_ends + if (is.null(triangle_ends)) { + triangle_ends <- c(F, F) + } else if (!identical(triangle_ends, c(FALSE, FALSE))) { + warning("Plotting triangle ends is not a valid option. Set parameter 'triangle_ends' to c(F, F).") + triangle_ends <- c(F, F) + } +#------------NEW----------------- +# if (!is.null(triangle_ends) && (!is.logical(triangle_ends) || length(triangle_ends) != 2)) { +# stop("Parameter 'triangle_ends' must be a logical vector with two elements.") +# } +# teflc <- triangle_ends_from_limit_cols <- c(!is.null(col_inf), !is.null(col_sup)) +# if (is.null(triangle_ends) && is.null(col_inf) && is.null(col_sup)) { +# triangle_ends <- c(FALSE, FALSE) +# if (bar_limits[1] >= var_limits[1]) { +# triangle_ends[1] <- TRUE +# } +# if (bar_limits[2] < var_limits[2]) { +# triangle_ends[2] <- TRUE +# } +# } else if (!is.null(triangle_ends) && is.null(col_inf) && is.null(col_sup)) { +# triangle_ends <- triangle_ends +# } else if (is.null(triangle_ends) && (!is.null(col_inf) || !is.null(col_sup))) { +# triangle_ends <- teflc +# } else if (any(teflc != triangle_ends)) { +# if (!is.null(brks) && length(brks) > 1 && !is.null(cols) && length(cols) >= length(brks)) { +# triangle_ends <- teflc +# } else if (!is.null(cols)) { +# triangle_ends <- teflc +# } else { +# triangle_ends <- triangle_ends +# } +# } +# if (plot) { +# if ((bar_limits[1] >= var_limits[1]) && !triangle_ends[1]) { +# .warning("There are variable values smaller or equal to the lower limit ", +# "of the colour bar and the lower triangle end has been ", +# "disabled. These will be painted in the colour for NA values.") +# } +# if ((bar_limits[2] < var_limits[2]) && !triangle_ends[2]) { +# .warning("There are variable values greater than the higher limit ", +# "of the colour bar and the higher triangle end has been ", +# "disabled. These will be painted in the colour for NA values.") +# } +# } +#--------NEW_END------------ + + # Generate colours if needed + if (is.null(cols)) { + cols <- color_fun(length(brks) - 1 + sum(triangle_ends)) + attr_bk <- attributes(cols) + if (triangle_ends[1]) { + if (is.null(col_inf)) col_inf <- head(cols, 1) + cols <- cols[-1] + } + if (triangle_ends[2]) { + if (is.null(col_sup)) col_sup <- tail(cols, 1) + cols <- cols[-length(cols)] + } + attributes(cols) <- attr_bk + } else if ((length(cols) != (length(brks) - 1))) { + stop("Incorrect number of 'brks' and 'cols'. 
There must be one more break than the number of colours.") + } + + # Check vertical + if (!is.logical(vertical)) { + stop("Parameter 'vertical' must be TRUE or FALSE.") + } + + # Check extra_labels + if (is.null(extra_labels)) { + extra_labels <- numeric(0) + } + if (!is.numeric(extra_labels)) { + stop("Parameter 'extra_labels' must be numeric.") + } else { + if (any(extra_labels > bar_limits[2]) || any(extra_labels < bar_limits[1])) { + stop("Parameter 'extra_labels' must not contain ticks beyond the color bar limits.") + } + } + extra_labels <- sort(extra_labels) + + # Check subsampleg + primes <- function(x) { + # Courtesy of Chase. See http://stackoverflow.com/questions/6424856/r-function-for-returning-all-factors + x <- as.integer(x) + div <- seq_len(abs(x)) + factors <- div[x %% div == 0L] + factors <- list(neg = -factors, pos = factors) + return(factors) + } + remove_final_tick <- FALSE + added_final_tick <- TRUE + if (is.null(subsampleg)) { + subsampleg <- 1 + while (length(brks) / subsampleg > 15 - 1) { + next_factor <- primes((length(brks) - 1) / subsampleg)$pos + next_factor <- next_factor[length(next_factor) - ifelse(length(next_factor) > 2, 1, 0)] + subsampleg <- subsampleg * next_factor + } + if (subsampleg > (length(brks) - 1) / 4) { + subsampleg <- max(1, round(length(brks) / 4)) + extra_labels <- c(extra_labels, bar_limits[2]) + added_final_tick <- TRUE + if ((length(brks) - 1) %% subsampleg < (length(brks) - 1) / 4 / 2) { + remove_final_tick <- TRUE + } + } + } else if (!is.numeric(subsampleg)) { + stop("Parameter 'subsampleg' must be numeric.") + } + subsampleg <- round(subsampleg) + draw_labels <- TRUE + if ((subsampleg) < 1) { + draw_labels <- FALSE + } + + # Check plot + if (!is.logical(plot)) { + stop("Parameter 'plot' must be logical.") + } + + # Check draw_separators + if (!is.logical(draw_separators)) { + stop("Parameter 'draw_separators' must be logical.") + } +#--------NEW---------- + if (draw_separators) { + warning("Draw only one box. Parameter 'draw_separators' is not effective.") + draw_separators <- FALSE + } +#--------NEW_END---------- + + # Check triangle_ends_scale + if (!is.numeric(triangle_ends_scale)) { + stop("Parameter 'triangle_ends_scale' must be numeric.") + } + + # Check draw_ticks + if (!is.logical(draw_ticks)) { + stop("Parameter 'draw_ticks' must be logical.") + } + + # Check title + if (is.null(title)) { + title <- '' + } + if (!is.character(title)) { + stop("Parameter 'title' must be a character string.") + } + + # Check title_scale + if (!is.numeric(title_scale)) { + stop("Parameter 'title_scale' must be numeric.") + } + + # Check label_scale + if (!is.numeric(label_scale)) { + stop("Parameter 'label_scale' must be numeric.") + } + + # Check tick_scale + if (!is.numeric(tick_scale)) { + stop("Parameter 'tick_scale' must be numeric.") + } + + # Check extra_margin + if (!is.numeric(extra_margin) || length(extra_margin) != 4) { + stop("Parameter 'extra_margin' must be a numeric vector of length 4.") + } + + # Check label_digits + if (!is.numeric(label_digits)) { + stop("Parameter 'label_digits' must be numeric.") + } + label_digits <- round(label_digits) + + # Process the user graphical parameters that may be passed in the call + ## Graphical parameters to exclude + excludedArgs <- c("cex", "cex.axis", "col", "lab", "las", "mar", "mgp", "new", "ps") + userArgs <- .FilterUserGraphicArgs(excludedArgs, ...) 
+ + # + # Plotting colorbar + # ~~~~~~~~~~~~~~~~~~~ + # + if (plot) { + pars_to_save <- c('mar', 'cex', names(userArgs), 'mai', 'mgp', 'las', 'xpd') + saved_pars <- par(pars_to_save) + par(mar = c(0, 0, 0, 0), cex = 1) + image(1, 1, t(t(1)), col = rgb(0, 0, 0, 0), axes = FALSE, xlab = '', ylab = '') + # Get the availale space + figure_size <- par('fin') + cs <- par('csi') + # This allows us to assume we always want to plot horizontally + if (vertical) { + figure_size <- rev(figure_size) + } +# pannel_to_redraw <- par('mfg') +# .SwitchToFigure(pannel_to_redraw[1], pannel_to_redraw[2]) + # Load the user parameters + par(new = TRUE) + par(userArgs) + # Set up color bar plot region + margins <- c(0.0, 0, 0.0, 0) + cex_title <- 1 * title_scale + cex_labels <- 0.9 * label_scale + cex_ticks <- -0.3 * tick_scale + spaceticklab <- max(-cex_ticks, 0) + if (vertical) { + margins[1] <- margins[1] + (1.2 * cex_labels * 3 + spaceticklab) * cs + margins <- margins + extra_margin[c(4, 1:3)] * cs + } else { + margins[1] <- margins[1] + (1.2 * cex_labels * 1 + spaceticklab) * cs + margins <- margins + extra_margin * cs + } + if (title != '') { + margins[3] <- margins[3] + (1.0 * cex_title) * cs + } + margins[3] <- margins[3] + sqrt(figure_size[2] / (margins[1] + margins[3])) * + figure_size[2] / 6 * ifelse(title != '', 0.5, 0.8) + # Set side margins + margins[2] <- margins[2] + figure_size[1] / 16 + margins[4] <- margins[4] + figure_size[1] / 16 + triangle_ends_prop <- 1 / 32 * triangle_ends_scale + triangle_ends_cex <- triangle_ends_prop * figure_size[2] + if (triangle_ends[1]) { + margins[2] <- margins[2] + triangle_ends_cex + } + if (triangle_ends[2]) { + margins[4] <- margins[4] + triangle_ends_cex + } + ncols <- length(cols) + # Set up the points of triangles + # Compute the proportion of horiz. 
space occupied by one plot unit + prop_unit <- (1 - (margins[2] + margins[4]) / figure_size[1]) / ncols + # Convert triangle height to plot inits + triangle_height <- triangle_ends_prop / prop_unit + left_triangle <- list(x = c(1, 1 - triangle_height, 1) - 0.5, + y = c(1.4, 1, 0.6)) + right_triangle <- list(x = c(ncols, ncols + triangle_height, ncols) + 0.5, + y = c(1.4, 1, 0.6)) + # Draw the color squares and title + if (vertical) { + par(mai = c(margins[2:4], margins[1]), + mgp = c(0, spaceticklab + 0.2, 0), las = 1) + d <- 4 + image(1, 1:ncols, t(1:ncols), axes = FALSE, col = cols, + xlab = '', ylab = '') + title(ylab = title, line = cex_title * (0.2 + 0.1), cex.lab = cex_title) + # Draw top and bottom border lines + lines(c(0.6, 0.6), c(1 - 0.5, ncols + 0.5)) + lines(c(1.4, 1.4), c(1 - 0.5, ncols + 0.5)) + # Rotate triangles + names(left_triangle) <- rev(names(left_triangle)) + names(right_triangle) <- rev(names(right_triangle)) + } else { + # The term - cex_labels / 4 * (3 / cex_labels - 1) was found by + # try and error + par(mai = margins, + mgp = c(0, cex_labels / 2 + spaceticklab + - cex_labels / 4 * (3 / cex_labels - 1), 0), + las = 1) + d <- 1 + image(1:ncols, 1, t(t(1:ncols)), axes = FALSE, col = cols, + xlab = '', ylab = '') + title(title, line = cex_title * (0.2 + 0.1), cex.main = cex_title) +#---------NEW--------------- + # Draw top and bottom border lines + lines(c(1 - 0.5 + white_num, ncols + 0.5 - white_num), c(0.6, 0.6)) + lines(c(1 - 0.5 + white_num, ncols + 0.5 - white_num), c(1.4, 1.4)) +#---------NEW_END-------------- + tick_length <- -0.4 + } + # Draw the triangles + par(xpd = TRUE) + if (triangle_ends[1]) { + # Draw left triangle + polygon(left_triangle$x, left_triangle$y, col = col_inf, border = NA) + lines(left_triangle$x, left_triangle$y) + } + if (triangle_ends[2]) { + # Draw right triangle + polygon(right_triangle$x, right_triangle$y, col = col_sup, border = NA) + lines(right_triangle$x, right_triangle$y) + } + par(xpd = FALSE) + + # Put the separators + if (vertical) { + if (draw_separators) { + for (i in 1:(ncols - 1)) { + lines(c(0.6, 1.4), c(i, i) + 0.5) + } + } + if (draw_separators || is.null(col_inf)) { + lines(c(0.6, 1.4), c(0.5, 0.5)) + } + if (draw_separators || is.null(col_sup)) { + lines(c(0.6, 1.4), c(ncols + 0.5, ncols + 0.5)) + } + } else { + if (draw_separators) { + for (i in 1:(ncols - 1)) { + lines(c(i, i) + 0.5, c(0.6, 1.4)) + } + } +#----------NEW------------- +# if (draw_separators || is.null(col_inf)) { +# lines(c(0.5, 0.5), c(0.6, 1.4)) +# } +# if (draw_separators || is.null(col_sup)) { +# lines(c(ncols + 0.5, ncols + 0.5), c(0.6, 1.4)) +# } +#---------NEW_END------------- + } +#---------NEW------------- + # Draw vertical border lines + lines(c(0.5 + white_num, 0.5 + white_num), c(0.6, 1.4)) + lines(c(ncols + 0.5 - white_num, ncols + 0.5 - white_num), c(0.6, 1.4)) +#-------NEW_END----------- + + # Put the ticks + plot_range <- length(brks) - 1 + var_range <- tail(brks, 1) - head(brks, 1) + extra_labels_at <- ((extra_labels - head(brks, 1)) / var_range) * plot_range + 0.5 + at <- seq(1, length(brks), subsampleg) + labels <- brks[at] + # Getting rid of next-to-last tick if too close to last one + if (remove_final_tick) { + at <- at[-length(at)] + labels <- labels[-length(labels)] + } + labels <- signif(labels, label_digits) + if (added_final_tick) { + extra_labels[length(extra_labels)] <- signif(tail(extra_labels, 1), label_digits) + } + at <- at - 0.5 + at <- c(at, extra_labels_at) + labels <- c(labels, extra_labels) + tick_reorder <- 
sort(at, index.return = TRUE) + at <- tick_reorder$x + if (draw_labels) { + labels <- labels[tick_reorder$ix] + } else { + labels <- FALSE + } +#---------NEW---------- + if (is.null(box_label)) box_label <- "" + axis(d, at = mean(at), tick = draw_ticks, labels = box_label, cex.axis = cex_labels, tcl = cex_ticks) +# axis(d, at = at, tick = draw_ticks, labels = labels, cex.axis = cex_labels, tcl = cex_ticks) +#-------NEW_END---------- + par(saved_pars) + } + invisible(list(brks = brks, cols = cols, col_inf = col_inf, col_sup = col_sup)) +} + -- GitLab
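For the multi-variable handling added above (e.g. units "C mm" in recipes/atomic_recipes/recipe_test_multivar.yml), the user-requested units are split into one entry per variable before conversion. A minimal sketch of that parsing step, using the same separator pattern as modules/Units/Units.R; the variable names and unit strings below are illustrative only:

    var_names  <- strsplit("tas prlr", ", | |,")[[1]]   # -> c("tas", "prlr")
    user_units <- strsplit("C mm",     ", | |,")[[1]]   # -> c("C", "mm")
    user_units <- tolower(user_units)                   # units are compared in lower case
    # Inside units_transform(), each variable is then converted independently by the
    # matching transform_units_*() function, which subsets the data array along the
    # variable dimension via var_index.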
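The logo-stamping code factored into tools/add_logo.R can be called once all plots have been written, as example_scripts/exec_ecvs_seasonal_oper.R does inline. A minimal usage sketch; it assumes a recipe object already read with read_atomic_recipe() or prepare_outputs(), the multiApply package loaded for Apply(), and ImageMagick's composite command available on the system:

    library(multiApply)                 # provides Apply(), used inside add_logo()
    source("tools/add_logo.R")
    add_logo(recipe, logo = "tools/BSC_logo_95.jpg")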
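ColorBar_onebox() above draws a single coloured box, and plot_most_likely_terciles_map.R calls it with draw_ticks, draw_separators and triangle_ends disabled and the box labelled "> 40". A standalone sketch of an equivalent call; the colour and horizontal orientation are arbitrary choices here, and the sourced helper scripts are assumed to provide .IsColor(), .FilterUserGraphicArgs() and clim.palette(), as when the function is used from the plotting module:

    source("modules/Visualization/R/tmp/clim.palette.R")
    source("modules/Visualization/R/tmp/Utils.R")
    source("modules/Visualization/R/tmp/ColorBar_onebox.R")
    # brks = 4 expands to a 4-break bar; a single colour is padded with white on
    # both sides, so only the central box is drawn with a border.
    ColorBar_onebox(brks = 4, cols = "darkblue", vertical = FALSE,
                    draw_ticks = FALSE, draw_separators = FALSE,
                    triangle_ends = c(FALSE, FALSE), box_label = "> 40")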